| prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
| --- | --- |
<|file_name|>zip.js<|end_file_name|><|fim▁begin|>/* */
"use strict";
var __extends = (this && this.__extends) || function(d, b) {
for (var p in b)
if (b.hasOwnProperty(p))
d[p] = b[p];
function __() {
this.constructor = d;
}
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var ArrayObservable_1 = require('../observable/ArrayObservable');
var isArray_1 = require('../util/isArray');
var Subscriber_1 = require('../Subscriber');
var OuterSubscriber_1 = require('../OuterSubscriber');
var subscribeToResult_1 = require('../util/subscribeToResult');
var iterator_1 = require('../symbol/iterator');
function zipProto() {
var observables = [];
for (var _i = 0; _i < arguments.length; _i++) {
observables[_i - 0] = arguments[_i];
}
return this.lift.call(zipStatic.apply(void 0, [this].concat(observables)));
}
exports.zipProto = zipProto;
function zipStatic() {
var observables = [];
for (var _i = 0; _i < arguments.length; _i++) {
observables[_i - 0] = arguments[_i];
}
var project = observables[observables.length - 1];
if (typeof project === 'function') {
observables.pop();
}
return new ArrayObservable_1.ArrayObservable(observables).lift(new ZipOperator(project));
}
exports.zipStatic = zipStatic;
var ZipOperator = (function() {
function ZipOperator(project) {
this.project = project;
}
ZipOperator.prototype.call = function(subscriber, source) {
return source.subscribe(new ZipSubscriber(subscriber, this.project));
};
return ZipOperator;
}());
exports.ZipOperator = ZipOperator;
var ZipSubscriber = (function(_super) {
__extends(ZipSubscriber, _super);
function ZipSubscriber(destination, project, values) {
if (values === void 0) {
values = Object.create(null);
}
_super.call(this, destination);
this.iterators = [];
this.active = 0;
this.project = (typeof project === 'function') ? project : null;
this.values = values;
}
ZipSubscriber.prototype._next = function(value) {
var iterators = this.iterators;
if (isArray_1.isArray(value)) {
iterators.push(new StaticArrayIterator(value));
} else if (typeof value[iterator_1.$$iterator] === 'function') {
iterators.push(new StaticIterator(value[iterator_1.$$iterator]()));
} else {
iterators.push(new ZipBufferIterator(this.destination, this, value));
}
};
ZipSubscriber.prototype._complete = function() {
var iterators = this.iterators;
var len = iterators.length;
this.active = len;
for (var i = 0; i < len; i++) {
var iterator = iterators[i];
if (iterator.stillUnsubscribed) {
this.add(iterator.subscribe(iterator, i));
} else {
this.active--;
}
}
};
ZipSubscriber.prototype.notifyInactive = function() {
this.active--;
if (this.active === 0) {
this.destination.complete();
}
};
ZipSubscriber.prototype.checkIterators = function() {
var iterators = this.iterators;
var len = iterators.length;
var destination = this.destination;
for (var i = 0; i < len; i++) {
var iterator = iterators[i];
if (typeof iterator.hasValue === 'function' && !iterator.hasValue()) {
return;
}
}
var shouldComplete = false;
var args = [];
for (var i = 0; i < len; i++) {
var iterator = iterators[i];
var result = iterator.next();
if (iterator.hasCompleted()) {
shouldComplete = true;
}
if (result.done) {
destination.complete();
return;
}
args.push(result.value);
}
if (this.project) {
this._tryProject(args);
} else {
destination.next(args);
}
if (shouldComplete) {
destination.complete();
}
};
ZipSubscriber.prototype._tryProject = function(args) {
var result;
try {
result = this.project.apply(this, args);
} catch (err) {
this.destination.error(err);
return;
}
this.destination.next(result);
};
return ZipSubscriber;
}(Subscriber_1.Subscriber));
exports.ZipSubscriber = ZipSubscriber;
var StaticIterator = (function() {
function StaticIterator(iterator) {
this.iterator = iterator;
this.nextResult = iterator.next();
}
StaticIterator.prototype.hasValue = function() {
return true;
};
StaticIterator.prototype.next = function() {
var result = this.nextResult;
this.nextResult = this.iterator.next();
return result;
};
StaticIterator.prototype.hasCompleted = function() {
var nextResult = this.nextResult;
return nextResult && nextResult.done;
};
return StaticIterator;
}());
var StaticArrayIterator = (function() {
function StaticArrayIterator(array) {
this.array = array;
this.index = 0;
this.length = 0;
this.length = array.length;
}
StaticArrayIterator.prototype[iterator_1.$$iterator] = function() {
return this;
};
StaticArrayIterator.prototype.next = function(value) {
var i = this.index++;
var array = this.array;
return i < this.length ? {
value: array[i],
done: false
} : {
value: null,
done: true
};
};
StaticArrayIterator.prototype.hasValue = function() {
return this.array.length > this.index;
};
StaticArrayIterator.prototype.hasCompleted = function() {
return this.array.length === this.index;
};
return StaticArrayIterator;
}());
var ZipBufferIterator = (function(_super) {
__extends(ZipBufferIterator, _super);
function ZipBufferIterator(destination, parent, observable) {
_super.call(this, destination);
this.parent = parent;
this.observable = observable;
this.stillUnsubscribed = true;
this.buffer = [];
this.isComplete = false;
}
ZipBufferIterator.prototype[iterator_1.$$iterator] = function() {
return this;
};<|fim▁hole|> ZipBufferIterator.prototype.next = function() {
var buffer = this.buffer;
if (buffer.length === 0 && this.isComplete) {
return {
value: null,
done: true
};
} else {
return {
value: buffer.shift(),
done: false
};
}
};
ZipBufferIterator.prototype.hasValue = function() {
return this.buffer.length > 0;
};
ZipBufferIterator.prototype.hasCompleted = function() {
return this.buffer.length === 0 && this.isComplete;
};
ZipBufferIterator.prototype.notifyComplete = function() {
if (this.buffer.length > 0) {
this.isComplete = true;
this.parent.notifyInactive();
} else {
this.destination.complete();
}
};
ZipBufferIterator.prototype.notifyNext = function(outerValue, innerValue, outerIndex, innerIndex, innerSub) {
this.buffer.push(innerValue);
this.parent.checkIterators();
};
ZipBufferIterator.prototype.subscribe = function(value, index) {
return subscribeToResult_1.subscribeToResult(this, this.observable, this, index);
};
return ZipBufferIterator;
}(OuterSubscriber_1.OuterSubscriber));<|fim▁end|> | |
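// Illustrative usage sketch (editor's addition, not part of the original row):
// zip pairs the n-th emissions of its sources. This assumes an RxJS 5-era
// build where zipStatic is exposed as Observable.zip; the require path is an
// assumption for illustration only.
var Rx = require('rxjs/Rx');
Rx.Observable.zip(
  Rx.Observable.of(1, 2, 3),
  Rx.Observable.of('a', 'b', 'c'),
  function (n, s) { return s + n; } // optional trailing project function
).subscribe(function (x) { console.log(x); }); // logs 'a1', 'b2', 'c3'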
<|file_name|>User.js<|end_file_name|><|fim▁begin|>"use strict";
var EventEmitter = require('events').EventEmitter;
var util = require( './util' );
/**
* Single user on the server.
*/
var User = function(data, client) {
this.client = client;
this._applyProperties(data);
};
User.prototype = Object.create(EventEmitter.prototype);
/**
* @summary Moves the user to a channel
*
* @param {Channel|String} channel - Channel name or a channel object
*/
User.prototype.moveToChannel = function(channel) {
var id;
if(typeof channel === "string") {
id = this.client.channelByName(channel).id;
}
else if(typeof channel === "object") {
id = channel.id;
}
else {
return;
}
this.client.connection.sendMessage( 'UserState', { session: this.session, actor: this.client.user.session, channel_id: id });
};
/**
* @summary Change a user's self deafened state. (Obviously) only works on yourself.
*
* @param {Boolean} isSelfDeaf - The new self deafened state
*/
User.prototype.setSelfDeaf = function(isSelfDeaf){
this.client.connection.sendMessage( 'UserState', { session: this.session, actor: this.client.user.session, self_deaf: isSelfDeaf });
};
/**
* @summary Change a user's self muted state. (Obviously) only works on yourself.
*
* @param {Boolean} isSelfMute - The new self muted state
*/
User.prototype.setSelfMute = function(isSelfMute){
this.client.connection.sendMessage( 'UserState', { session: this.session, actor: this.client.user.session, self_mute: isSelfMute });
};
/**
* @summary Attempts to kick the user.
*
* @param {String} [reason] - The reason to kick the user for.
*/
User.prototype.kick = function(reason) {
this._sendRemoveUser( reason || "You have been kicked", false );
};
/**
* @summary Attempts to ban the user.
*
* @param {String} [reason] - The reason to ban the user for.
*/
User.prototype.ban = function(reason) {
this._sendRemoveUser( reason || "You have been banned", true );
};
/**
* @summary Sends a message to the user.
*
* @param {String} message - The message to send.
*/
User.prototype.sendMessage = function(message) {
this.client.sendMessage( message, { session: [ this.session ] } );
};
/**
* @summary Returns an output stream for listening to the user audio.
*
* @param {Boolean} [noEmptyFrames]
* True to cut the output stream during silence. If the output stream
* isn't cut it will keep emitting zero-values when the user isn't
* talking.
*
* @returns {MumbleOutputStream} Output stream.
*/
User.prototype.outputStream = function(noEmptyFrames) {
return this.client.connection.outputStream(this.session, noEmptyFrames);
};
/**
* @summary Returns an input stream for sending audio to the user.<|fim▁hole|> *
* @returns {MumbleInputStream} Input stream.
*/
User.prototype.inputStream = function() {
return this.client.inputStreamForUser( this.session );
};
/**
* @summary Checks whether the user can talk or not.
*
* @returns {Boolean} True if the user can talk.
*/
User.prototype.canTalk = function() {
return !this.mute && !this.selfMute && !this.suppress;
};
/**
* @summary Checks whether the user can hear other people.
*
* @returns {Boolean} True if the user can hear.
*/
User.prototype.canHear = function() {
return !this.selfDeaf;
};
User.prototype._applyProperties = function(data) {
/**
* @summary Session ID
*
* @description
* Session ID is present for all users. The ID specifies the current user
* session and will change when the user reconnects.
*
* @see User#id
*
* @name User#session
* @type Number
*/
this.session = data.session;
/**
* @summary User name
*
* @name User#name
* @type String
*/
this.name = data.name;
/**
* @summary User ID
*
* @description
* User ID is specified only for users who are registered on the server.
* The user ID won't change when the user reconnects.
*
* @see User#session
*
* @name User#id
* @type Number
*/
this.id = data.user_id;
/**
* @summary _true_ when the user is muted by an admin.
*
* @name User#mute
* @type Boolean
*/
this.mute = data.mute;
/**
* @summary _true_ when the user is deafened by an admin.
*
* @name User#deaf
* @type Boolean
*/
this.deaf = data.deaf;
/**
* @summary _true_ when the user is suppressed due to lack of
* permissions.
*
* @description
* The user will be suppressed by the server if they don't have permissions
* to speak on the current channel.
*
* @name User#suppress
* @type Boolean
*/
this.suppress = data.suppress;
/**
* @summary _true_ when the user has muted themselves.
*
* @name User#selfMute
* @type Boolean
*/
this.selfMute = data.self_mute;
/**
* @summary _true_ when the user has deafened themselves.
*
* @name User#selfDeaf
* @type Boolean
*/
this.selfDeaf = data.self_deaf;
/**
* @summary The hash of the user certificate
*
* @name User#hash
* @type String
*/
this.hash = data.hash;
/**
* @summary _true_ when the user is recording the conversation.
*
* @name User#recording
* @type Boolean
*/
this.recording = data.recording;
/**
* @summary _true_ when the user is a priority speaker.
*
* @name User#prioritySpeaker
* @type Boolean
*/
this.prioritySpeaker = data.priority_speaker;
/**
* @summary User's current channel.
*
* @name User#channel
* @type Channel
*/
if(data.channel_id !== null) {
this.channel = this.client.channelById(data.channel_id);
}
else { // New users always enter root
this.channel = this.client.rootChannel;
}
this.channel._addUser(this);
//TODO: Comments, textures
};
/**
* @summary Emitted when the user disconnects
*
* @description
* Also available through the client `user-disconnect` event.
*
* @event User#disconnect
*/
User.prototype._detach = function() {
this.emit('disconnect');
this.channel._removeUser(this);
};
/**
* @summary Emitted when the user moves between channels.
*
* @event User#move
* @param {Channel} oldChannel - The channel where the user was moved from.
* @param {Channel} newChannel - The channel where the user was moved to.
* @param {User} actor - The user who performed the move, or undefined when moved by the server.
*/
User.prototype._checkChangeChannel = function( data ) {
// Get the two channel instances.
var newChannel = this.client.channelById( data.channel_id );
var oldChannel = this.channel;
// Make sure there is a change in the channel.
if( newChannel === oldChannel )
return;
// Make the channel change and notify listeners.
this.channel = newChannel;
oldChannel._removeUser( this );
newChannel._addUser( this );
var actor = this.client.userBySession( data.actor );
this.emit( 'move', oldChannel, newChannel, actor );
};
/**
* @summary Emitted when the user is muted or unmuted by the server.
*
* @description
* Also available through the client `user-mute` event.
*
* @event User#mute
* @param {Boolean} status
* True when the user is muted, false when unmuted.
*/
/**
* @summary Emitted when the user mutes or unmutes themselves.
*
* @description
* Also available through the client `user-self-mute` event.
*
* @event User#self-mute
* @param {Boolean} status
* True when the user mutes themselves. False when unmuting.
*/
/**
* @summary Emitted when the user deafens or undeafens themselves.
*
* @description
* Also available through the client `user-self-deaf` event.
*
* @event User#self-deaf
* @param {Boolean} status
* True when the user deafens themselves. False when undeafening.
*/
/**
* @summary Emitted when the user is suppressed or unsuppressed.
*
* @description
* Also available through the client `user-suppress` event.
*
* @event User#suppress
* @param {Boolean} status
* True when the user is suppressed. False when unsuppressed.
*/
/**
* @summary Emitted when the user starts or stops recording.
*
* @description
* Also available through the client `user-recording` event.
*
* @event User#recording
* @param {Boolean} status
* True when the user starts recording. False when they stop.
*/
/**
* @summary Emitted when the user gains or loses priority speaker status.
*
* @description
* Also available through the client `user-priority-speaker` event.
*
* @event User#priority-speaker
* @param {Boolean} status
* True when the user gains priority speaker status. False when they lose
* it.
*/
User.prototype.update = function(data) {
var self = this;
// Check the simple fields.
[
'mute', 'selfMute', 'suppress',
'selfDeaf',
'recording', 'prioritySpeaker',
].forEach( function(f) {
self._checkField( data, f );
});
// Channel check
if( data.channel_id !== null ) {
this._checkChangeChannel( data );
}
};
User.prototype._sendRemoveUser = function( reason, ban ) {
this.client.connection.sendMessage( 'UserRemove', {
session: this.session,
actor: this.client.user.session,
reason: reason,
ban: ban
} );
};
User.prototype._checkField = function( data, field ) {
// Make sure the field has a value.
var newValue = data[ util.toFieldName( field ) ];
if( newValue === undefined )
return;
// Make sure the new value differs.
var oldValue = this[ field ];
if( newValue === oldValue )
return;
// All checks succeeded. Store the new value and emit change event.
this[ field ] = newValue;
var actor = this.client.userBySession( data.actor );
this.emit( util.toEventName( field ), newValue, actor );
};
module.exports = User;<|fim▁end|> | |
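// Illustrative usage sketch (editor's addition): the User API above in context.
// `client` is assumed to be an initialized mumble client with a populated user
// list, and `userByName` is a hypothetical lookup helper not defined here.
var user = client.userByName('SomeUser'); // hypothetical helper
if (user.canTalk()) {
  user.sendMessage('Welcome!');
  user.moveToChannel('Lobby');
}
user.on('move', function (oldChannel, newChannel, actor) { // User extends EventEmitter
  console.log(user.name + ' is now in ' + newChannel.name);
});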
<|file_name|>SortMe.cpp<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h>
template<typename T> T gcd(T a, T b) {
if(!b) return a;
return gcd(b, a % b);
}
template<typename T> T lcm(T a, T b) {
return a * b / gcd(a, b);
}
template<typename T> void chmin(T& a, T b) { a = (a > b) ? b : a; }
template<typename T> void chmax(T& a, T b) { a = (a < b) ? b : a; }
int in() { int x; scanf("%d", &x); return x; }
using namespace std;
typedef long long Int;
typedef unsigned uint;
const int MAXN = 35;
int N;
string S, P[MAXN];
bool cmp(const string& a, const string& b) {
for (size_t i = 0; i < min(a.size(), b.size()); i++) {
int p = S.find(a[i]);
int q = S.find(b[i]);
if (p != q) return p < q;
}
return a.size() < b.size();
}
int main(void) {
int t = 1;
for ( ; scanf("%d", &N) == 1 && N != 0; ) {
cin >> S;<|fim▁hole|>
for (int i = 0; i < N; i++) {
cin >> P[i];
}
sort(P, P + N, cmp);
cout << "year " << t++ << "\n";
for (int i = 0; i < N; i++) {
cout << P[i] << "\n";
}
}
return 0;
}<|fim▁end|> | |
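// Illustrative note (editor's addition): with the custom alphabet
// S = "bacdefghijklmnopqrstuvwxyz", cmp ranks 'b' before 'a', so the input
//   2
//   bacdefghijklmnopqrstuvwxyz
//   apple banana
// prints "banana" before "apple"; when one word is a prefix of the other,
// the shorter word sorts first, as in ordinary lexicographic order.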
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>"""Routines for bubble format validation"""
import os
import itertools as it
from collections import Counter
from bubbletools.bbltree import BubbleTree
from bubbletools import utils
def validate(bbllines:iter, *, profiling=False):
"""Yield lines of warnings and errors about input bbl lines.
profiling -- also yield info lines about the input bbl file.
If bbllines is a valid file name, it will be read.
Else, it should be an iterable of bubble file lines.
"""
if isinstance(bbllines, str):
if os.path.exists(bbllines): # filename containing bubble
bbllines = utils.file_lines(bbllines)
elif '\n' not in bbllines or '\t' not in bbllines:
# probably a bad file name: let's raise the proper error
bbllines = utils.file_lines(bbllines)
else: # bubble itself
bbllines = bbllines.split('\n')
bubble = tuple(bbllines)
data = tuple(utils.line_data(line) for line in bubble)
types = tuple(utils.line_type(line) for line in bubble)
# launch profiling
if profiling:
ltype_counts = Counter(types)
for ltype, count in ltype_counts.items():
yield 'INFO {} lines of type {}'.format(count, ltype)
yield 'INFO {} lines of payload'.format(
ltype_counts['EDGE'] + ltype_counts['IN'] +
ltype_counts['NODE'] + ltype_counts['SET'])
# launch validation
for errline in (l for l, t in zip(bubble, types) if t == 'ERROR'):
yield 'ERROR line is not bubble: "{}"'.format(errline)
tree = BubbleTree.from_bubble_data(data)
cc, subroots = tree.connected_components()
# print('cc:', cc)
# print('subroots:', subroots)
if profiling:
yield 'INFO {} top (power)nodes'.format(len(tree.roots))
yield 'INFO {} connected components'.format(len(cc))
yield 'INFO {} nodes are defined, {} are used'.format(
ltype_counts['NODE'], len(tuple(tree.nodes())))
yield 'INFO {} powernodes are defined, {} are used'.format(
ltype_counts['SET'], len(tuple(tree.powernodes())))
yield from inclusions_validation(tree)
yield from mergeability_validation(tree)
def inclusions_validation(tree:BubbleTree) -> iter:
"""Yield message about inclusions inconsistancies"""
# search for powernode overlapping
for one, two in it.combinations(tree.inclusions, 2):
assert len(one) == len(one.strip())
assert len(two) == len(two.strip())
one_inc = set(included(one, tree.inclusions))
two_inc = set(included(two, tree.inclusions))
common_inc = one_inc & two_inc
if len(common_inc) == len(one_inc):
if not two in one_inc:
yield ("ERROR inconsistency in inclusions: {} is both"
" included and not included in {}.".format(two, one))
if len(common_inc) == len(two_inc):
if not one in two_inc:
yield ("ERROR inconsistency in inclusions: {} is both"
" included and not included in {}.".format(one, two))
if len(common_inc) > 0: # one and two are not disjoint
if len(common_inc) == len(one_inc) or len(common_inc) == len(two_inc):
# one is included in the other
pass
else: # problem: some nodes are shared, but not all
yield ("ERROR overlapping powernodes:"
" {} nodes are shared by {} and {},"
" which are not in inclusion."
" Shared nodes are {}".format(
len(common_inc), one, two, common_inc))
for pwn in tree.powernodes():
# search for empty powernodes
if len(tree.inclusions[pwn]) == 0:
yield ("WARNING empty powernode: {} is defined,"
" but contains nothing".format(pwn))
# search for singleton powernodes
if len(tree.inclusions[pwn]) == 1:
yield ("WARNING singleton powernode: {} is defined,"
" but contains only {}".format(pwn, tree.inclusions[pwn]))
# search for cycles
nodes_in_cycles = utils.have_cycle(tree.inclusions)
if nodes_in_cycles:
yield ("ERROR inclusion cycle: the following {}"
" nodes are involved: {}".format(
len(nodes_in_cycles), set(nodes_in_cycles)))
def included(powernode:str, inclusions:dict, nodes_only=False) -> iter:
"""Yield (power)nodes below given powernode (contained by it,
or contained by a powernode contained by it, etc).
>>> sorted(included('p1', {'p1': ('p2', 1), 'p2': (3,), 1: (), 3: ()}), key=str)
[1, 3, 'p2']
>>> sorted(included('p1', {'p1': ('p2', 1), 'p2': (3,), 1: (), 3: ()}, nodes_only=True), key=str)<|fim▁hole|> """
if nodes_only:
condition = lambda e: e != powernode and inclusions[e] == ()
else:
condition = lambda e: e != powernode
yield from (elem for elem in utils.walk(powernode, (inclusions,))
if condition(elem))
def mergeability_validation(tree:BubbleTree) -> iter:
"""Yield message about mergables powernodes"""
def gen_warnings(one, two, inc_message:str) -> [str]:
"Yield the warning for given (power)nodes if necessary"
nodetype = ''
if tree.inclusions[one] and tree.inclusions[two]:
nodetype = 'power'
elif tree.inclusions[one] or tree.inclusions[two]:
nodetype = '(power)'
if one > two: one, two = two, one
shared = set(tree.edges.get(one, ())) & set(tree.edges.get(two, ()))
if shared:
yield (f"WARNING mergeable {nodetype}nodes: {one} and {two}"
f" are {inc_message}, and share"
f" {len(shared)} neigbor{'s' if len(shared) > 1 else ''}")
for one, two in it.combinations(tree.roots, 2):
yield from gen_warnings(one, two, inc_message='both roots')
for parent, childs in tree.inclusions.items():
for one, two in it.combinations(childs, 2):
yield from gen_warnings(one, two, inc_message=f'in the same level (under {parent})')<|fim▁end|> | [1, 3]
|
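# Illustrative usage sketch (editor's addition, not part of the original
# module): running the validator over a bubble file; 'graph.bbl' is an
# assumed path.
if __name__ == '__main__':
    for message in validate('graph.bbl', profiling=True):
        print(message)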
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub struct RailFence;
impl RailFence {
pub fn new(rails: u32) -> RailFence {<|fim▁hole|> }
pub fn encode(&self, text: &str) -> String {
unimplemented!("Encode this text: {}", text)
}
pub fn decode(&self, cipher: &str) -> String {
unimplemented!("Decode this ciphertext: {}", cipher)
}
}<|fim▁end|> | unimplemented!("Construct a new fence with {} rails", rails) |
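// Illustrative usage sketch (editor's addition): the intended call pattern
// for the stubs above. new/encode are unimplemented!() and would panic if
// run, so the classic 3-rail expectation is shown commented out.
// let fence = RailFence::new(3);
// assert_eq!(fence.encode("WEAREDISCOVEREDFLEEATONCE"),
//            "WECRLTEERDSOEEFEAOCAIVDEN");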
<|file_name|>ex4_9.go<|end_file_name|><|fim▁begin|>package main
import (
"bufio"
"os"
"fmt"
)
func main() {
counts := make(map[string]int)
<|fim▁hole|> }
defer fileReader.Close()
scanner := bufio.NewScanner(fileReader)
// Set the split function for the scanning operation.
scanner.Split(bufio.ScanWords)
for scanner.Scan() {
word := scanner.Text()
counts[word]++
}
if err := scanner.Err(); err != nil {
fmt.Fprintf(os.Stderr, "wordfreq: %v\n", err)
os.Exit(1)
}
fmt.Printf("word\t freq\n")
for c, n := range counts {
fmt.Printf("%q\t %d\n", c, n)
}
}<|fim▁end|> | fileReader, err := os.Open("words.txt")
if err != nil {
fmt.Println(err)
os.Exit(1) |
<|file_name|>phases.py<|end_file_name|><|fim▁begin|>from base import Phase
<|fim▁hole|>preparation = Phase('Preparation', 'Initializing connections, fetching data etc.')
volume_creation = Phase('Volume creation', 'Creating the volume to bootstrap onto')
volume_preparation = Phase('Volume preparation', 'Formatting the bootstrap volume')
volume_mounting = Phase('Volume mounting', 'Mounting bootstrap volume')
os_installation = Phase('OS installation', 'Installing the operating system')
package_installation = Phase('Package installation', 'Installing software')
system_modification = Phase('System modification', 'Modifying configuration files, adding resources, etc.')
system_cleaning = Phase('System cleaning', 'Removing sensitive data, temporary files and other leftovers')
volume_unmounting = Phase('Volume unmounting', 'Unmounting the bootstrap volume')
image_registration = Phase('Image registration', 'Uploading/Registering with the provider')
cleaning = Phase('Cleaning', 'Removing temporary files')
order = [preparation,
volume_creation,
volume_preparation,
volume_mounting,
os_installation,
package_installation,
system_modification,
system_cleaning,
volume_unmounting,
image_registration,
cleaning,
]<|fim▁end|> | |
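# Illustrative usage sketch (editor's addition): the `order` list above fixes
# each phase's position in the bootstrap sequence, so relative ordering can be
# checked by index; `phase_index` is an assumed helper, not part of the module.
def phase_index(phase):
    return order.index(phase)

assert phase_index(volume_creation) < phase_index(volume_unmounting)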
<|file_name|>TaskCondition.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
interface TaskCondition{
onAccept(task: TaskConditionContext);
}
class NPCTalkTaskCondition implements TaskCondition{
onAccept(task:TaskConditionContext){
task.onFinish();
}
}
class KillMonsterTaskCondition implements TaskCondition{
private onAcceptButton:boolean=true;
onAccept(task:TaskConditionContext){
if(!this.onAcceptButton){
task.setCurrent();
}
this.onAcceptButton=false;
}
}<|fim▁end|> | interface TaskConditionContext{
setCurrent():void;
onFinish():void;
} |
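// Illustrative usage sketch (editor's addition): LoggingTaskContext is an
// assumed stand-in for a real TaskConditionContext implementation.
class LoggingTaskContext implements TaskConditionContext {
    setCurrent(): void { console.log('task marked in progress'); }
    onFinish(): void { console.log('task finished'); }
}
new NPCTalkTaskCondition().onAccept(new LoggingTaskContext()); // finishes at once
const kill = new KillMonsterTaskCondition();
kill.onAccept(new LoggingTaskContext()); // first accept consumes the button press
kill.onAccept(new LoggingTaskContext()); // subsequent accepts call setCurrent()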
<|file_name|>bin.py<|end_file_name|><|fim▁begin|>#PROJECT
from outcome import Outcome
from odds import Odds
class Bin:
def __init__(
self,
*outcomes
):
self.outcomes = set(outcomes)
def add_outcome(
self,
outcome
):
self.outcomes.add(outcome)
def __str__(self):
return ', '.join([str(outcome) for outcome in self.outcomes])
class BinBuilder:
def __init__(
self,
wheel
):
self.wheel = wheel
def build_bins(self):
self.straight_bets()
self.split_bets()
self.street_bets()
self.corner_bets()
self.five_bet()<|fim▁hole|>
def straight_bets(self):
outcomes = [
Outcome(str(i), Odds.STRAIGHT_BET)
for i in range(37)
] + [Outcome('00', Odds.STRAIGHT_BET)]
for i, outcome in enumerate(outcomes):
self.wheel.add_outcome(i, outcome)
def split_bets(self):
for row in range(12):
for direction in [1, 2]:
n = 3 * row + direction
bins = [n, n + 1]
outcome = Outcome(
'split {}'.format('-'.join([str(i) for i in bins])),
Odds.SPLIT_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
for n in range(1, 34):
bins = [n, n + 3]
outcome = Outcome(
'split {}'.format('-'.join([str(i) for i in bins])),
Odds.SPLIT_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
def street_bets(self):
for row in range(12):
n = 3 * row + 1
bins = [n, n + 1, n + 2]
outcome = Outcome(
'street {}-{}'.format(bins[0], bins[-1]),
Odds.STREET_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
def corner_bets(self):
for col in [1, 2]:
for row in range(11):
n = 3 * row + col
bins = [n + i for i in [0, 1, 3, 4]]
outcome = Outcome(
'corner {}'.format('-'.join([str(i) for i in bins])),
Odds.CORNER_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
def five_bet(self):
outcome = Outcome(
'five bet 00-0-1-2-3',
Odds.FIVE_BET
)
for bin in [0, 1, 2, 3, 37]:
self.wheel.add_outcome(bin, outcome)
def line_bets(self):
for row in range(11):
n = 3 * row + 1
bins = [n + i for i in range(6)]
outcome = Outcome(
'line {}-{}'.format(bins[0], bins[-1]),
Odds.LINE_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
def dozen_bets(self):
#https://pypi.python.org/pypi/inflect/0.2.4
dozen_map = {
1: '1st',
2: '2nd',
3: '3rd'
}
for d in range(3):
outcome = Outcome(
'{} 12'.format(dozen_map[d + 1]),
Odds.DOZEN_BET
)
for m in range(12):
self.wheel.add_outcome(12 * d + m + 1, outcome)
def column_bets(self):
for c in range(3):
outcome = Outcome(
'column {}'.format(c + 1),
Odds.COLUMN_BET
)
for r in range(12):
self.wheel.add_outcome(3 * r + c + 1, outcome)
def even_money_bets(self):
for bin in range(1, 37):
if 1 <= bin < 19:
name = '1 to 18' #low
else:
name = '19 to 36' #high
self.wheel.add_outcome(
bin,
Outcome(name, Odds.EVEN_MONEY_BET)
)
if bin % 2:
name = 'odd'
else:
name = 'even'
self.wheel.add_outcome(
bin,
Outcome(name, Odds.EVEN_MONEY_BET)
)
if bin in (
[1, 3, 5, 7, 9] +
[12, 14, 16, 18] +
[19, 21, 23, 25, 27] +
[30, 32, 34, 36]
):
name = 'red'
else:
name = 'black'
self.wheel.add_outcome(
bin,
Outcome(name, Odds.EVEN_MONEY_BET)
)<|fim▁end|> | self.line_bets()
self.dozen_bets()
self.column_bets()
self.even_money_bets() |
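# Illustrative usage sketch (editor's addition): MinimalWheel is an assumed
# stand-in for the project's real Wheel, which only needs add_outcome(bin, outcome).
class MinimalWheel:
    def __init__(self):
        self.bins = {i: set() for i in range(38)}  # 0-36, plus index 37 for '00'

    def add_outcome(self, bin, outcome):
        self.bins[bin].add(outcome)

wheel = MinimalWheel()
BinBuilder(wheel).build_bins()
print(len(wheel.bins[17]))  # number of distinct bets that pay on a spin of 17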
<|file_name|>NCBITaxResult.java<|end_file_name|><|fim▁begin|>package uk.ac.ebi.ddi.extservices.entrez.ncbiresult;
<|fim▁hole|> * @author Yasset Perez-Riverol ([email protected])
* @date 18/05/2015
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class NCBITaxResult {
@JsonProperty("header")
NCBIHeader header;
@JsonProperty("esearchresult")
NCBIEResult result;
public NCBIHeader getHeader() {
return header;
}
public void setHeader(NCBIHeader header) {
this.header = header;
}
public NCBIEResult getResult() {
return result;
}
public void setResult(NCBIEResult result) {
this.result = result;
}
public String[] getNCBITaxonomy() {
if (getResult() != null && getResult().getIdList() != null && getResult().getIdList().length == 1) {
return getResult().getIdList();
}
return null;
}
}<|fim▁end|> | import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
/** |
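// Illustrative usage sketch (editor's addition): deserializing an NCBI esearch
// response with Jackson; the JSON literal is a hypothetical minimal payload.
// ObjectMapper mapper = new ObjectMapper();
// NCBITaxResult result = mapper.readValue(
//         "{\"header\":{},\"esearchresult\":{\"idlist\":[\"9606\"]}}",
//         NCBITaxResult.class);
// String[] taxIds = result.getNCBITaxonomy(); // ["9606"] if exactly one id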
<|file_name|>StructType.ts<|end_file_name|><|fim▁begin|>import { Type } from '~/syntax/types/Type';
import INodeVisitor from '~/syntax/INodeVisitor';
import { nonTerminal, parser, exp, ParseResult } from '~/parser/Parser';
import { TokenType, Token } from '~/parser/Tokenizer';
const Field = {
typeNode: exp(Type, { definite: true }),
name: exp(TokenType.IDENT, { err: 'INVALID_FIELD_NAME' }),
};
@nonTerminal({ implements: Type })
export class StructType extends Type {
@parser(TokenType.LBRACE, { definite: true })
setOpenBrace(token: Token) {
this.registerLocation('openBrace', token.getLocation());
}
@parser(Field, { repeat: '*' })
setFields(fields: ParseResult[]) {
for (const field of fields) {
const name = field.name as Token;<|fim▁hole|>
@parser(TokenType.RBRACE, { err: 'INVALID_STRUCT_NO_CLOSE_BRACE' })
setCloseBrace(token: Token) {
this.createAndRegisterLocation('self', this.locations.openBrace, token.getLocation());
}
fields: { type: Type, name: string }[] = [];
visit<T>(visitor: INodeVisitor<T>) {
return visitor.visitStructType(this);
}
}<|fim▁end|> | this.fields.push({ type: field.typeNode as Type, name: name.image });
this.registerLocation(`field_${name}`, name.getLocation());
}
} |
<|file_name|>HTMLShadowElement.js<|end_file_name|><|fim▁begin|>/*
* Copyright 2013 The Polymer Authors. All rights reserved.
* Use of this source code is goverened by a BSD-style
* license that can be found in the LICENSE file.
*/
suite('HTMLShadowElement', function() {
<|fim▁hole|> host.innerHTML = '<a>a</a><b>b</b>';
var a = host.firstChild;
var b = host.lastChild;
var sr = host.createShadowRoot();
sr.innerHTML = 'a<shadow>b</shadow>c';
var shadow = sr.firstElementChild;
host.offsetWidth;
assert.isTrue(shadow instanceof HTMLShadowElement);
assert.isNull(shadow.olderShadowRoot);
var sr2 = host.createShadowRoot();
sr2.innerHTML = 'd<shadow>e</shadow>f';
var shadow2 = sr2.firstElementChild;
host.offsetWidth;
assert.isTrue(shadow instanceof HTMLShadowElement);
assert.isNull(shadow.olderShadowRoot);
assert.isTrue(shadow2 instanceof HTMLShadowElement);
assert.equal(shadow2.olderShadowRoot, sr);
assert.equal(unwrap(host).innerHTML, 'dabcf');
});
});<|fim▁end|> | var unwrap = ShadowDOMPolyfill.unwrap;
test('olderShadowRoot', function() {
var host = document.createElement('div'); |
<|file_name|>i18n.js<|end_file_name|><|fim▁begin|>define([], function() {
return function($translateProvider) {
$translateProvider.translations('en', {
WELCOME_TO_PIO: 'Welcome to PIO',
SIGN_IN: 'Sign in',
SIGN_UP: 'Sign up',
SIGN_OUT: 'Sign out',
FORGOT_PASSWORD: 'Forgot password?',
DO_NOT_HAVE_AN_ACCOUNT: 'Do not have an account?',
CREATE_AN_ACCOUNT: 'Create an account',
POLLS: 'Polls',
ADMINISTRATION: 'Administration',
TITLE: 'Title',
USERS: 'Users',
CREATE: 'Create',
DASHBOARD: 'Dashboard',
DETAILS: 'Details',
DETAIL: 'Detail',
CREATE_POLL: 'Create poll',
CREATE_USER: 'Create user',
POLL_DETAILS: 'Poll details',
USER_DETAILS: 'User details',
TAGS: 'Tags',
SUMMARY: 'Summary',
STATUS: 'Status',
NAME: 'Name',
AVATAR: 'Avatar',
POLLINGS: 'Pollings',
NOTES: 'Notes',
EMAIL: 'Email',
DATE: 'Date',
SAVE: 'Save',
CANCEL: 'Cancel'
});
$translateProvider.translations('es', {
WELCOME_TO_PIO: 'Bienvenido a PIO',
SIGN_IN: 'Acceder',
SIGN_UP: 'Registro',
SIGN_OUT: 'Salir',
FORGOT_PASSWORD: '¿Olvidó la contraseña?',
DO_NOT_HAVE_AN_ACCOUNT: '¿No tiene una cuenta?',<|fim▁hole|> CREATE_AN_ACCOUNT: 'Crear una cuenta',
POLLS: 'Encuestas',
ADMINISTRATION: 'Administración',
TITLE: 'Título',
USERS: 'Usuarios',
CREATE: 'Crear',
DASHBOARD: 'Panel',
DETAILS: 'Detalles',
DETAIL: 'Detalle',
CREATE_POLL: 'Crear encuesta',
CREATE_USER: 'Crear usuario',
POLL_DETAILS: 'Detalles de encuesta',
USER_DETAILS: 'Detalles de usuario',
TAGS: 'Etiquetas',
SUMMARY: 'Resumen',
STATUS: 'Estado',
NAME: 'Nombre',
AVATAR: 'Avatar',
POLLINGS: 'Votaciones',
NOTES: 'Notas',
EMAIL: 'Correo',
DATE: 'Fecha',
SAVE: 'Guardar',
CANCEL: 'Cancelar'
});
$translateProvider.useSanitizeValueStrategy('escapeParameters');
$translateProvider.preferredLanguage('es');
};
});<|fim▁end|> | |
<|file_name|>RelayDefaultNetworkLayer.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule RelayDefaultNetworkLayer
* @typechecks
* @flow
*/
'use strict';
var Promise = require('Promise');
import type RelayMutationRequest from 'RelayMutationRequest';
import type RelayQueryRequest from 'RelayQueryRequest';
var fetchWithRetries = require('fetchWithRetries');
import type {InitWithRetries} from 'fetchWithRetries';
type GraphQLError = {
message: string;
locations: Array<GraphQLErrorLocation>;
};
type GraphQLErrorLocation = {
column: number;
line: number;
};
class RelayDefaultNetworkLayer {
_uri: string;
_init: $FlowIssue; // InitWithRetries
constructor(uri: string, init?: ?InitWithRetries) {
this._uri = uri;
this._init = {...init};
// Bind instance methods to facilitate reuse when creating custom network
// layers.
var self: any = this;
self.sendMutation = this.sendMutation.bind(this);
self.sendQueries = this.sendQueries.bind(this);
self.supports = this.supports.bind(this);
}
sendMutation(request: RelayMutationRequest): Promise {
return this._sendMutation(request).then(
result => result.json()<|fim▁hole|> var error = new Error(
'Server request for mutation `' + request.getDebugName() + '` ' +
'failed for the following reasons:\n\n' +
formatRequestErrors(request, payload.errors)
);
(error: any).source = payload;
request.reject(error);
} else {
request.resolve({response: payload.data});
}
}).catch(
error => request.reject(error)
);
}
sendQueries(requests: Array<RelayQueryRequest>): Promise {
return Promise.all(requests.map(request => (
this._sendQuery(request).then(
result => result.json()
).then(payload => {
if (payload.hasOwnProperty('errors')) {
var error = new Error(
'Server request for query `' + request.getDebugName() + '` ' +
'failed for the following reasons:\n\n' +
formatRequestErrors(request, payload.errors)
);
(error: any).source = payload;
request.reject(error);
} else if (!payload.hasOwnProperty('data')) {
request.reject(new Error(
'Server response was missing for query `' + request.getDebugName() +
'`.'
));
} else {
request.resolve({response: payload.data});
}
}).catch(
error => request.reject(error)
)
)));
}
supports(...options: Array<string>): boolean {
// Does not support the only defined option, "defer".
return false;
}
/**
* Sends a POST request with optional files.
*/
_sendMutation(request: RelayMutationRequest): Promise {
var init;
var files = request.getFiles();
if (files) {
if (!global.FormData) {
throw new Error('Uploading files without `FormData` not supported.');
}
var formData = new FormData();
formData.append('query', request.getQueryString());
formData.append('variables', JSON.stringify(request.getVariables()));
for (var filename in files) {
if (files.hasOwnProperty(filename)) {
formData.append(filename, files[filename]);
}
}
init = {
...this._init,
body: formData,
method: 'POST',
};
} else {
init = {
...this._init,
body: JSON.stringify({
query: request.getQueryString(),
variables: request.getVariables(),
}),
headers: {
...this._init.headers,
'Content-Type': 'application/json',
},
method: 'POST',
};
}
return fetch(this._uri, init).then(throwOnServerError);
}
/**
* Sends a POST request and retries if the request fails or times out.
*/
_sendQuery(request: RelayQueryRequest): Promise {
return fetchWithRetries(this._uri, {
...this._init,
body: JSON.stringify({
query: request.getQueryString(),
variables: request.getVariables(),
}),
headers: {
...this._init.headers,
'Content-Type': 'application/json',
},
method: 'POST',
});
}
}
/**
* Rejects HTTP responses with a status code that is not >= 200 and < 300.
* This is done to follow the internal behavior of `fetchWithRetries`.
*/
function throwOnServerError(response: any): any {
if (response.status >= 200 && response.status < 300) {
return response;
} else {
throw response;
}
}
/**
* Formats an error response from GraphQL server request.
*/
function formatRequestErrors(
request: RelayMutationRequest | RelayQueryRequest,
errors: Array<GraphQLError>
): string {
var CONTEXT_BEFORE = 20;
var CONTEXT_LENGTH = 60;
var queryLines = request.getQueryString().split('\n');
return errors.map(({locations, message}, ii) => {
var prefix = (ii + 1) + '. ';
var indent = ' '.repeat(prefix.length);
//custom errors thrown in graphql-server may not have locations
var locationMessage = locations ?
('\n' + locations.map(({column, line}) => {
var queryLine = queryLines[line - 1];
var offset = Math.min(column - 1, CONTEXT_BEFORE);
return [
queryLine.substr(column - 1 - offset, CONTEXT_LENGTH),
' '.repeat(offset) + '^^^'
].map(messageLine => indent + messageLine).join('\n');
}).join('\n')) :
'';
return prefix + message + locationMessage;
}).join('\n');
}
module.exports = RelayDefaultNetworkLayer;<|fim▁end|> | ).then(payload => {
if (payload.hasOwnProperty('errors')) { |
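// Illustrative usage sketch (editor's addition): wiring the layer into classic
// Relay. The endpoint URL is an assumption; fetchTimeout and retryDelays are
// the fetchWithRetries init options threaded through _init above.
var Relay = require('react-relay');
Relay.injectNetworkLayer(
  new RelayDefaultNetworkLayer('https://example.com/graphql', {
    fetchTimeout: 30000,       // ms before a pending query fetch is retried
    retryDelays: [1000, 3000]  // wait times between query retry attempts
  })
);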
<|file_name|>test.js<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* Asserts that device property values match properties in |expectedProperties|.
* The method will *not* assert that the device contains *only* properties
* specified in expected properties.
* @param {Object} expectedProperties Expected device properties.
* @param {Object} device Device object to test.
*/
function assertDeviceMatches(expectedProperties, device) {
Object.keys(expectedProperties).forEach(function(key) {
chrome.test.assertEq(expectedProperties[key], device[key],
'Property ' + key + ' of device ' + device.id);
});
}
/**
* Verifies that list of devices contains all and only devices from set of
* expected devices. If will fail the test if an unexpected device is found.
*
* @param {Object.<string, Object>} expectedDevices Expected set of test
* devices. Maps device ID to device properties.
* @param {Array.<Object>} devices List of input devices.
*/
function assertDevicesMatch(expectedDevices, devices) {
var deviceIds = {};
devices.forEach(function(device) {
chrome.test.assertFalse(!!deviceIds[device.id],
'Duplicated device id: \'' + device.id + '\'.');
deviceIds[device.id] = true;
});
function sortedKeys(obj) {
return Object.keys(obj).sort();
}
chrome.test.assertEq(sortedKeys(expectedDevices), sortedKeys(deviceIds));
devices.forEach(function(device) {
assertDeviceMatches(expectedDevices[device.id], device);
});
}
/**
*
* @param {Array.<Object>} devices List of devices returned by
* chrome.audio.getInfo or chrome.audio.getDevices.
* @return {Object.<string, Object>} List of devices formatted as map of
* expected devices used to assert devices match expectation.
*/
function deviceListToExpectedDevicesMap(devices) {
var expectedDevicesMap = {};
devices.forEach(function(device) {
expectedDevicesMap[device.id] = device;
});
return expectedDevicesMap;
}
/**
* @param {Array.<Object>} devices List of devices returned by
* chrome.audio.getInfo or chrome.audio.getDevices.
* @return {Array.<string>} Sorted list devices IDs for devices in |devices|.
*/
function getDeviceIds(devices) {
return devices.map(function(device) {return device.id;}).sort();
}
function EventListener(targetEvent) {
this.targetEvent = targetEvent;
this.listener = this.handleEvent.bind(this);
this.targetEvent.addListener(this.listener);
this.eventCount = 0;
}
EventListener.prototype.handleEvent = function() {
++this.eventCount;
}
EventListener.prototype.reset = function() {
this.targetEvent.removeListener(this.listener);
}
var deviceChangedListener = null;
chrome.test.runTests([
// Sets up a listener for audio.onDeviceChanged event -
// |verifyNoDeviceChangedEvents| test will later verify that no
// onDeviceChanged events have been observed.
function startDeviceChangedListener() {
deviceChangedListener = new EventListener(chrome.audio.onDeviceChanged);
chrome.test.succeed();
},
function getDevicesTest() {
// Test output devices. Maps device ID -> tested device properties.
var kTestDevices = {
'30001': {
id: '30001',
stableDeviceId: '0',
displayName: 'Jabra Speaker 1',
deviceName: 'Jabra Speaker',
streamType: 'OUTPUT'
},
'30002': {
id: '30002',
stableDeviceId: '1',
displayName: 'Jabra Speaker 2',
deviceName: 'Jabra Speaker',
streamType: 'OUTPUT'
},
'30003': {
id: '30003',
stableDeviceId: '2',
displayName: 'HDA Intel MID',
deviceName: 'HDMI output',
streamType: 'OUTPUT'
},
'40001': {
id: '40001',
stableDeviceId: '3',
displayName: 'Jabra Mic 1',
deviceName: 'Jabra Mic',
streamType: 'INPUT'
},
'40002': {
id: '40002',
stableDeviceId: '4',
displayName: 'Jabra Mic 2',
deviceName: 'Jabra Mic',
streamType: 'INPUT'
},
'40003': {
id: '40003',
stableDeviceId: '5',
displayName: 'Logitech Webcam',
deviceName: 'Webcam Mic',
streamType: 'INPUT'
}
};
chrome.audio.getDevices(chrome.test.callbackPass(function(devices) {
assertDevicesMatch(kTestDevices, devices);
}));
},
function getDevicesWithEmptyFilterTest() {
// Test output devices. Maps device ID -> tested device properties.
var kTestDevices = {
'30001': {
id: '30001',
stableDeviceId: '0',
displayName: 'Jabra Speaker 1',
deviceName: 'Jabra Speaker',
streamType: 'OUTPUT'
},
'30002': {
id: '30002',
stableDeviceId: '1',
displayName: 'Jabra Speaker 2',
deviceName: 'Jabra Speaker',
streamType: 'OUTPUT'
},
'30003': {
id: '30003',
stableDeviceId: '2',
displayName: 'HDA Intel MID',
deviceName: 'HDMI output',
streamType: 'OUTPUT'
},
'40001': {
id: '40001',
stableDeviceId: '3',
displayName: 'Jabra Mic 1',
deviceName: 'Jabra Mic',
streamType: 'INPUT'
},
'40002': {
id: '40002',
stableDeviceId: '4',
displayName: 'Jabra Mic 2',
deviceName: 'Jabra Mic',
streamType: 'INPUT'
},
'40003': {
id: '40003',
stableDeviceId: '5',
displayName: 'Logitech Webcam',
deviceName: 'Webcam Mic',
streamType: 'INPUT'
}
};
chrome.audio.getDevices({}, chrome.test.callbackPass(function(devices) {
assertDevicesMatch(kTestDevices, devices);
}));
},
function getInputDevicesTest() {
var kTestDevices = {
'40001': {
id: '40001',
streamType: 'INPUT'
},
'40002': {
id: '40002',
streamType: 'INPUT'
},
'40003': {
id: '40003',
streamType: 'INPUT'
}
};
chrome.audio.getDevices({
streamTypes: ['INPUT']
}, chrome.test.callbackPass(function(devices) {
assertDevicesMatch(kTestDevices, devices);
}));
},
function getOutputDevicesTest() {
var kTestDevices = {
'30001': {
id: '30001',
streamType: 'OUTPUT'
},
'30002': {
id: '30002',
streamType: 'OUTPUT'
},
'30003': {
id: '30003',
streamType: 'OUTPUT'
},
};
chrome.audio.getDevices({
streamTypes: ['OUTPUT']
}, chrome.test.callbackPass(function(devices) {
assertDevicesMatch(kTestDevices, devices);
}));
},
function getActiveDevicesTest() {
chrome.audio.getDevices(chrome.test.callbackPass(function(initial) {
var initialActiveDevices = initial.filter(function(device) {
return device.isActive;
});
chrome.test.assertTrue(initialActiveDevices.length > 0);
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(devices) {
assertDevicesMatch(
deviceListToExpectedDevicesMap(initialActiveDevices),
devices);
}));
var initialActiveInputs = initialActiveDevices.filter(function(device) {
return device.streamType === 'INPUT';
});
chrome.test.assertTrue(initialActiveInputs.length > 0);
chrome.audio.getDevices({
isActive: true,
streamTypes: ['INPUT']
}, chrome.test.callbackPass(function(devices) {
assertDevicesMatch(
deviceListToExpectedDevicesMap(initialActiveInputs),
devices);
}));
var initialActiveOutputs = initialActiveDevices.filter(function(device) {
return device.streamType === 'OUTPUT';
});
chrome.test.assertTrue(initialActiveOutputs.length > 0);
chrome.audio.getDevices({
isActive: true,
streamTypes: ['OUTPUT']
}, chrome.test.callbackPass(function(devices) {
assertDevicesMatch(
deviceListToExpectedDevicesMap(initialActiveOutputs),
devices);
}));
}));
},
function getInactiveDevicesTest() {
chrome.audio.getDevices(chrome.test.callbackPass(function(initial) {
var initialInactiveDevices = initial.filter(function(device) {
return !device.isActive;
});
chrome.test.assertTrue(initialInactiveDevices.length > 0);
chrome.audio.getDevices({
isActive: false
}, chrome.test.callbackPass(function(devices) {
assertDevicesMatch(
deviceListToExpectedDevicesMap(initialInactiveDevices),
devices);
}));
}));
},
function setPropertiesTest() {
chrome.audio.getDevices(chrome.test.callbackPass(function(initial) {
var expectedDevices = deviceListToExpectedDevicesMap(initial);
// Update expected input devices with values that should be changed in
// test.
var updatedInput = expectedDevices['40002'];
chrome.test.assertFalse(updatedInput.gain === 65);
updatedInput.level = 65;
// Update expected output devices with values that should be changed in
// test.
var updatedOutput = expectedDevices['30001'];
chrome.test.assertFalse(updatedOutput.volume === 45);
updatedOutput.level = 45;
chrome.audio.setProperties('30001', {
level: 45
}, chrome.test.callbackPass(function() {
chrome.audio.setProperties('40002', {
level: 65
}, chrome.test.callbackPass(function() {
chrome.audio.getDevices(chrome.test.callbackPass(function(devices) {
assertDevicesMatch(expectedDevices, devices);
}));
}));
}));
}));
},
function inputMuteTest() {
var getMute = function(callback) {
chrome.audio.getMute('INPUT', chrome.test.callbackPass(callback));
};
getMute(function(originalValue) {
chrome.audio.setMute(
'INPUT', !originalValue, chrome.test.callbackPass(function() {
getMute(function(value) {
chrome.test.assertEq(!originalValue, value);
});
}));
});
},
function outputMuteTest() {
var getMute = function(callback) {
chrome.audio.getMute('OUTPUT', chrome.test.callbackPass(callback));
};
getMute(function(originalValue) {
chrome.audio.setMute(
'OUTPUT', !originalValue, chrome.test.callbackPass(function() {
getMute(function(value) {
chrome.test.assertEq(!originalValue, value);
});
}));
});
},
function setActiveDevicesTest() {
chrome.audio.setActiveDevices({
input: ['40002', '40003'],
output: ['30001']
}, chrome.test.callbackPass(function() {
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(activeDevices) {
chrome.test.assertEq(['30001', '40002', '40003'],
getDeviceIds(activeDevices));
}));
}));
},
function setActiveDevicesOutputOnlyTest() {
chrome.audio.getDevices({
streamTypes: ['INPUT'],
isActive: true
}, chrome.test.callbackPass(function(initial) {
var initialActiveInputs = getDeviceIds(initial);
chrome.test.assertTrue(initialActiveInputs.length > 0);
chrome.audio.setActiveDevices({
output: ['30003']
}, chrome.test.callbackPass(function() {
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(devices) {
var expected = ['30003'].concat(initialActiveInputs).sort();
chrome.test.assertEq(expected, getDeviceIds(devices));
}));
}));
}));
},
function setActiveDevicesFailInputTest() {
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(initial) {
var initialActiveIds = getDeviceIds(initial);
chrome.test.assertTrue(initialActiveIds.length > 0);
chrome.audio.setActiveDevices({
input: ['0000000'], /* does not exist */
output: []
}, chrome.test.callbackFail('Failed to set active devices.', function() {
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(devices) {
chrome.test.assertEq(initialActiveIds, getDeviceIds(devices));
}));
}));
}));
},
function setActiveDevicesFailOutputTest() {
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(initial) {
var initialActiveIds = getDeviceIds(initial);
chrome.test.assertTrue(initialActiveIds.length > 0);
chrome.audio.setActiveDevices({
input: [],
output: ['40001'] /* id is input node ID */
}, chrome.test.callbackFail('Failed to set active devices.', function() {
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(devices) {
chrome.test.assertEq(initialActiveIds, getDeviceIds(devices));
}));
}));
}));
},
function clearActiveDevicesTest() {
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(initial) {
chrome.test.assertTrue(getDeviceIds(initial).length > 0);
chrome.audio.setActiveDevices({
input: [],
output: []
}, chrome.test.callbackPass(function() {
chrome.audio.getDevices({
isActive: true
}, chrome.test.callbackPass(function(devices) {
chrome.test.assertEq([], devices);
}));
}));
}));
},
function verifyNoDeviceChangedEvents() {
chrome.test.assertTrue(!!deviceChangedListener);<|fim▁hole|> deviceChangedListener.reset();
deviceChangedListener = null;
chrome.test.succeed();
},
// Tests verifying the app doesn't have access to deprecated part of the API:
function deprecated_GetInfoTest() {
chrome.audio.getInfo(chrome.test.callbackFail(
'audio.getInfo is deprecated, use audio.getDevices instead.'));
},
function deprecated_setProperties_isMuted() {
chrome.audio.getDevices(chrome.test.callbackPass(function(initial) {
var expectedDevices = deviceListToExpectedDevicesMap(initial);
var expectedError =
'|isMuted| property is deprecated, use |audio.setMute|.';
chrome.audio.setProperties('30001', {
isMuted: true,
// Output device - should have volume set.
level: 55
}, chrome.test.callbackFail(expectedError, function() {
// Assert that device properties haven't changed.
chrome.audio.getDevices(chrome.test.callbackPass(function(devices) {
assertDevicesMatch(expectedDevices, devices);
}));
}));
}));
},
function deprecated_setProperties_volume() {
chrome.audio.getDevices(chrome.test.callbackPass(function(initial) {
var expectedDevices = deviceListToExpectedDevicesMap(initial);
var expectedError = '|volume| property is deprecated, use |level|.';
chrome.audio.setProperties('30001', {
volume: 2,
// Output device - should have volume set.
level: 55
}, chrome.test.callbackFail(expectedError, function() {
// Assert that device properties haven't changed.
chrome.audio.getDevices(chrome.test.callbackPass(function(devices) {
assertDevicesMatch(expectedDevices, devices);
}));
}));
}));
},
function deprecated_setProperties_gain() {
chrome.audio.getDevices(chrome.test.callbackPass(function(initial) {
var expectedDevices = deviceListToExpectedDevicesMap(initial);
var expectedError = '|gain| property is deprecated, use |level|.';
chrome.audio.setProperties('40001', {
gain: 2,
// Output device - should have volume set.
level: 55
}, chrome.test.callbackFail(expectedError, function() {
// Assert that device properties haven't changed.
chrome.audio.getDevices(chrome.test.callbackPass(function(devices) {
assertDevicesMatch(expectedDevices, devices);
}));
}));
}));
},
function deprecated_SetActiveDevicesTest() {
var kExpectedError =
'String list |ids| is deprecated, use DeviceIdLists type.';
chrome.audio.setActiveDevices([
'30003',
'40002'
], chrome.test.callbackFail(kExpectedError));
},
]);<|fim▁end|> | chrome.test.assertEq(0, deviceChangedListener.eventCount); |
<|file_name|>crate_a2.rs<|end_file_name|><|fim▁begin|>pub struct Foo;
pub trait Bar{}
pub fn bar() -> Box<Bar> {<|fim▁hole|>}<|fim▁end|> | unimplemented!() |
<|file_name|>JpaUtil.java<|end_file_name|><|fim▁begin|>package com.target.control;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
<|fim▁hole|> static {
try {
factory = Persistence.createEntityManagerFactory("jpa");
em = factory.createEntityManager();
} catch (RuntimeException e) {
e.printStackTrace();
}
}
public static EntityManager getFactory() {
return em;
}
}<|fim▁end|> | public class JpaUtil {
private static EntityManager em;
private static EntityManagerFactory factory;
|
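// Illustrative usage sketch (editor's addition): a transaction through the
// shared EntityManager above; TargetEntity is a hypothetical mapped entity.
// EntityManager em = JpaUtil.getFactory();
// em.getTransaction().begin();
// em.persist(new TargetEntity());
// em.getTransaction().commit();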
<|file_name|>buffer-concat.js<|end_file_name|><|fim▁begin|>'use strict';
const common = require('../common.js');
const bench = common.createBenchmark(main, {
pieces: [1, 4, 16],
pieceSize: [1, 16, 256],
withTotalLength: [0, 1],
n: [1024]
});
function main(conf) {<|fim▁hole|> const pieces = +conf.pieces;
const list = new Array(pieces);
list.fill(Buffer.allocUnsafe(size));
const totalLength = conf.withTotalLength ? pieces * size : undefined;
bench.start();
for (var i = 0; i < n * 1024; i++) {
Buffer.concat(list, totalLength);
}
bench.end(n);
}<|fim▁end|> | const n = +conf.n;
const size = +conf.pieceSize; |
<|file_name|>AddIn.cpp<|end_file_name|><|fim▁begin|>//Copyright (c) Microsoft Corporation. All rights reserved.
// AddIn.cpp : Implementation of DLL Exports.
#include "stdafx.h"
#include "resource.h"
#include "AddIn.h"
CAddInModule _AtlModule;
// DLL Entry Point
extern "C" BOOL WINAPI DllMain(HINSTANCE hInstance, DWORD dwReason, LPVOID lpReserved)
{
_AtlModule.SetResourceInstance(hInstance);
return _AtlModule.DllMain(dwReason, lpReserved);
}
// Used to determine whether the DLL can be unloaded by OLE
STDAPI DllCanUnloadNow(void)
{
return _AtlModule.DllCanUnloadNow();
}
// Returns a class factory to create an object of the requested type
STDAPI DllGetClassObject(REFCLSID rclsid, REFIID riid, LPVOID* ppv)
{
return _AtlModule.DllGetClassObject(rclsid, riid, ppv);
}
void CreateRegistrationKey(const CString& version, const CString& modulePath, const CString& moduleShortName)
{
CString path = "Software\\Microsoft\\VisualStudio\\" + version;
CRegKey devKey;
if (devKey.Open(HKEY_LOCAL_MACHINE, path) == ERROR_SUCCESS)
{
// Auto create the addins key if it isn't already there.
if (devKey.Create(HKEY_LOCAL_MACHINE, path + "\\AddIns") == ERROR_SUCCESS)
{
// Create the WorkspaceWhiz.DSAddin.1 key.
if (devKey.Create(HKEY_LOCAL_MACHINE, path + "\\AddIns\\LuaPlusDebugger.Connect") == ERROR_SUCCESS)
{
// Remove all old entries.
devKey.SetStringValue("SatelliteDLLPath", modulePath);
devKey.SetStringValue("SatelliteDLLName", moduleShortName);
devKey.SetDWORDValue("LoadBehavior", 3);
devKey.SetStringValue("FriendlyName", "LuaPlus Debugger Window");
devKey.SetStringValue("Description", "The LuaPlus Debugger Window add-in provides support for viewing Lua tables while debugging.");
devKey.SetDWORDValue("CommandPreload", 1);
}
}
}
if (devKey.Open(HKEY_CURRENT_USER, path + "\\PreloadAddinState") == ERROR_SUCCESS)
{
devKey.SetDWORDValue("LuaPlusDebugger.Connect", 1);
}
}
void DestroyRegistrationKey(const CString& version)
{
CString path = "Software\\Microsoft\\VisualStudio\\" + version;
CRegKey key;
if (key.Open(HKEY_LOCAL_MACHINE, path + "\\AddIns") == ERROR_SUCCESS)
{
// Remove all old entries.
key.RecurseDeleteKey("LuaPlusDebugger.Connect");
}
}
// DllRegisterServer - Adds entries to the system registry
STDAPI DllRegisterServer(void)
{
// registers object, typelib and all interfaces in typelib
HRESULT hr = _AtlModule.DllRegisterServer();
// Get the module name.
TCHAR moduleName[_MAX_PATH];
moduleName[0] = 0;
::GetModuleFileName(_AtlModule.GetResourceInstance(), (TCHAR*)&moduleName, _MAX_PATH);
// Get the module path.
TCHAR modulePath[_MAX_PATH];
_tcscpy(modulePath, moduleName);
TCHAR* ptr = _tcsrchr(modulePath, '\\');
ptr++;
*ptr++ = 0;<|fim▁hole|> _tcscpy(moduleShortName, ptr + 1);
// Register the add-in?
CreateRegistrationKey("7.0", modulePath, moduleShortName);
CreateRegistrationKey("7.1", modulePath, moduleShortName);
return hr;
}
// DllUnregisterServer - Removes entries from the system registry
STDAPI DllUnregisterServer(void)
{
HRESULT hr = _AtlModule.DllUnregisterServer();
// Remove entries.
DestroyRegistrationKey("7.0");
DestroyRegistrationKey("7.1");
return hr;
}<|fim▁end|> |
// Get the short module name.
TCHAR moduleShortName[_MAX_PATH];
ptr = _tcsrchr(moduleName, '\\'); |
<|file_name|>_9_3_normal_visualization.rs<|end_file_name|><|fim▁begin|>#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
use std::ffi::CStr;
extern crate glfw;
use self::glfw::Context;
extern crate gl;
use cgmath::{Matrix4, Deg, vec3, Point3, perspective};
use common::{process_events, processInput};
use shader::Shader;
use camera::Camera;
use model::Model;
// settings
const SCR_WIDTH: u32 = 1280;
const SCR_HEIGHT: u32 = 720;
pub fn main_4_9_3() {
let mut camera = Camera {
Position: Point3::new(0.0, 0.0, 3.0),
..Camera::default()
};
let mut firstMouse = true;
let mut lastX: f32 = SCR_WIDTH as f32 / 2.0;
let mut lastY: f32 = SCR_HEIGHT as f32 / 2.0;
// timing
let mut deltaTime: f32; // time between current frame and last frame
let mut lastFrame: f32 = 0.0;
// glfw: initialize and configure
// ------------------------------
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
#[cfg(target_os = "macos")]
glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
// glfw window creation
// --------------------
let (mut window, events) = glfw.create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
.expect("Failed to create GLFW window");
window.make_current();
window.set_framebuffer_size_polling(true);
window.set_cursor_pos_polling(true);
window.set_scroll_polling(true);
// tell GLFW to capture our mouse
window.set_cursor_mode(glfw::CursorMode::Disabled);
// gl: load all OpenGL function pointers
// ---------------------------------------
gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
let (shader, normalShader, nanoSuit) = unsafe {
// configure global opengl state
// -----------------------------
gl::Enable(gl::DEPTH_TEST);
// build and compile shaders
// -------------------------
let shader = Shader::new(
"src/_4_advanced_opengl/shaders/9.3.default.vs",
"src/_4_advanced_opengl/shaders/9.3.default.fs",
);
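        // the normal-visualization program adds a geometry shader stage that emits a short line along each vertex normal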
let normalShader = Shader::with_geometry_shader(
"src/_4_advanced_opengl/shaders/9.3.normal_visualization.vs",
"src/_4_advanced_opengl/shaders/9.3.normal_visualization.fs",
"src/_4_advanced_opengl/shaders/9.3.normal_visualization.gs"
);
// load models
// -----------
let nanoSuit = Model::new("resources/objects/nanosuit/nanosuit.obj");
(shader, normalShader, nanoSuit)
};
// render loop
// -----------
while !window.should_close() {
// per-frame time logic
// --------------------
let currentFrame = glfw.get_time() as f32;
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
// events
// -----
process_events(&events, &mut firstMouse, &mut lastX, &mut lastY, &mut camera);
// input
// -----
processInput(&mut window, deltaTime, &mut camera);
<|fim▁hole|> gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
// configure transformation matrices
let projection: Matrix4<f32> = perspective(Deg(45.0), SCR_WIDTH as f32 / SCR_HEIGHT as f32, 0.1, 100.0);
let view = camera.GetViewMatrix();
let mut model = Matrix4::<f32>::from_translation(vec3(0.0, -1.75, 0.0)); // translate it down so it's at the center of the scene
model = model * Matrix4::from_scale(0.2); // it's a bit too big for our scene, so scale it down
shader.useProgram();
shader.setMat4(c_str!("projection"), &projection);
shader.setMat4(c_str!("view"), &view);
shader.setMat4(c_str!("model"), &model);
// draw model as usual
nanoSuit.Draw(&shader);
// then draw model with normal visualizing geometry shader
normalShader.useProgram();
normalShader.setMat4(c_str!("projection"), &projection);
normalShader.setMat4(c_str!("view"), &view);
normalShader.setMat4(c_str!("model"), &model);
nanoSuit.Draw(&normalShader);
}
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
window.swap_buffers();
glfw.poll_events();
}
}<|fim▁end|> | // render
// ------
unsafe {
gl::ClearColor(0.1, 0.1, 0.1, 1.0); |
<|file_name|>iPushable.cpp<|end_file_name|><|fim▁begin|>/*----------------------------------------------------------------------------------*/
//This code is part of a larger project whose main purpose is to entertain either //
//by working on it and helping it be better or by playing it in its actual state  //
// //
//Copyright (C) 2011 Three Legged Studio //
// //
// This program is free software; you can redistribute it and/or modify //
// it under the terms of the GNU General Public License as published by //
// the Free Software Foundation; either version 2, or (at your option) //
// any later version. //
// //
// This program is distributed in the hope that it will be useful, //
// but WITHOUT ANY WARRANTY; without even the implied warranty of //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //
// GNU General Public License for more details. //
// //
// You should have received a copy of the GNU General Public License //
// along with this program; if not, write to the Free Software Foundation, //
// Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. //
// //
// You can contact us at projectpgz.blogspot.com //
/*----------------------------------------------------------------------------------*/
#include "iPushable.h"
iPushable::iPushable(){
	// The original code passed the still-uninitialized members stepPushDist and
	// useConstraints straight back into init(), which is undefined behavior.
	// Use explicit defaults instead (the concrete values here are assumed).
	init(1, false);
}
iPushable::iPushable(int stepPushDist, bool useConstraints){
init(stepPushDist, useConstraints);
}
void iPushable::init(int stepPushDist, bool useConstraints){
this->stepPushDist = stepPushDist;
this->useConstraints = useConstraints;
locked = false;
}
iPushable::~iPushable(){
<|fim▁hole|> if(!locked){
		// Move only in the directions allowed by the constraints
		if(useConstraints){
			if(pushConstraints.find(dir) != pushConstraints.end()){
				return move(ent, dir);
			}
			else{
				return make_pair(0, 0);
			}
		}
		// Move without restrictions
		return move(ent, dir);
}
return make_pair(0, 0);
}
void iPushable::lockPush(){
locked = true;
}
void iPushable::unlockPush(){
locked = false;
}
bool iPushable::isLockedPush(){
return locked;
}
void iPushable::setConstraints(set<Direction> pushConstrains){
this->pushConstraints = pushConstrains;
useConstraints = true;
}
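// Returns the absolute (dx, dy) displacement actually travelled; this can be
// smaller than stepPushDist when moveToContact() stops the entity early.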
std::pair<int, int> iPushable::move(Entity *ent, Direction dir){
int xtemp, ytemp;
int xorig, yorig;
xorig = ent->x;
yorig = ent->y;
switch (dir) {
case UP:
xtemp = ent->x;
ytemp = ent->y-stepPushDist;
break;
case DOWN:
xtemp = ent->x;
ytemp = ent->y+stepPushDist;
break;
case LEFT:
xtemp = ent->x-stepPushDist;
ytemp = ent->y;
break;
case RIGHT:
xtemp = ent->x+stepPushDist;
ytemp = ent->y;
break;
	default:
		// Defensive default: leave the entity where it is so xtemp/ytemp
		// are never read uninitialized.
		xtemp = ent->x;
		ytemp = ent->y;
		break;
	}
if (xtemp == ent->x && ytemp == ent->y)
return make_pair(0, 0);
/*if (ent->world->place_free(ent->x, ytemp, ent)){
ent->y = ytemp;
}
else{
ent->world->moveToContact(ent->x,ytemp, ent);
}
if (ent->world->place_free(xtemp, ent->y, ent)){
ent->x = xtemp;
}
else{
ent->world->moveToContact(xtemp,ent->y, ent);
}*/
if (!ent->world->place_free(xtemp, ytemp, ent))
ent->world->moveToContact(xtemp, ytemp, ent);
else
ent->x = xtemp, ent->y = ytemp;
	return make_pair(abs(ent->x - xorig), abs(ent->y - yorig));
}<|fim▁end|> | }
std::pair<int, int> iPushable::onPush(Entity *ent, Direction dir){
// move by stepPushDist as long as we are not locked
|
<|file_name|>babel.config.js<|end_file_name|><|fim▁begin|>module.exports = (api) => {
api.cache(true);
return {
presets: [
[
"@babel/preset-env",
{
useBuiltIns: "usage",
corejs: 3,
},
],
],<|fim▁hole|><|fim▁end|> | };
}; |
<|file_name|>upgrade.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the<|fim▁hole|>
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import format
from resource_management.libraries.functions import Direction
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
def run_migration(env, upgrade_type):
"""
If the acl migration script is present, then run it for either upgrade or downgrade.
That script was introduced in HDP 2.3.4.0 and requires stopping all Kafka brokers first.
Requires configs to be present.
:param env: Environment.
  :param upgrade_type: "rolling" or "nonrolling"
"""
import params
if upgrade_type is None:
raise Fail('Parameter "upgrade_type" is missing.')
if params.upgrade_direction is None:
raise Fail('Parameter "upgrade_direction" is missing.')
if not params.security_enabled:
Logger.info("Skip running the Kafka ACL migration script since cluster security is not enabled.")
return
Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), params.upgrade_direction))
  # If the ACL migration script exists in the version being upgraded/downgraded to, attempt to run it while still using the current bits.
kafka_acls_script = None
command_suffix = ""
if params.upgrade_direction == Direction.UPGRADE:
kafka_acls_script = format("{stack_root}/{version}/kafka/bin/kafka-acls.sh")
command_suffix = "--upgradeAcls"
elif params.upgrade_direction == Direction.DOWNGRADE:
kafka_acls_script = format("{stack_root}/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
command_suffix = "--downgradeAcls"
if kafka_acls_script is not None:
if os.path.exists(kafka_acls_script):
Logger.info("Found Kafka acls script: {0}".format(kafka_acls_script))
if params.zookeeper_connect is None:
raise Fail("Could not retrieve property kafka-broker/zookeeper.connect")
acls_command = "{0} --authorizer kafka.security.auth.SimpleAclAuthorizer --authorizer-properties zookeeper.connect={1} {2}".\
format(kafka_acls_script, params.zookeeper_connect, command_suffix)
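    # Run the script as the Kafka service user and surface its output in the command log.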
Execute(acls_command,
user=params.kafka_user,
logoutput=True)
else:
Logger.info("Did not find Kafka acls script: {0}".format(kafka_acls_script))<|fim▁end|> | "License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import uuid
from sockjs.tornado import SockJSRouter
from flask import Flask, g, request, session, Blueprint
from flask.ext.login import LoginManager, current_user
from flask.ext.principal import Principal, Permission, RoleNeed, identity_loaded, UserNeed
from flask.ext.babel import Babel, gettext, ngettext
from flask.ext.assets import Environment, Bundle
from babel import Locale
from watchdog.observers import Observer
from watchdog.observers.polling import PollingObserver
from collections import defaultdict
import os
import logging
import logging.config
import atexit
import signal
SUCCESS = {}
NO_CONTENT = ("", 204)
app = Flask("octoprint")
assets = None
babel = None
debug = False
printer = None
printerProfileManager = None
fileManager = None
slicingManager = None
analysisQueue = None
userManager = None
eventManager = None
loginManager = None
pluginManager = None
appSessionManager = None
pluginLifecycleManager = None
principals = Principal(app)
admin_permission = Permission(RoleNeed("admin"))
user_permission = Permission(RoleNeed("user"))
# only import the octoprint stuff down here, as it might depend on things defined above to be initialized already
from octoprint.printer import get_connection_options
from octoprint.printer.profile import PrinterProfileManager
from octoprint.printer.standard import Printer
from octoprint.settings import settings
import octoprint.users as users
import octoprint.events as events
import octoprint.plugin
import octoprint.timelapse
import octoprint._version
import octoprint.util
import octoprint.filemanager.storage
import octoprint.filemanager.analysis
import octoprint.slicing
from . import util
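# fresh random API key for the bundled web UI, regenerated on every server start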
UI_API_KEY = ''.join('%02X' % ord(z) for z in uuid.uuid4().bytes)
versions = octoprint._version.get_versions()
VERSION = versions['version']
BRANCH = versions['branch'] if 'branch' in versions else None
DISPLAY_VERSION = "%s (%s branch)" % (VERSION, BRANCH) if BRANCH else VERSION
del versions
LOCALES = []
LANGUAGES = set()
@identity_loaded.connect_via(app)
def on_identity_loaded(sender, identity):
user = load_user(identity.id)
if user is None:
return
identity.provides.add(UserNeed(user.get_name()))
if user.is_user():
identity.provides.add(RoleNeed("user"))
if user.is_admin():
identity.provides.add(RoleNeed("admin"))
def load_user(id):
if id == "_api":
return users.ApiUser()
if session and "usersession.id" in session:
sessionid = session["usersession.id"]
else:
sessionid = None
if userManager is not None:
if sessionid:
return userManager.findUser(username=id, session=sessionid)
else:
return userManager.findUser(username=id)
return users.DummyUser()
#~~ startup code
class Server():
def __init__(self, configfile=None, basedir=None, host="0.0.0.0", port=5000, debug=False, allowRoot=False, logConf=None):
self._configfile = configfile
self._basedir = basedir
self._host = host
self._port = port
self._debug = debug
self._allowRoot = allowRoot
self._logConf = logConf
self._server = None
self._logger = None
self._lifecycle_callbacks = defaultdict(list)
self._template_searchpaths = []
def run(self):
if not self._allowRoot:
self._check_for_root()
global app
global babel
global printer
global printerProfileManager
global fileManager
global slicingManager
global analysisQueue
global userManager
global eventManager
global loginManager
global pluginManager
global appSessionManager
global pluginLifecycleManager
global debug
from tornado.ioloop import IOLoop
from tornado.web import Application, RequestHandler
import sys
debug = self._debug
# first initialize the settings singleton and make sure it uses given configfile and basedir if available
s = settings(init=True, basedir=self._basedir, configfile=self._configfile)
# then monkey patch a bunch of stuff
util.tornado.fix_ioloop_scheduling()
util.flask.enable_additional_translations(additional_folders=[s.getBaseFolder("translations")])
# setup app
self._setup_app()
# setup i18n
self._setup_i18n(app)
# then initialize logging
self._setup_logging(self._debug, self._logConf)
self._logger = logging.getLogger(__name__)
def exception_logger(exc_type, exc_value, exc_tb):
self._logger.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_tb))
sys.excepthook = exception_logger
self._logger.info("Starting OctoPrint %s" % DISPLAY_VERSION)
# then initialize the plugin manager
pluginManager = octoprint.plugin.plugin_manager(init=True)
printerProfileManager = PrinterProfileManager()
eventManager = events.eventManager()
analysisQueue = octoprint.filemanager.analysis.AnalysisQueue()
slicingManager = octoprint.slicing.SlicingManager(s.getBaseFolder("slicingProfiles"), printerProfileManager)
storage_managers = dict()
storage_managers[octoprint.filemanager.FileDestinations.LOCAL] = octoprint.filemanager.storage.LocalFileStorage(s.getBaseFolder("uploads"))
fileManager = octoprint.filemanager.FileManager(analysisQueue, slicingManager, printerProfileManager, initial_storage_managers=storage_managers)
printer = Printer(fileManager, analysisQueue, printerProfileManager)
appSessionManager = util.flask.AppSessionManager()
pluginLifecycleManager = LifecycleManager(pluginManager)
def octoprint_plugin_inject_factory(name, implementation):
if not isinstance(implementation, octoprint.plugin.OctoPrintPlugin):
return None
return dict(
plugin_manager=pluginManager,
printer_profile_manager=printerProfileManager,
event_bus=eventManager,
analysis_queue=analysisQueue,
slicing_manager=slicingManager,
file_manager=fileManager,
printer=printer,
app_session_manager=appSessionManager,
plugin_lifecycle_manager=pluginLifecycleManager,
data_folder=os.path.join(settings().getBaseFolder("data"), name)
)
def settings_plugin_inject_factory(name, implementation):
if not isinstance(implementation, octoprint.plugin.SettingsPlugin):
return None
default_settings = implementation.get_settings_defaults()
get_preprocessors, set_preprocessors = implementation.get_settings_preprocessors()
plugin_settings = octoprint.plugin.plugin_settings(name,
defaults=default_settings,
get_preprocessors=get_preprocessors,
set_preprocessors=set_preprocessors)
return dict(settings=plugin_settings)
def settings_plugin_config_migration(name, implementation):
if not isinstance(implementation, octoprint.plugin.SettingsPlugin):
return
settings_version = implementation.get_settings_version()
settings_migrator = implementation.on_settings_migrate
if settings_version is not None and settings_migrator is not None:
stored_version = implementation._settings.get_int(["_config_version"])
if stored_version is None or stored_version < settings_version:
settings_migrator(settings_version, stored_version)
implementation._settings.set_int(["_config_version"], settings_version)
implementation._settings.save()
implementation.on_settings_initialized()
pluginManager.implementation_inject_factories=[octoprint_plugin_inject_factory, settings_plugin_inject_factory]
pluginManager.initialize_implementations()
settingsPlugins = pluginManager.get_implementations(octoprint.plugin.SettingsPlugin)
for implementation in settingsPlugins:
try:
settings_plugin_config_migration(implementation._identifier, implementation)
except:
self._logger.exception("Error while trying to migrate settings for plugin {}, ignoring it".format(implementation._identifier))
pluginManager.implementation_post_inits=[settings_plugin_config_migration]
pluginManager.log_all_plugins()
# initialize file manager and register it for changes in the registered plugins
fileManager.initialize()
pluginLifecycleManager.add_callback(["enabled", "disabled"], lambda name, plugin: fileManager.reload_plugins())
# initialize slicing manager and register it for changes in the registered plugins
slicingManager.initialize()
pluginLifecycleManager.add_callback(["enabled", "disabled"], lambda name, plugin: slicingManager.reload_slicers())
# setup jinja2
self._setup_jinja2()
def template_enabled(name, plugin):
if plugin.implementation is None or not isinstance(plugin.implementation, octoprint.plugin.TemplatePlugin):
return
self._register_additional_template_plugin(plugin.implementation)
def template_disabled(name, plugin):
if plugin.implementation is None or not isinstance(plugin.implementation, octoprint.plugin.TemplatePlugin):
return
self._unregister_additional_template_plugin(plugin.implementation)
pluginLifecycleManager.add_callback("enabled", template_enabled)
pluginLifecycleManager.add_callback("disabled", template_disabled)
# setup assets
self._setup_assets()
# configure timelapse
octoprint.timelapse.configureTimelapse()
# setup command triggers
events.CommandTrigger(printer)
if self._debug:
events.DebugEventListener()
# setup access control
if s.getBoolean(["accessControl", "enabled"]):
userManagerName = s.get(["accessControl", "userManager"])
try:
clazz = octoprint.util.get_class(userManagerName)
userManager = clazz()
except AttributeError, e:
self._logger.exception("Could not instantiate user manager %s, will run with accessControl disabled!" % userManagerName)
app.wsgi_app = util.ReverseProxied(
app.wsgi_app,
s.get(["server", "reverseProxy", "prefixHeader"]),
s.get(["server", "reverseProxy", "schemeHeader"]),
s.get(["server", "reverseProxy", "hostHeader"]),
s.get(["server", "reverseProxy", "prefixFallback"]),
s.get(["server", "reverseProxy", "schemeFallback"]),
s.get(["server", "reverseProxy", "hostFallback"])
)
secret_key = s.get(["server", "secretKey"])
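		# fall back to a freshly generated random secret if none has been configured yet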
if not secret_key:
import string
from random import choice
chars = string.ascii_lowercase + string.ascii_uppercase + string.digits
secret_key = "".join(choice(chars) for _ in xrange(32))
s.set(["server", "secretKey"], secret_key)
s.save()
app.secret_key = secret_key
loginManager = LoginManager()
loginManager.session_protection = "strong"
loginManager.user_callback = load_user
if userManager is None:
loginManager.anonymous_user = users.DummyUser
principals.identity_loaders.appendleft(users.dummy_identity_loader)
loginManager.init_app(app)
if self._host is None:
self._host = s.get(["server", "host"])
if self._port is None:
self._port = s.getInt(["server", "port"])
app.debug = self._debug
# register API blueprint
self._setup_blueprints()
## Tornado initialization starts here
ioloop = IOLoop()
ioloop.install()
self._router = SockJSRouter(self._create_socket_connection, "/sockjs")
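		# the SockJS endpoint is the push channel over which clients receive printer state updates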
upload_suffixes = dict(name=s.get(["server", "uploads", "nameSuffix"]), path=s.get(["server", "uploads", "pathSuffix"]))
server_routes = self._router.urls + [
# various downloads
(r"/downloads/timelapse/([^/]*\.mpg)", util.tornado.LargeResponseHandler, dict(path=s.getBaseFolder("timelapse"), as_attachment=True)),
(r"/downloads/files/local/(.*)", util.tornado.LargeResponseHandler, dict(path=s.getBaseFolder("uploads"), as_attachment=True, path_validation=util.tornado.path_validation_factory(lambda path: not os.path.basename(path).startswith("."), status_code=404))),
(r"/downloads/logs/([^/]*)", util.tornado.LargeResponseHandler, dict(path=s.getBaseFolder("logs"), as_attachment=True, access_validation=util.tornado.access_validation_factory(app, loginManager, util.flask.admin_validator))),
# camera snapshot
(r"/downloads/camera/current", util.tornado.UrlForwardHandler, dict(url=s.get(["webcam", "snapshot"]), as_attachment=True, access_validation=util.tornado.access_validation_factory(app, loginManager, util.flask.user_validator))),
# generated webassets
(r"/static/webassets/(.*)", util.tornado.LargeResponseHandler, dict(path=os.path.join(s.getBaseFolder("generated"), "webassets")))
]
for name, hook in pluginManager.get_hooks("octoprint.server.http.routes").items():
try:
result = hook(list(server_routes))
except:
self._logger.exception("There was an error while retrieving additional server routes from plugin hook {name}".format(**locals()))
else:
if isinstance(result, (list, tuple)):
for entry in result:
if not isinstance(entry, tuple) or not len(entry) == 3:
continue
if not isinstance(entry[0], basestring):
continue
if not isinstance(entry[2], dict):
continue
route, handler, kwargs = entry
route = r"/plugin/{name}/{route}".format(name=name, route=route if not route.startswith("/") else route[1:])
self._logger.debug("Adding additional route {route} handled by handler {handler} and with additional arguments {kwargs!r}".format(**locals()))
server_routes.append((route, handler, kwargs))
server_routes.append((r".*", util.tornado.UploadStorageFallbackHandler, dict(fallback=util.tornado.WsgiInputContainer(app.wsgi_app), file_prefix="octoprint-file-upload-", file_suffix=".tmp", suffixes=upload_suffixes)))
self._tornado_app = Application(server_routes)
max_body_sizes = [
("POST", r"/api/files/([^/]*)", s.getInt(["server", "uploads", "maxSize"])),
("POST", r"/api/languages", 5 * 1024 * 1024)
]
# allow plugins to extend allowed maximum body sizes
for name, hook in pluginManager.get_hooks("octoprint.server.http.bodysize").items():
try:
result = hook(list(max_body_sizes))
except:
self._logger.exception("There was an error while retrieving additional upload sizes from plugin hook {name}".format(**locals()))
else:
if isinstance(result, (list, tuple)):
for entry in result:
if not isinstance(entry, tuple) or not len(entry) == 3:
continue
if not entry[0] in util.tornado.UploadStorageFallbackHandler.BODY_METHODS:
continue
if not isinstance(entry[2], int):
continue
method, route, size = entry
route = r"/plugin/{name}/{route}".format(name=name, route=route if not route.startswith("/") else route[1:])
self._logger.debug("Adding maximum body size of {size}B for {method} requests to {route})".format(**locals()))
max_body_sizes.append((method, route, size))
self._server = util.tornado.CustomHTTPServer(self._tornado_app, max_body_sizes=max_body_sizes, default_max_body_size=s.getInt(["server", "maxSize"]))
self._server.listen(self._port, address=self._host)
eventManager.fire(events.Events.STARTUP)
if s.getBoolean(["serial", "autoconnect"]):
(port, baudrate) = s.get(["serial", "port"]), s.getInt(["serial", "baudrate"])
printer_profile = printerProfileManager.get_default()
connectionOptions = get_connection_options()
if port in connectionOptions["ports"]:
printer.connect(port=port, baudrate=baudrate, profile=printer_profile["id"] if "id" in printer_profile else "_default")
# start up watchdogs
if s.getBoolean(["feature", "pollWatched"]):
			# use the less performant polling observer if explicitly configured
observer = PollingObserver()
else:
# use os default
observer = Observer()
observer.schedule(util.watchdog.GcodeWatchdogHandler(fileManager, printer), s.getBaseFolder("watched"))
observer.start()
# run our startup plugins
octoprint.plugin.call_plugin(octoprint.plugin.StartupPlugin,
"on_startup",
args=(self._host, self._port))
def call_on_startup(name, plugin):
implementation = plugin.get_implementation(octoprint.plugin.StartupPlugin)
if implementation is None:
return
implementation.on_startup(self._host, self._port)
pluginLifecycleManager.add_callback("enabled", call_on_startup)
# prepare our after startup function
def on_after_startup():
self._logger.info("Listening on http://%s:%d" % (self._host, self._port))
# now this is somewhat ugly, but the issue is the following: startup plugins might want to do things for
# which they need the server to be already alive (e.g. for being able to resolve urls, such as favicons
			# or service xmls or the like). While they are working, though, the ioloop would block. Therefore we'll
# create a single use thread in which to perform our after-startup-tasks, start that and hand back
# control to the ioloop
def work():
octoprint.plugin.call_plugin(octoprint.plugin.StartupPlugin,
"on_after_startup")
def call_on_after_startup(name, plugin):
implementation = plugin.get_implementation(octoprint.plugin.StartupPlugin)
if implementation is None:
return
implementation.on_after_startup()
pluginLifecycleManager.add_callback("enabled", call_on_after_startup)
import threading
threading.Thread(target=work).start()
ioloop.add_callback(on_after_startup)
# prepare our shutdown function
def on_shutdown():
# will be called on clean system exit and shutdown the watchdog observer and call the on_shutdown methods
# on all registered ShutdownPlugins
self._logger.info("Shutting down...")
observer.stop()
observer.join()
octoprint.plugin.call_plugin(octoprint.plugin.ShutdownPlugin,
"on_shutdown")
self._logger.info("Goodbye!")
atexit.register(on_shutdown)
def sigterm_handler(*args, **kwargs):
# will stop tornado on SIGTERM, making the program exit cleanly
def shutdown_tornado():
ioloop.stop()
ioloop.add_callback_from_signal(shutdown_tornado)
signal.signal(signal.SIGTERM, sigterm_handler)
try:
# this is the main loop - as long as tornado is running, OctoPrint is running
ioloop.start()
except (KeyboardInterrupt, SystemExit):
pass
except:
self._logger.fatal("Now that is embarrassing... Something really really went wrong here. Please report this including the stacktrace below in OctoPrint's bugtracker. Thanks!")
self._logger.exception("Stacktrace follows:")
def _create_socket_connection(self, session):
global printer, fileManager, analysisQueue, userManager, eventManager
return util.sockjs.PrinterStateConnection(printer, fileManager, analysisQueue, userManager, eventManager, pluginManager, session)
def _check_for_root(self):
if "geteuid" in dir(os) and os.geteuid() == 0:
exit("You should not run OctoPrint as root!")
def _get_locale(self):
global LANGUAGES
if "l10n" in request.values:
return Locale.negotiate([request.values["l10n"]], LANGUAGES)
if hasattr(g, "identity") and g.identity and userManager is not None:
userid = g.identity.id
try:
user_language = userManager.getUserSetting(userid, ("interface", "language"))
if user_language is not None and not user_language == "_default":
return Locale.negotiate([user_language], LANGUAGES)
except octoprint.users.UnknownUser:
pass
default_language = settings().get(["appearance", "defaultLanguage"])
if default_language is not None and not default_language == "_default" and default_language in LANGUAGES:
return Locale.negotiate([default_language], LANGUAGES)
return request.accept_languages.best_match(LANGUAGES)
def _setup_logging(self, debug, logConf=None):
defaultConfig = {
"version": 1,
"formatters": {
"simple": {
"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "DEBUG",
"formatter": "simple",
"stream": "ext://sys.stdout"
},
"file": {
"class": "logging.handlers.TimedRotatingFileHandler",
"level": "DEBUG",
"formatter": "simple",
"when": "D",
"backupCount": "1",
"filename": os.path.join(settings().getBaseFolder("logs"), "octoprint.log")
},
"serialFile": {
"class": "logging.handlers.RotatingFileHandler",
"level": "DEBUG",
"formatter": "simple",
"maxBytes": 2 * 1024 * 1024, # let's limit the serial log to 2MB in size
"filename": os.path.join(settings().getBaseFolder("logs"), "serial.log")
}
},
"loggers": {
"SERIAL": {
"level": "CRITICAL",
"handlers": ["serialFile"],
"propagate": False
},
"tornado.application": {
"level": "INFO"
},
"tornado.general": {
"level": "INFO"<|fim▁hole|> "level": "WARN"
}
},
"root": {
"level": "INFO",
"handlers": ["console", "file"]
}
}
if debug:
defaultConfig["root"]["level"] = "DEBUG"
if logConf is None:
logConf = os.path.join(settings().getBaseFolder("base"), "logging.yaml")
configFromFile = {}
if os.path.exists(logConf) and os.path.isfile(logConf):
import yaml
with open(logConf, "r") as f:
configFromFile = yaml.safe_load(f)
config = octoprint.util.dict_merge(defaultConfig, configFromFile)
logging.config.dictConfig(config)
logging.captureWarnings(True)
import warnings
warnings.simplefilter("always")
if settings().getBoolean(["serial", "log"]):
# enable debug logging to serial.log
logging.getLogger("SERIAL").setLevel(logging.DEBUG)
logging.getLogger("SERIAL").debug("Enabling serial logging")
def _setup_app(self):
@app.before_request
def before_request():
g.locale = self._get_locale()
@app.after_request
def after_request(response):
# send no-cache headers with all POST responses
if request.method == "POST":
response.cache_control.no_cache = True
response.headers.add("X-Clacks-Overhead", "GNU Terry Pratchett")
return response
def _setup_i18n(self, app):
global babel
global LOCALES
global LANGUAGES
babel = Babel(app)
def get_available_locale_identifiers(locales):
result = set()
# add available translations
for locale in locales:
result.add(locale.language)
if locale.territory:
# if a territory is specified, add that too
result.add("%s_%s" % (locale.language, locale.territory))
return result
LOCALES = babel.list_translations()
LANGUAGES = get_available_locale_identifiers(LOCALES)
@babel.localeselector
def get_locale():
return self._get_locale()
def _setup_jinja2(self):
app.jinja_env.add_extension("jinja2.ext.do")
# configure additional template folders for jinja2
import jinja2
filesystem_loader = jinja2.FileSystemLoader([])
filesystem_loader.searchpath = self._template_searchpaths
jinja_loader = jinja2.ChoiceLoader([
app.jinja_loader,
filesystem_loader
])
app.jinja_loader = jinja_loader
del jinja2
self._register_template_plugins()
def _register_template_plugins(self):
template_plugins = pluginManager.get_implementations(octoprint.plugin.TemplatePlugin)
for plugin in template_plugins:
try:
self._register_additional_template_plugin(plugin)
except:
self._logger.exception("Error while trying to register templates of plugin {}, ignoring it".format(plugin._identifier))
def _register_additional_template_plugin(self, plugin):
folder = plugin.get_template_folder()
if folder is not None and not folder in self._template_searchpaths:
self._template_searchpaths.append(folder)
def _unregister_additional_template_plugin(self, plugin):
folder = plugin.get_template_folder()
if folder is not None and folder in self._template_searchpaths:
self._template_searchpaths.remove(folder)
def _setup_blueprints(self):
from octoprint.server.api import api
from octoprint.server.apps import apps, clear_registered_app
import octoprint.server.views
app.register_blueprint(api, url_prefix="/api")
app.register_blueprint(apps, url_prefix="/apps")
# also register any blueprints defined in BlueprintPlugins
self._register_blueprint_plugins()
# and register a blueprint for serving the static files of asset plugins which are not blueprint plugins themselves
self._register_asset_plugins()
global pluginLifecycleManager
def clear_apps(name, plugin):
clear_registered_app()
pluginLifecycleManager.add_callback("enabled", clear_apps)
pluginLifecycleManager.add_callback("disabled", clear_apps)
def _register_blueprint_plugins(self):
blueprint_plugins = octoprint.plugin.plugin_manager().get_implementations(octoprint.plugin.BlueprintPlugin)
for plugin in blueprint_plugins:
try:
self._register_blueprint_plugin(plugin)
except:
self._logger.exception("Error while registering blueprint of plugin {}, ignoring it".format(plugin._identifier))
continue
def _register_asset_plugins(self):
asset_plugins = octoprint.plugin.plugin_manager().get_implementations(octoprint.plugin.AssetPlugin)
for plugin in asset_plugins:
if isinstance(plugin, octoprint.plugin.BlueprintPlugin):
continue
try:
self._register_asset_plugin(plugin)
except:
self._logger.exception("Error while registering assets of plugin {}, ignoring it".format(plugin._identifier))
continue
def _register_blueprint_plugin(self, plugin):
name = plugin._identifier
blueprint = plugin.get_blueprint()
if blueprint is None:
return
if plugin.is_blueprint_protected():
from octoprint.server.util import apiKeyRequestHandler, corsResponseHandler
blueprint.before_request(apiKeyRequestHandler)
blueprint.after_request(corsResponseHandler)
url_prefix = "/plugin/{name}".format(name=name)
app.register_blueprint(blueprint, url_prefix=url_prefix)
if self._logger:
self._logger.debug("Registered API of plugin {name} under URL prefix {url_prefix}".format(name=name, url_prefix=url_prefix))
def _register_asset_plugin(self, plugin):
name = plugin._identifier
url_prefix = "/plugin/{name}".format(name=name)
blueprint = Blueprint("plugin." + name, name, static_folder=plugin.get_asset_folder())
app.register_blueprint(blueprint, url_prefix=url_prefix)
if self._logger:
self._logger.debug("Registered assets of plugin {name} under URL prefix {url_prefix}".format(name=name, url_prefix=url_prefix))
def _setup_assets(self):
global app
global assets
global pluginManager
util.flask.fix_webassets_cache()
util.flask.fix_webassets_filtertool()
base_folder = settings().getBaseFolder("generated")
# clean the folder
if settings().getBoolean(["devel", "webassets", "clean_on_startup"]):
import shutil
import errno
import sys
for entry in ("webassets", ".webassets-cache"):
path = os.path.join(base_folder, entry)
# delete path if it exists
if os.path.isdir(path):
try:
self._logger.debug("Deleting {path}...".format(**locals()))
shutil.rmtree(path)
except:
self._logger.exception("Error while trying to delete {path}, leaving it alone".format(**locals()))
continue
# re-create path
self._logger.debug("Creating {path}...".format(**locals()))
error_text = "Error while trying to re-create {path}, that might cause errors with the webassets cache".format(**locals())
try:
os.makedirs(path)
except OSError as e:
if e.errno == errno.EACCES:
# that might be caused by the user still having the folder open somewhere, let's try again after
# waiting a bit
import time
for n in xrange(3):
time.sleep(0.5)
self._logger.debug("Creating {path}: Retry #{retry} after {time}s".format(path=path, retry=n+1, time=(n + 1)*0.5))
try:
os.makedirs(path)
break
except:
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.exception("Ignored error while creating directory {path}".format(**locals()))
pass
else:
# this will only get executed if we never did
# successfully execute makedirs above
self._logger.exception(error_text)
continue
else:
# not an access error, so something we don't understand
# went wrong -> log an error and stop
self._logger.exception(error_text)
continue
except:
# not an OSError, so something we don't understand
# went wrong -> log an error and stop
self._logger.exception(error_text)
continue
self._logger.info("Reset webasset folder {path}...".format(**locals()))
AdjustedEnvironment = type(Environment)(Environment.__name__, (Environment,), dict(
resolver_class=util.flask.PluginAssetResolver
))
class CustomDirectoryEnvironment(AdjustedEnvironment):
@property
def directory(self):
return base_folder
assets = CustomDirectoryEnvironment(app)
assets.debug = not settings().getBoolean(["devel", "webassets", "bundle"])
UpdaterType = type(util.flask.SettingsCheckUpdater)(util.flask.SettingsCheckUpdater.__name__, (util.flask.SettingsCheckUpdater,), dict(
updater=assets.updater
))
assets.updater = UpdaterType
enable_gcodeviewer = settings().getBoolean(["gcodeViewer", "enabled"])
preferred_stylesheet = settings().get(["devel", "stylesheet"])
dynamic_assets = util.flask.collect_plugin_assets(
enable_gcodeviewer=enable_gcodeviewer,
preferred_stylesheet=preferred_stylesheet
)
js_libs = [
"js/lib/jquery/jquery.min.js",
"js/lib/modernizr.custom.js",
"js/lib/lodash.min.js",
"js/lib/sprintf.min.js",
"js/lib/knockout.js",
"js/lib/knockout.mapping-latest.js",
"js/lib/babel.js",
"js/lib/avltree.js",
"js/lib/bootstrap/bootstrap.js",
"js/lib/bootstrap/bootstrap-modalmanager.js",
"js/lib/bootstrap/bootstrap-modal.js",
"js/lib/bootstrap/bootstrap-slider.js",
"js/lib/bootstrap/bootstrap-tabdrop.js",
"js/lib/jquery/jquery.ui.core.js",
"js/lib/jquery/jquery.ui.widget.js",
"js/lib/jquery/jquery.ui.mouse.js",
"js/lib/jquery/jquery.flot.js",
"js/lib/jquery/jquery.iframe-transport.js",
"js/lib/jquery/jquery.fileupload.js",
"js/lib/jquery/jquery.slimscroll.min.js",
"js/lib/jquery/jquery.qrcode.min.js",
"js/lib/moment-with-locales.min.js",
"js/lib/pusher.color.min.js",
"js/lib/detectmobilebrowser.js",
"js/lib/md5.min.js",
"js/lib/pnotify.min.js",
"js/lib/bootstrap-slider-knockout-binding.js",
"js/lib/loglevel.min.js",
"js/lib/sockjs-0.3.4.min.js"
]
js_app = dynamic_assets["js"] + [
"js/app/dataupdater.js",
"js/app/helpers.js",
"js/app/main.js",
]
css_libs = [
"css/bootstrap.min.css",
"css/bootstrap-modal.css",
"css/bootstrap-slider.css",
"css/bootstrap-tabdrop.css",
"css/font-awesome.min.css",
"css/jquery.fileupload-ui.css",
"css/pnotify.min.css"
]
css_app = list(dynamic_assets["css"])
if len(css_app) == 0:
css_app = ["empty"]
less_app = list(dynamic_assets["less"])
if len(less_app) == 0:
less_app = ["empty"]
from webassets.filter import register_filter, Filter
from webassets.filter.cssrewrite.base import PatternRewriter
import re
class LessImportRewrite(PatternRewriter):
name = "less_importrewrite"
patterns = {
"import_rewrite": re.compile("(@import(\s+\(.*\))?\s+)\"(.*)\";")
}
def import_rewrite(self, m):
import_with_options = m.group(1)
import_url = m.group(3)
if not import_url.startswith("http:") and not import_url.startswith("https:") and not import_url.startswith("/"):
import_url = "../less/" + import_url
return "{import_with_options}\"{import_url}\";".format(**locals())
class JsDelimiterBundle(Filter):
name = "js_delimiter_bundler"
options = {}
def input(self, _in, out, **kwargs):
out.write(_in.read())
out.write("\n;\n")
register_filter(LessImportRewrite)
register_filter(JsDelimiterBundle)
js_libs_bundle = Bundle(*js_libs, output="webassets/packed_libs.js", filters="js_delimiter_bundler")
if settings().getBoolean(["devel", "webassets", "minify"]):
js_app_bundle = Bundle(*js_app, output="webassets/packed_app.js", filters="rjsmin, js_delimiter_bundler")
else:
js_app_bundle = Bundle(*js_app, output="webassets/packed_app.js", filters="js_delimiter_bundler")
css_libs_bundle = Bundle(*css_libs, output="webassets/packed_libs.css")
css_app_bundle = Bundle(*css_app, output="webassets/packed_app.css", filters="cssrewrite")
all_less_bundle = Bundle(*less_app, output="webassets/packed_app.less", filters="cssrewrite, less_importrewrite")
assets.register("js_libs", js_libs_bundle)
assets.register("js_app", js_app_bundle)
assets.register("css_libs", css_libs_bundle)
assets.register("css_app", css_app_bundle)
assets.register("less_app", all_less_bundle)
class LifecycleManager(object):
def __init__(self, plugin_manager):
self._plugin_manager = plugin_manager
self._plugin_lifecycle_callbacks = defaultdict(list)
self._logger = logging.getLogger(__name__)
def on_plugin_event_factory(lifecycle_event):
def on_plugin_event(name, plugin):
self.on_plugin_event(lifecycle_event, name, plugin)
return on_plugin_event
self._plugin_manager.on_plugin_loaded = on_plugin_event_factory("loaded")
self._plugin_manager.on_plugin_unloaded = on_plugin_event_factory("unloaded")
self._plugin_manager.on_plugin_activated = on_plugin_event_factory("activated")
self._plugin_manager.on_plugin_deactivated = on_plugin_event_factory("deactivated")
self._plugin_manager.on_plugin_enabled = on_plugin_event_factory("enabled")
self._plugin_manager.on_plugin_disabled = on_plugin_event_factory("disabled")
def on_plugin_event(self, event, name, plugin):
for lifecycle_callback in self._plugin_lifecycle_callbacks[event]:
lifecycle_callback(name, plugin)
def add_callback(self, events, callback):
if isinstance(events, (str, unicode)):
events = [events]
for event in events:
self._plugin_lifecycle_callbacks[event].append(callback)
def remove_callback(self, callback, events=None):
if events is None:
for event in self._plugin_lifecycle_callbacks:
if callback in self._plugin_lifecycle_callbacks[event]:
self._plugin_lifecycle_callbacks[event].remove(callback)
else:
if isinstance(events, (str, unicode)):
events = [events]
for event in events:
if callback in self._plugin_lifecycle_callbacks[event]:
self._plugin_lifecycle_callbacks[event].remove(callback)
if __name__ == "__main__":
server = Server()
server.run()<|fim▁end|> | },
"octoprint.server.util.flask": { |
<|file_name|>mixer.js<|end_file_name|><|fim▁begin|>/*
* Copyright © 2020 Luciano Iam <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
import { ChildComponent } from '../base/component.js';
import { StateNode } from '../base/protocol.js';
import Strip from './strip.js';
export default class Mixer extends ChildComponent {
constructor (parent) {
super(parent);
this._strips = {};
this._ready = false;
}
get ready () {
return this._ready;
}
get strips () {
return Object.values(this._strips);
}
getStripByName (name) {
name = name.trim().toLowerCase();
return this.strips.find(strip => strip.name.trim().toLowerCase() == name);
}
handle (node, addr, val) {
if (node.startsWith('strip')) {
if (node == StateNode.STRIP_DESCRIPTION) {
this._strips[addr] = new Strip(this, addr, val);
this.notifyPropertyChanged('strips');
return true;
} else {
const stripAddr = [addr[0]];
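                // strip-scoped messages carry the strip address as the first element of addr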
if (stripAddr in this._strips) {
return this._strips[stripAddr].handle(node, addr, val);
}
}<|fim▁hole|> // all initial strip description messages have been received at this point
if (!this._ready) {
this.updateLocal('ready', true);
// passthrough by allowing to return false
}
}
return false;
}
}<|fim▁end|> | } else { |
<|file_name|>pool.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use std::collections::HashMap;
use std::iter::FromIterator;
use std::path::{Path, PathBuf};
use std::vec::Vec;
use serde_json;
use uuid::Uuid;
use devicemapper::{Device, DmName, DmNameBuf, Sectors};
use crate::engine::{
BlockDev, BlockDevTier, DevUuid, Filesystem, FilesystemUuid, MaybeDbusPath, Name, Pool,
PoolUuid, Redundancy, RenameAction,
};
use crate::stratis::{ErrorEnum, StratisError, StratisResult};
use crate::engine::types::{FreeSpaceState, PoolExtendState, PoolState};
use crate::engine::strat_engine::backstore::{Backstore, StratBlockDev, MIN_MDA_SECTORS};
use crate::engine::strat_engine::names::validate_name;
use crate::engine::strat_engine::serde_structs::{FlexDevsSave, PoolSave, Recordable};<|fim▁hole|>use crate::engine::strat_engine::thinpool::{ThinPool, ThinPoolSizeParams, DATA_BLOCK_SIZE};
/// Get the index which indicates the start of unallocated space in the cap
/// device.
/// NOTE: Since segments are always allocated to each flex dev in order, the
/// last segment for each is the highest. This allows avoiding sorting all the
/// segments and just sorting the set consisting of the last segment from
/// each list of segments.
/// Precondition: This method is called only when setting up a pool, which
/// ensures that the flex devs metadata lists are all non-empty.
fn next_index(flex_devs: &FlexDevsSave) -> Sectors {
let expect_msg = "Setting up rather than initializing a pool, so each flex dev must have been allocated at least some segments.";
[
flex_devs
.meta_dev
.last()
.unwrap_or_else(|| panic!(expect_msg)),
flex_devs
.thin_meta_dev
.last()
.unwrap_or_else(|| panic!(expect_msg)),
flex_devs
.thin_data_dev
.last()
.unwrap_or_else(|| panic!(expect_msg)),
flex_devs
.thin_meta_dev_spare
.last()
.unwrap_or_else(|| panic!(expect_msg)),
]
.iter()
.max_by_key(|x| x.0)
.map(|&&(start, length)| start + length)
.expect("iterator is non-empty")
}
/// Check the metadata of an individual pool for consistency.
/// Precondition: This method is called only when setting up a pool, which
/// ensures that the flex devs metadata lists are all non-empty.
pub fn check_metadata(metadata: &PoolSave) -> StratisResult<()> {
let flex_devs = &metadata.flex_devs;
let next = next_index(&flex_devs);
let allocated_from_cap = metadata.backstore.cap.allocs[0].1;
if allocated_from_cap != next {
let err_msg = format!(
"{} used in thinpool, but {} allocated from backstore cap device",
next, allocated_from_cap
);
return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg));
}
    // If the total length of the allocations in the flex devs does not
// equal next, consider the situation an error.
{
let total_allocated = flex_devs
.meta_dev
.iter()
.chain(flex_devs.thin_meta_dev.iter())
.chain(flex_devs.thin_data_dev.iter())
.chain(flex_devs.thin_meta_dev_spare.iter())
.map(|x| x.1)
.sum::<Sectors>();
if total_allocated != next {
let err_msg = format!(
"{} used in thinpool, but {} given up by cache for pool {}",
total_allocated, next, metadata.name
);
return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg));
}
}
// If the amount allocated to the cap device is less than the amount
// allocated to the flex devices, consider the situation an error.
// Consider it an error if the amount allocated to the cap device is 0.
// If this is the case, then the thin pool can not exist.
{
let total_allocated = metadata.backstore.data_tier.blockdev.allocs[0]
.iter()
.map(|x| x.length)
.sum::<Sectors>();
if total_allocated == Sectors(0) {
let err_msg = format!(
"no segments allocated to the cap device for pool {}",
metadata.name
);
return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg));
}
if next > total_allocated {
let err_msg = format!(
"{} allocated to cap device, but {} allocated to flex devs",
next, total_allocated
);
return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg));
}
}
Ok(())
}
#[derive(Debug)]
pub struct StratPool {
backstore: Backstore,
redundancy: Redundancy,
thin_pool: ThinPool,
dbus_path: MaybeDbusPath,
}
impl StratPool {
/// Initialize a Stratis Pool.
/// 1. Initialize the block devices specified by paths.
/// 2. Set up thinpool device to back filesystems.
pub fn initialize(
name: &str,
paths: &[&Path],
redundancy: Redundancy,
) -> StratisResult<(PoolUuid, StratPool)> {
let pool_uuid = Uuid::new_v4();
let mut backstore = Backstore::initialize(pool_uuid, paths, MIN_MDA_SECTORS)?;
let thinpool = ThinPool::new(
pool_uuid,
&ThinPoolSizeParams::default(),
DATA_BLOCK_SIZE,
&mut backstore,
);
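        // If thin pool creation failed, tear the backstore down again so no devices are left claimed.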
let mut thinpool = match thinpool {
Ok(thinpool) => thinpool,
Err(err) => {
let _ = backstore.destroy();
return Err(err);
}
};
thinpool.check(pool_uuid, &mut backstore)?;
let mut pool = StratPool {
backstore,
redundancy,
thin_pool: thinpool,
dbus_path: MaybeDbusPath(None),
};
pool.write_metadata(&Name::new(name.to_owned()))?;
Ok((pool_uuid, pool))
}
/// Setup a StratPool using its UUID and the list of devnodes it has.
/// Precondition: every device in devnodes has already been determined
/// to belong to the pool with the specified uuid.
/// Precondition: A metadata verification step has already been run.
pub fn setup(
uuid: PoolUuid,
devnodes: &HashMap<Device, PathBuf>,
metadata: &PoolSave,
) -> StratisResult<(Name, StratPool)> {
let mut backstore = Backstore::setup(uuid, &metadata.backstore, devnodes, None)?;
let mut thinpool = ThinPool::setup(
uuid,
&metadata.thinpool_dev,
&metadata.flex_devs,
&backstore,
)?;
let changed = thinpool.check(uuid, &mut backstore)?;
let mut pool = StratPool {
backstore,
redundancy: Redundancy::NONE,
thin_pool: thinpool,
dbus_path: MaybeDbusPath(None),
};
let pool_name = &metadata.name;
if changed {
pool.write_metadata(pool_name)?;
}
Ok((Name::new(pool_name.to_owned()), pool))
}
/// Write current metadata to pool members.
pub fn write_metadata(&mut self, name: &str) -> StratisResult<()> {
let data = serde_json::to_string(&self.record(name))?;
self.backstore.save_state(data.as_bytes())
}
/// Teardown a pool.
#[cfg(test)]
pub fn teardown(&mut self) -> StratisResult<()> {
self.thin_pool.teardown()?;
self.backstore.teardown()
}
pub fn has_filesystems(&self) -> bool {
self.thin_pool.has_filesystems()
}
/// The names of DM devices belonging to this pool that may generate events
pub fn get_eventing_dev_names(&self, pool_uuid: PoolUuid) -> Vec<DmNameBuf> {
self.thin_pool.get_eventing_dev_names(pool_uuid)
}
/// Called when a DM device in this pool has generated an event.
// TODO: Just check the device that evented. Currently checks
// everything.
pub fn event_on(
&mut self,
pool_uuid: PoolUuid,
pool_name: &Name,
dm_name: &DmName,
) -> StratisResult<()> {
assert!(self
.thin_pool
.get_eventing_dev_names(pool_uuid)
.iter()
.any(|x| dm_name == &**x));
if self.thin_pool.check(pool_uuid, &mut self.backstore)? {
self.write_metadata(pool_name)?;
}
Ok(())
}
pub fn record(&self, name: &str) -> PoolSave {
PoolSave {
name: name.to_owned(),
backstore: self.backstore.record(),
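            // thin_pool.record() is called for two different save structs below;
            // the Recordable impl is selected by each field's type.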
flex_devs: self.thin_pool.record(),
thinpool_dev: self.thin_pool.record(),
}
}
pub fn get_strat_blockdev(&self, uuid: DevUuid) -> Option<(BlockDevTier, &StratBlockDev)> {
self.backstore.get_blockdev_by_uuid(uuid)
}
}
impl Pool for StratPool {
fn create_filesystems<'a, 'b>(
&'a mut self,
pool_uuid: PoolUuid,
pool_name: &str,
specs: &[(&'b str, Option<Sectors>)],
) -> StratisResult<Vec<(&'b str, FilesystemUuid)>> {
let names: HashMap<_, _> = HashMap::from_iter(specs.iter().map(|&tup| (tup.0, tup.1)));
for name in names.keys() {
validate_name(name)?;
if self.thin_pool.get_mut_filesystem_by_name(*name).is_some() {
return Err(StratisError::Engine(
ErrorEnum::AlreadyExists,
name.to_string(),
));
}
}
// TODO: Roll back on filesystem initialization failure.
let mut result = Vec::new();
for (name, size) in names {
let fs_uuid = self
.thin_pool
.create_filesystem(pool_uuid, pool_name, name, size)?;
result.push((name, fs_uuid));
}
Ok(result)
}
fn add_blockdevs(
&mut self,
pool_uuid: PoolUuid,
pool_name: &str,
paths: &[&Path],
tier: BlockDevTier,
) -> StratisResult<Vec<DevUuid>> {
let bdev_info = if tier == BlockDevTier::Cache {
// If adding cache devices, must suspend the pool, since the cache
            // must be augmented with the new devices.
self.thin_pool.suspend()?;
let bdev_info = self.backstore.add_cachedevs(pool_uuid, paths)?;
self.thin_pool.set_device(self.backstore.device().expect("Since thin pool exists, space must have been allocated from the backstore, so backstore must have a cap device"))?;
self.thin_pool.resume()?;
Ok(bdev_info)
} else {
// If just adding data devices, no need to suspend the pool.
// No action will be taken on the DM devices.
let bdev_info = self.backstore.add_datadevs(pool_uuid, paths)?;
// Adding data devices does not change the state of the thin
// pool at all. However, if the thin pool is in a state
// where it would request an allocation from the backstore the
// addition of the new data devs may have changed its context
// so that it can satisfy the allocation request where
// previously it could not. Run check() in case that is true.
self.thin_pool.check(pool_uuid, &mut self.backstore)?;
Ok(bdev_info)
};
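        // Persist the updated pool metadata before returning the new device UUIDs.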
self.write_metadata(pool_name)?;
bdev_info
}
fn destroy(&mut self) -> StratisResult<()> {
self.thin_pool.teardown()?;
self.backstore.destroy()?;
Ok(())
}
fn destroy_filesystems<'a>(
&'a mut self,
pool_name: &str,
fs_uuids: &[FilesystemUuid],
) -> StratisResult<Vec<FilesystemUuid>> {
let mut removed = Vec::new();
for &uuid in fs_uuids {
self.thin_pool.destroy_filesystem(pool_name, uuid)?;
removed.push(uuid);
}
Ok(removed)
}
fn rename_filesystem(
&mut self,
pool_name: &str,
uuid: FilesystemUuid,
new_name: &str,
) -> StratisResult<RenameAction> {
validate_name(new_name)?;
self.thin_pool.rename_filesystem(pool_name, uuid, new_name)
}
fn snapshot_filesystem(
&mut self,
pool_uuid: PoolUuid,
pool_name: &str,
origin_uuid: FilesystemUuid,
snapshot_name: &str,
) -> StratisResult<(FilesystemUuid, &mut Filesystem)> {
validate_name(snapshot_name)?;
if self
.thin_pool
.get_filesystem_by_name(snapshot_name)
.is_some()
{
return Err(StratisError::Engine(
ErrorEnum::AlreadyExists,
snapshot_name.to_string(),
));
}
self.thin_pool
.snapshot_filesystem(pool_uuid, pool_name, origin_uuid, snapshot_name)
}
fn total_physical_size(&self) -> Sectors {
self.backstore.datatier_size()
}
fn total_physical_used(&self) -> StratisResult<Sectors> {
        self.thin_pool
            .total_physical_used()
            .map(|v| v + self.backstore.datatier_metadata_size())
}
fn filesystems(&self) -> Vec<(Name, FilesystemUuid, &Filesystem)> {
self.thin_pool.filesystems()
}
fn filesystems_mut(&mut self) -> Vec<(Name, FilesystemUuid, &mut Filesystem)> {
self.thin_pool.filesystems_mut()
}
fn get_filesystem(&self, uuid: FilesystemUuid) -> Option<(Name, &Filesystem)> {
self.thin_pool
.get_filesystem_by_uuid(uuid)
.map(|(name, fs)| (name, fs as &Filesystem))
}
fn get_mut_filesystem(&mut self, uuid: FilesystemUuid) -> Option<(Name, &mut Filesystem)> {
self.thin_pool
.get_mut_filesystem_by_uuid(uuid)
.map(|(name, fs)| (name, fs as &mut Filesystem))
}
fn blockdevs(&self) -> Vec<(DevUuid, &BlockDev)> {
self.backstore
.blockdevs()
.iter()
.map(|&(u, b)| (u, b as &BlockDev))
.collect()
}
fn blockdevs_mut(&mut self) -> Vec<(DevUuid, &mut BlockDev)> {
self.backstore
.blockdevs_mut()
.into_iter()
.map(|(u, b)| (u, b as &mut BlockDev))
.collect()
}
fn get_blockdev(&self, uuid: DevUuid) -> Option<(BlockDevTier, &BlockDev)> {
self.get_strat_blockdev(uuid)
.map(|(t, b)| (t, b as &BlockDev))
}
fn get_mut_blockdev(&mut self, uuid: DevUuid) -> Option<(BlockDevTier, &mut BlockDev)> {
self.backstore
.get_mut_blockdev_by_uuid(uuid)
.map(|(t, b)| (t, b as &mut BlockDev))
}
fn set_blockdev_user_info(
&mut self,
pool_name: &str,
uuid: DevUuid,
user_info: Option<&str>,
) -> StratisResult<bool> {
if self.backstore.set_blockdev_user_info(uuid, user_info)? {
self.write_metadata(pool_name)?;
Ok(true)
} else {
Ok(false)
}
}
fn state(&self) -> PoolState {
self.thin_pool.state()
}
fn extend_state(&self) -> PoolExtendState {
self.thin_pool.extend_state()
}
fn free_space_state(&self) -> FreeSpaceState {
self.thin_pool.free_space_state()
}
fn set_dbus_path(&mut self, path: MaybeDbusPath) {
self.thin_pool.set_dbus_path(path.clone());
self.dbus_path = path
}
fn get_dbus_path(&self) -> &MaybeDbusPath {
&self.dbus_path
}
}
#[cfg(test)]
mod tests {
use std::fs::OpenOptions;
use std::io::{BufWriter, Read, Write};
use nix::mount::{mount, umount, MsFlags};
use tempfile;
use devicemapper::{Bytes, IEC, SECTOR_SIZE};
use crate::engine::devlinks;
use crate::engine::types::Redundancy;
use crate::engine::strat_engine::backstore::{find_all, get_metadata};
use crate::engine::strat_engine::cmd;
use crate::engine::strat_engine::tests::{loopbacked, real};
use super::*;
fn invariant(pool: &StratPool, pool_name: &str) {
check_metadata(&pool.record(&Name::new(pool_name.into()))).unwrap();
}
/// Verify that metadata can be read from pools.
/// 1. Split paths into two separate sets.
/// 2. Create pools from the two sets.
/// 3. Use find_all() to get the devices in the pool.
/// 4. Use get_metadata to find metadata for each pool and verify
/// correctness.
/// 5. Teardown the engine and repeat.
fn test_basic_metadata(paths: &[&Path]) {
assert!(paths.len() > 1);
let (paths1, paths2) = paths.split_at(paths.len() / 2);
let name1 = "name1";
let (uuid1, mut pool1) = StratPool::initialize(&name1, paths1, Redundancy::NONE).unwrap();
invariant(&pool1, &name1);
let metadata1 = pool1.record(name1);
let name2 = "name2";
let (uuid2, mut pool2) = StratPool::initialize(&name2, paths2, Redundancy::NONE).unwrap();
invariant(&pool2, &name2);
let metadata2 = pool2.record(name2);
cmd::udev_settle().unwrap();
let pools = find_all().unwrap();
assert_eq!(pools.len(), 2);
let devnodes1 = &pools[&uuid1];
let devnodes2 = &pools[&uuid2];
let pool_save1 = get_metadata(uuid1, devnodes1).unwrap().unwrap();
let pool_save2 = get_metadata(uuid2, devnodes2).unwrap().unwrap();
assert_eq!(pool_save1, metadata1);
assert_eq!(pool_save2, metadata2);
pool1.teardown().unwrap();
pool2.teardown().unwrap();
cmd::udev_settle().unwrap();
let pools = find_all().unwrap();
assert_eq!(pools.len(), 2);
let devnodes1 = &pools[&uuid1];
let devnodes2 = &pools[&uuid2];
let pool_save1 = get_metadata(uuid1, devnodes1).unwrap().unwrap();
let pool_save2 = get_metadata(uuid2, devnodes2).unwrap().unwrap();
assert_eq!(pool_save1, metadata1);
assert_eq!(pool_save2, metadata2);
}
#[test]
pub fn loop_test_basic_metadata() {
loopbacked::test_with_spec(
&loopbacked::DeviceLimits::Range(2, 3, None),
test_basic_metadata,
);
}
#[test]
pub fn real_test_basic_metadata() {
real::test_with_spec(
&real::DeviceLimits::AtLeast(2, None, None),
test_basic_metadata,
);
}
/// Verify that a pool with no devices does not have the minimum amount of
/// space required.
fn test_empty_pool(paths: &[&Path]) {
assert_eq!(paths.len(), 0);
assert!(StratPool::initialize("stratis_test_pool", paths, Redundancy::NONE).is_err());
}
#[test]
pub fn loop_test_empty_pool() {
loopbacked::test_with_spec(&loopbacked::DeviceLimits::Exactly(0, None), test_empty_pool);
}
#[test]
pub fn real_test_empty_pool() {
real::test_with_spec(&real::DeviceLimits::Exactly(0, None, None), test_empty_pool);
}
/// Test that adding a cachedev causes metadata to be updated.
/// Verify that teardown and setup of pool allows reading from filesystem
/// written before cache was added. Check some basic facts about the
/// metadata.
fn test_add_cachedevs(paths: &[&Path]) {
assert!(paths.len() > 1);
let (paths1, paths2) = paths.split_at(paths.len() / 2);
let name = "stratis-test-pool";
devlinks::cleanup_devlinks(Vec::new().into_iter());
let (uuid, mut pool) = StratPool::initialize(&name, paths2, Redundancy::NONE).unwrap();
devlinks::pool_added(&name);
invariant(&pool, &name);
let metadata1 = pool.record(name);
assert!(metadata1.backstore.cache_tier.is_none());
let (_, fs_uuid) = pool
.create_filesystems(uuid, &name, &[("stratis-filesystem", None)])
.unwrap()
.pop()
.unwrap();
invariant(&pool, &name);
let tmp_dir = tempfile::Builder::new()
.prefix("stratis_testing")
.tempdir()
.unwrap();
let new_file = tmp_dir.path().join("stratis_test.txt");
let bytestring = b"some bytes";
{
let (_, fs) = pool.get_filesystem(fs_uuid).unwrap();
mount(
Some(&fs.devnode()),
tmp_dir.path(),
Some("xfs"),
MsFlags::empty(),
None as Option<&str>,
)
.unwrap();
OpenOptions::new()
.create(true)
.write(true)
.open(&new_file)
.unwrap()
.write_all(bytestring)
.unwrap();
}
pool.add_blockdevs(uuid, &name, paths1, BlockDevTier::Cache)
.unwrap();
invariant(&pool, &name);
let metadata2 = pool.record(name);
assert!(metadata2.backstore.cache_tier.is_some());
let mut buf = [0u8; 10];
{
OpenOptions::new()
.read(true)
.open(&new_file)
.unwrap()
.read_exact(&mut buf)
.unwrap();
}
assert_eq!(&buf, bytestring);
umount(tmp_dir.path()).unwrap();
pool.teardown().unwrap();
cmd::udev_settle().unwrap();
let pools = find_all().unwrap();
assert_eq!(pools.len(), 1);
let devices = &pools[&uuid];
let (name, pool) = StratPool::setup(
uuid,
&devices,
&get_metadata(uuid, &devices).unwrap().unwrap(),
)
.unwrap();
invariant(&pool, &name);
let mut buf = [0u8; 10];
{
let (_, fs) = pool.get_filesystem(fs_uuid).unwrap();
mount(
Some(&fs.devnode()),
tmp_dir.path(),
Some("xfs"),
MsFlags::empty(),
None as Option<&str>,
)
.unwrap();
OpenOptions::new()
.read(true)
.open(&new_file)
.unwrap()
.read_exact(&mut buf)
.unwrap();
}
assert_eq!(&buf, bytestring);
umount(tmp_dir.path()).unwrap();
}
#[test]
pub fn loop_test_add_cachedevs() {
loopbacked::test_with_spec(
&loopbacked::DeviceLimits::Range(2, 3, None),
test_add_cachedevs,
);
}
#[test]
pub fn real_test_add_cachedevs() {
real::test_with_spec(
&real::DeviceLimits::AtLeast(2, None, None),
test_add_cachedevs,
);
}
/// Verify that adding additional blockdevs will cause a pool that is
/// out of space to be extended.
fn test_add_datadevs(paths: &[&Path]) {
assert!(paths.len() > 1);
let (paths1, paths2) = paths.split_at(1);
let name = "stratis-test-pool";
devlinks::cleanup_devlinks(Vec::new().into_iter());
let (pool_uuid, mut pool) = StratPool::initialize(&name, paths1, Redundancy::NONE).unwrap();
devlinks::pool_added(&name);
invariant(&pool, &name);
let fs_name = "stratis_test_filesystem";
let (_, fs_uuid) = pool
.create_filesystems(pool_uuid, &name, &[(&fs_name, None)])
.unwrap()
.pop()
.expect("just created one");
let devnode = pool.get_filesystem(fs_uuid).unwrap().1.devnode();
{
let buffer_length = IEC::Mi;
let mut f = BufWriter::with_capacity(
buffer_length as usize,
OpenOptions::new().write(true).open(devnode).unwrap(),
);
let buf = &[1u8; SECTOR_SIZE];
let mut amount_written = Sectors(0);
let buffer_length = Bytes(buffer_length).sectors();
while pool.thin_pool.extend_state() == PoolExtendState::Good
&& pool.thin_pool.state() == PoolState::Running
{
f.write_all(buf).unwrap();
amount_written += Sectors(1);
// Run check roughly every time the buffer is cleared.
// Running it more often is pointless as the pool is guaranteed
// not to see any effects unless the buffer is cleared.
if amount_written % buffer_length == Sectors(1) {
pool.thin_pool
.check(pool_uuid, &mut pool.backstore)
.unwrap();
}
}
pool.add_blockdevs(pool_uuid, &name, paths2, BlockDevTier::Data)
.unwrap();
assert_matches!(pool.thin_pool.extend_state(), PoolExtendState::Good);
assert_matches!(pool.thin_pool.state(), PoolState::Running);
}
}
#[test]
pub fn loop_test_add_datadevs() {
loopbacked::test_with_spec(
&loopbacked::DeviceLimits::Range(2, 3, Some((4u64 * Bytes(IEC::Gi)).sectors())),
test_add_datadevs,
);
}
#[test]
pub fn real_test_add_datadevs() {
real::test_with_spec(
&real::DeviceLimits::AtLeast(
2,
Some((2u64 * Bytes(IEC::Gi)).sectors()),
Some((4u64 * Bytes(IEC::Gi)).sectors()),
),
test_add_datadevs,
);
}
}<|fim▁end|> | |
<|file_name|>papatcher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
papatcher.py: simple python PA patcher
Copyright (c) 2014 Pyrus <[email protected]>
See the file LICENSE for copying permission.
"""
from argparse import ArgumentParser
from concurrent import futures
from contextlib import contextmanager
from getpass import getpass
from gzip import decompress
from hashlib import sha1
from http.client import OK as HTTP_OK, HTTPSConnection
from json import dumps, loads
from operator import itemgetter
from os import cpu_count, environ
from pathlib import Path
from ssl import create_default_context
from signal import signal, SIGINT
from stat import S_IEXEC
from urllib.error import URLError
from urllib.request import urlopen
import atexit
import sys
import pycurl
CPU_COUNT = cpu_count()
UBERNET_HOST = "uberent.com"
# set up paths according to XDG basedir spec
if "XDG_DATA_HOME" in environ:
DATA_HOME = Path(environ["XDG_DATA_HOME"])
else:
DATA_HOME = Path(environ["HOME"], ".local", "share")
if "XDG_CACHE_HOME" in environ:
CACHE_HOME = Path(environ["XDG_CACHE_HOME"])
else:
CACHE_HOME = Path(environ["HOME"], ".cache")
GAME_ROOT = DATA_HOME / "Planetary Annihilation"
CACHE_DIR = CACHE_HOME / "Planetary Annihilation"
class Cursor(object):
@staticmethod
def hide():
"""Hide the cursor using ANSI escape codes."""
sys.stdout.write("\033[?25l")
sys.stdout.flush()
@staticmethod
def show():
"""Show the cursor using ANSI escape codes."""
sys.stdout.write("\033[?25h")
sys.stdout.flush()
@contextmanager
def shown():
"""Show the cursor within a context."""
Cursor.show()
yield
Cursor.hide()
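
    # A minimal usage sketch (this mirrors the prompts in the __main__ block
    # further below):
    #     with Cursor.shown():
    #         ubername = input("? UberName: ")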
class ProgressMeter(object):
def __init__(self):
self.last_fraction = None
def display_progress(self, download_total, downloaded,
upload_total, uploaded):
if not int(download_total):
return
fraction = (downloaded / download_total) if downloaded else 0
# display progress only if it has advanced by at least 1 percent
        if self.last_fraction is not None and abs(self.last_fraction - fraction) < 0.01:
return
self.last_fraction = fraction
print("* Progress: {0: >4.0%} of {1} bytes.".format(
fraction, int(download_total)), end="\r")
class PAPatcher(object):
"""
PA Patcher class.
Logs in to UberNet, retrieves stream information and downloads patches.
"""
def __init__(self, ubername, password, threads, ratelimit):
"""
Initialize the patcher with UberNet credentials. They will be used to
login, check for and retrieve patches.
"""
self.credentials = dumps({"TitleId": 4,
"AuthMethod": "UberCredentials",
"UberName": ubername,
"Password": password})
ssl_context = create_default_context()
self.connection = HTTPSConnection(UBERNET_HOST,
context=ssl_context)
self.threads = threads
self.ratelimit = ratelimit
def login(self):
"""
Login to UberNet and store a session ticket if successful.
"""
# return immediately if we already have a session ticket
if hasattr(self, "_session"):
return True
# otherwise request a new one
headers = {"Content-Type": "application/json;charset=utf-8"}
self.connection.request("POST", "/GC/Authenticate", headers=headers,
body=self.credentials)
response = self.connection.getresponse()
        if response.status != HTTP_OK:
print("! Encountered an error: {0} {1}.".format(response.status,
response.reason))
return False
# get and parse response data
raw_data = response.read()
result = loads(raw_data.decode("utf-8"))
if "SessionTicket" not in result:
print("! Result doesn't contain a session ticket.")
return False
self._session = result["SessionTicket"]
print("* Got Session Ticket: {0}.".format(self._session))
return True
def get_streams(self):
"""
Request and return a list of streams we can download from UberNet.
"""
# we can't continue without a session ticket
if not hasattr(self, "_session"):
return None
headers = {"X-Authorization": self._session}
# we no longer need the session ticket
del self._session
self.connection.request("GET", "/Launcher/ListStreams?Platform=Linux",
headers=headers)
response = self.connection.getresponse()
        if response.status != HTTP_OK:
print("! Encountered an error: {0} {1}.".format(response.status,
response.reason))
return None
# get and parse response data
raw_data = response.read()
result = loads(raw_data.decode("utf-8"))
self._streams = {stream["StreamName"]: stream<|fim▁hole|> def get_manifest(self, stream, full):
if not hasattr(self, "_streams") or stream not in self._streams:
return False
self._stream = self._streams[stream]
# we no longer need all streams
del self._streams
print("* Downloading manifest from {0}/{1}/{2}.".format(
self._stream["DownloadUrl"],
self._stream["TitleFolder"],
self._stream["ManifestName"]))
# we still need to add the AuthSuffix for the download to work
manifest_url = "{0}/{1}/{2}{3}".format(
self._stream["DownloadUrl"],
self._stream["TitleFolder"],
self._stream["ManifestName"],
self._stream["AuthSuffix"])
try:
with urlopen(manifest_url) as response:
manifest_raw = decompress(response.read())
self._manifest = loads(manifest_raw.decode("utf-8"))
return self._verify_manifest(full)
except URLError as err:
print("! Could not retrieve manifest: {0}.".format(err.reason))
return False
def _verify_manifest(self, full):
if not hasattr(self, "_stream") or not hasattr(self, "_manifest"):
return False
# clean up cache in the process
cache_dir = CACHE_DIR / self._stream["StreamName"]
print("* Verifying contents of cache folder {0}.".format(
str(cache_dir)))
if cache_dir.exists():
bundle_names = [bundle["checksum"]
for bundle in self._manifest["bundles"]]
old_bundles = 0
for cache_file in cache_dir.iterdir():
if full or cache_file.name not in bundle_names:
cache_file.unlink()
old_bundles += 1
if old_bundles:
print("* Purged {0} old bundle(s).".format(old_bundles))
# verify bundles in parallel
with futures.ThreadPoolExecutor(max_workers=self.threads) as executor:
# this list will contain the bundles we actually need to download
self._bundles = list()
bundle_futures = [executor.submit(self._verify_bundle, bundle)
for bundle in self._manifest["bundles"]]
for completed in futures.as_completed(bundle_futures):
if not completed.result():
# cancel waiting futures
for future in bundle_futures:
future.cancel()
return False
print("* Need to get {0} bundle(s).".format(len(self._bundles)))
        # if we get here, all bundles were verified
# we no longer need the manifest
del self._manifest
return True
def _verify_bundle(self, bundle):
if not hasattr(self, "_stream") or not hasattr(self, "_bundles"):
return False
bundle_checksum = bundle["checksum"]
cache_file = CACHE_DIR / self._stream["StreamName"] / bundle_checksum
# if we don't have that file we need to download it
if not cache_file.exists():
self._bundles.append(bundle)
return True
# if we have it, make sure the checksum is correct
with cache_file.open("rb") as cache_fp:
sha = sha1()
sha.update(cache_fp.read())
checksum = sha.hexdigest()
if checksum != bundle_checksum:
self._bundles.append(bundle)
return True
# we have that file and checksums match, nothing to do
return True
def patch(self):
if not hasattr(self, "_bundles"):
return False
with futures.ThreadPoolExecutor(max_workers=self.threads) as executor:
bundle_futures = list()
# download bundles sorted by size
self._bundles.sort(key=lambda bundle: int(bundle["size"]),
reverse=True)
for bundle in self._bundles:
bundle_checksum = bundle["checksum"]
print("* Downloading bundle {0}.".format(bundle_checksum))
if not self._download_bundle(bundle):
return False
# bundle was downloaded, start extraction in parallel
print("* Extracting bundle {0}.".format(bundle_checksum))
bundle_future = executor.submit(self._extract_bundle, bundle)
bundle_futures.append(bundle_future)
for completed in futures.as_completed(bundle_futures):
if not completed.result():
# cancel waiting futures
for future in bundle_futures:
future.cancel()
return False
# if we're here everything has been downloaded and extracted
return True
def _download_bundle(self, bundle):
if not hasattr(self, "_stream"):
return False
bundle_checksum = bundle["checksum"]
cache_base = CACHE_DIR / self._stream["StreamName"]
# make sure that path exists
if not cache_base.exists():
cache_base.mkdir(parents=True)
cache_file = cache_base / bundle_checksum
# remove the file first if it already exists
if cache_file.exists():
cache_file.unlink()
bundle_url = "{0}/{1}/hashed/{2}{3}".format(
self._stream["DownloadUrl"],
self._stream["TitleFolder"],
bundle_checksum,
self._stream["AuthSuffix"])
with cache_file.open("x+b") as cache_fp:
curl = pycurl.Curl()
curl.setopt(pycurl.URL, bundle_url)
curl.setopt(pycurl.FOLLOWLOCATION, 1)
curl.setopt(pycurl.MAXREDIRS, 5)
curl.setopt(pycurl.CONNECTTIMEOUT, 30)
curl.setopt(pycurl.NOSIGNAL, 1)
curl.setopt(pycurl.MAX_RECV_SPEED_LARGE, self.ratelimit)
curl.setopt(pycurl.WRITEDATA, cache_fp)
curl.setopt(pycurl.NOPROGRESS, 0)
progress_meter = ProgressMeter()
curl.setopt(pycurl.PROGRESSFUNCTION,
progress_meter.display_progress)
try:
curl.perform()
            except pycurl.error:
print("! Downloading bundle {0} failed!".format(
bundle_checksum))
return False
finally:
curl.close()
# verify checksum
cache_fp.seek(0)
sha = sha1()
sha.update(cache_fp.read())
checksum = sha.hexdigest()
if checksum != bundle_checksum:
print("! Checksums don't match. Expected {0}, got {1}.".format(
bundle_checksum, checksum))
return False
# everything worked out OK
return True
def _extract_bundle(self, bundle):
if not hasattr(self, "_stream"):
return False
bundle_checksum = bundle["checksum"]
cache_file = CACHE_DIR / self._stream["StreamName"] / bundle_checksum
# open cache file with gzip
with cache_file.open("rb") as cache_fp:
game_base = GAME_ROOT / self._stream["StreamName"]
# get entries sorted by offset
entries = sorted(bundle["entries"], key=itemgetter("offset"))
for entry in entries:
entry_file = game_base / entry["filename"][1:]
# make sure that path exists
if not entry_file.parent.exists():
entry_file.parent.mkdir(parents=True)
entry_offset = int(entry["offset"])
cache_fp.seek(entry_offset)
# remove the file first if it already exists
if entry_file.exists():
entry_file.unlink()
with entry_file.open("xb") as entry_fp:
# data might be compressed further, check sizeZ for that
if entry["sizeZ"] != "0":
entry_size = int(entry["sizeZ"])
raw_data = cache_fp.read(entry_size)
entry_fp.write(decompress(raw_data))
else:
entry_size = int(entry["size"])
entry_fp.write(cache_fp.read(entry_size))
# set executable
if "executable" in entry:
entry_file.chmod(entry_file.stat().st_mode | S_IEXEC)
return True
if __name__ == "__main__":
Cursor.hide()
atexit.register(Cursor.show)
signal(SIGINT, lambda sig, frame: sys.exit(SIGINT))
print("Python PA Patcher\n"
"=================")
arg_parser = ArgumentParser()
arg_parser.add_argument("-u", "--ubername",
action="store", type=str,
help="UberName used for login.")
arg_parser.add_argument("-p", "--password",
action="store", type=str,
help="Password used for login.")
arg_parser.add_argument("-s", "--stream",
action="store", type=str,
help="Stream being downloaded.")
arg_parser.add_argument("-f", "--full",
action="store_true",
help="Patch even unchanged files.")
arg_parser.add_argument("-t", "--threads",
action="store", type=int,
default=CPU_COUNT,
help="Number of threads used.")
arg_parser.add_argument("-r", "--ratelimit",
action="store", type=int,
default=0,
help="Limit downloads to bytes/sec.")
arg_parser.add_argument("--unattended",
action="store_true",
help="Don't ask any questions. If you use this "
"option, --ubername, --password and --stream "
"are mandatory")
arguments = arg_parser.parse_args()
unattended = arguments.unattended
if (unattended and not (arguments.ubername and
arguments.password and
arguments.stream)):
print("! For unattended mode you need to use "
"--ubername, --password and --stream. "
"Exiting...")
sys.exit(-1)
with Cursor.shown():
ubername = arguments.ubername or input("? UberName: ")
password = arguments.password or getpass("? Password: ")
print("* Creating patcher...")
patcher = PAPatcher(ubername, password,
arguments.threads, arguments.ratelimit)
print("* Logging in to UberNet...")
if not patcher.login():
print("! Login failed. Exiting...")
sys.exit(-1)
print("* Requesting streams...")
streams = patcher.get_streams()
if not streams:
print("! Could not acquire streams. Exiting...")
sys.exit(-1)
stream = arguments.stream
if not stream or stream not in streams:
if unattended:
print("! Invalid Stream. "
"For a selection of streams use interactive mode. "
"Exiting...")
sys.exit(-1)
with Cursor.shown():
while True:
print("* Available streams: {0}.".format(
", ".join(streams.keys())))
stream = input("? Select stream: ")
if stream in streams:
break
print("! Invalid Stream.")
print("* Downloading manifest for stream '{0}'...".format(stream))
if not patcher.get_manifest(stream, arguments.full):
print("! Could not download manifest. Exiting...")
sys.exit(-1)
print("* Patching installation for stream '{0}'...".format(stream))
if not patcher.patch():
print("! Could not patch stream. Exiting...")
sys.exit(-1)
print("* Successfully updated stream '{0}'.".format(stream))
sys.exit(0)<|fim▁end|> | for stream in result["Streams"]}
return self._streams
|
<|file_name|>UsersFullAccount.cpp<|end_file_name|><|fim▁begin|>/**********************************************************
DO NOT EDIT
This file was generated from stone specification "users"
www.prokarpaty.net
***********************************************************/
#include "dropbox/users/UsersFullAccount.h"
using namespace dropboxQt;
namespace dropboxQt{
namespace users{
///FullAccount
FullAccount::operator QJsonObject()const{
QJsonObject js;
this->toJson(js);
return js;
}
void FullAccount::toJson(QJsonObject& js)const{
Account::toJson(js);
if(!m_country.isEmpty())
js["country"] = QString(m_country);
if(!m_locale.isEmpty())
js["locale"] = QString(m_locale);
if(!m_referral_link.isEmpty())
js["referral_link"] = QString(m_referral_link);
js["team"] = (QJsonObject)m_team;
if(!m_team_member_id.isEmpty())
js["team_member_id"] = QString(m_team_member_id);
js["is_paired"] = m_is_paired;
m_account_type.toJson(js, "account_type");
}
void FullAccount::fromJson(const QJsonObject& js){
Account::fromJson(js);
m_country = js["country"].toString();
m_locale = js["locale"].toString();
m_referral_link = js["referral_link"].toString();
m_team.fromJson(js["team"].toObject());
m_team_member_id = js["team_member_id"].toString();
m_is_paired = js["is_paired"].toVariant().toBool();
m_account_type.fromJson(js["account_type"].toObject());
}
QString FullAccount::toString(bool multiline)const
{
QJsonObject js;
toJson(js);
QJsonDocument doc(js);
QString s(doc.toJson(multiline ? QJsonDocument::Indented : QJsonDocument::Compact));
return s;
}
std::unique_ptr<FullAccount> FullAccount::factory::create(const QByteArray& data)
{
QJsonDocument doc = QJsonDocument::fromJson(data);
QJsonObject js = doc.object();
return create(js);
}
std::unique_ptr<FullAccount> FullAccount::factory::create(const QJsonObject& js)
{
std::unique_ptr<FullAccount> rv;
rv = std::unique_ptr<FullAccount>(new FullAccount);
rv->fromJson(js);
return rv;
}<|fim▁hole|>}//dropboxQt<|fim▁end|> |
}//users |
<|file_name|>EntityAITradePlayer.java<|end_file_name|><|fim▁begin|>package net.minecraft.entity.ai;
import net.minecraft.entity.ai.EntityAIBase;
import net.minecraft.entity.passive.EntityVillager;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.Container;
public class EntityAITradePlayer extends EntityAIBase {
private EntityVillager field_75276_a;
public EntityAITradePlayer(EntityVillager p_i1658_1_) {
this.field_75276_a = p_i1658_1_;
this.func_75248_a(5);
}
public boolean func_75250_a() {
if(!this.field_75276_a.func_70089_S()) {
return false;
} else if(this.field_75276_a.func_70090_H()) {
return false;
} else if(!this.field_75276_a.field_70122_E) {
return false;
} else if(this.field_75276_a.field_70133_I) {
return false;<|fim▁hole|> }
public void func_75249_e() {
this.field_75276_a.func_70661_as().func_75499_g();
}
public void func_75251_c() {
this.field_75276_a.func_70932_a_((EntityPlayer)null);
}
}<|fim▁end|> | } else {
EntityPlayer var1 = this.field_75276_a.func_70931_l_();
return var1 == null?false:(this.field_75276_a.func_70068_e(var1) > 16.0D?false:var1.field_71070_bA instanceof Container);
} |
<|file_name|>new_task.rs<|end_file_name|><|fim▁begin|>use lapin::{options::*, types::FieldTable, BasicProperties, Connection, ConnectionProperties};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let args: Vec<_> = std::env::args().skip(1).collect();<|fim▁hole|> };
let addr = "amqp://127.0.0.1:5672";
let conn = Connection::connect(addr, ConnectionProperties::default()).await?;
let channel = conn.create_channel().await?;
channel
.queue_declare(
"task_queue",
QueueDeclareOptions::default(),
FieldTable::default(),
)
.await?;
channel
.basic_publish(
"",
"task_queue",
BasicPublishOptions::default(),
message.clone(),
BasicProperties::default(),
)
.await?;
println!(" [x] Sent {:?}", std::str::from_utf8(&message)?);
conn.close(0, "").await?;
Ok(())
}<|fim▁end|> | let message = match args.len() {
0 => b"hello".to_vec(),
_ => args.join(" ").into_bytes(), |
<|file_name|>base.py<|end_file_name|><|fim▁begin|># This file is part of xmpp-backends (https://github.com/mathiasertl/xmpp-backends).
#
# xmpp-backends is free software: you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# xmpp-backends is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along with xmpp-backends. If not, see
# <http://www.gnu.org/licenses/>.
"""Common code for XMPP backends."""
import ipaddress
import logging
import random
import re
import string
import time
from datetime import datetime
from datetime import timedelta
from importlib import import_module
import pytz
from .constants import CONNECTION_HTTP_BINDING
from .constants import CONNECTION_UNKNOWN
from .constants import CONNECTION_XMPP
log = logging.getLogger(__name__)
class BackendError(Exception):
"""All backend exceptions should be a subclass of this exception."""
pass
class InvalidXmppBackendError(BackendError):
"""Raised when a module cannot be imported."""
pass
class BackendConnectionError(BackendError):
"""Raised when the backend is unavailable."""
pass
class NotSupportedError(BackendError):
"""Raised when a backend does not support a specific function.
This error may be thrown only with specific versions, e.g. if it requires minimum version.
"""
pass
class UserExists(BackendError):
"""Raised when a user already exists."""
pass
class UserNotFound(BackendError):
"""Raised when a user is not found."""
def __init__(self, node, domain, resource=None):
self.node = node
self.domain = domain
self.resource = resource
def __str__(self):
s = '%s@%s' % (self.node, self.domain)
if self.resource is not None:
s += '/%s' % self.resource
return s
class UserSession(object):
"""An object describing a user session.
:param backend: The XMPP backend used for retrieving this session.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param resource: The resource of the user.
:param priority: The priority of this connection.
:param ip_address: The IP address of this connection.
:param uptime: A timestamp of when this connection came online.
:param status: The status message for this connection (e.g. "I am available.").
:param connection_type: The type of connection.
    :param encrypted: If this connection is encrypted. This may be ``None`` if the backend is not able to
        decide if the connection is encrypted (e.g. if it is an HTTP bind connection).
:param compressed: If this connection uses XMPP stream compression. This is always ``None`` for
connections where this is not applicable, e.g. Websocket connections.
"""
def __init__(self, backend, username, domain, resource, priority, ip_address, uptime, status, status_text,
connection_type, encrypted, compressed):
self._backend = backend
self.username = username
self.domain = domain
self.jid = '%s@%s' % (username, domain)
self.resource = resource
self.priority = priority
self.ip_address = ip_address
self.uptime = uptime
self.status = status
self.status_text = status_text
self.connection_type = connection_type
self.encrypted = encrypted
self.compressed = compressed
def __eq__(self, other):
return isinstance(other, UserSession) and self.jid == other.jid and self.resource == other.resource
def __hash__(self):
return hash((self.jid, self.resource))
def __str__(self):
return '%s@%s/%s' % (self.username, self.domain, self.resource)
def __repr__(self):
return '<UserSession: %s@%s/%s>' % (self.username, self.domain, self.resource)
class XmppBackendBase(object):
"""Base class for all XMPP backends."""
library = None
"""Import-party of any third-party library you need.
Set this attribute to an import path and you will be able to access the module as ``self.module``. This
way you don't have to do a module-level import, which would mean that everyone has to have that library
installed, even if they're not using your backend.
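
    A hypothetical subclass might use it like this::

        class HttpApiBackend(XmppBackendBase):
            library = 'requests'  # afterwards available as self.module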
:param version_cache_timeout: How long the API version for this backend will be cached.
:type version_cache_timeout: int or timedelta
"""
_module = None
minimum_version = None
version_cache_timeout = None
version_cache_timestamp = None
version_cache_value = None
def __init__(self, version_cache_timeout=3600):
if isinstance(version_cache_timeout, int):
version_cache_timeout = timedelta(seconds=version_cache_timeout)
self.version_cache_timeout = version_cache_timeout
super(XmppBackendBase, self).__init__()
@property
def module(self):
"""The module specified by the ``library`` attribute."""
if self._module is None:
if self.library is None:
raise ValueError(
"Backend '%s' doesn't specify a library attribute" % self.__class__)
try:
if '.' in self.library:
mod_path, cls_name = self.library.rsplit('.', 1)
mod = import_module(mod_path)
self._module = getattr(mod, cls_name)
else:
self._module = import_module(self.library)
except (AttributeError, ImportError):
raise ValueError("Couldn't load %s backend library" % cls_name)
return self._module
def datetime_to_timestamp(self, dt):
"""Helper function to convert a datetime object to a timestamp.
If datetime instance ``dt`` is naive, it is assumed that it is in UTC.
In Python 3, this just calls ``datetime.timestamp()``, in Python 2, it substracts any timezone offset
and returns the difference since 1970-01-01 00:00:00.
Note that the function always returns an int, even in Python 3.
>>> XmppBackendBase().datetime_to_timestamp(datetime(2017, 9, 17, 19, 59))
1505678340
>>> XmppBackendBase().datetime_to_timestamp(datetime(1984, 11, 6, 13, 21))
468595260
:param dt: The datetime object to convert. If ``None``, returns the current time.
:type dt: datetime
:return: The seconds in UTC.
:rtype: int
"""
if dt is None:
return int(time.time())
if not dt.tzinfo:
dt = pytz.utc.localize(dt)
return int(dt.timestamp())
def get_random_password(self, length=32, chars=None):
"""Helper function that gets a random password.
:param length: The length of the random password.
:type length: int
:param chars: A string with characters to choose from. Defaults to all ASCII letters and digits.
:type chars: str
"""
if chars is None:
chars = string.ascii_letters + string.digits
return ''.join(random.choice(chars) for x in range(length))
@property
def api_version(self):
"""Cached version of :py:func:`~xmpp_backends.base.XmppBackendBase.get_api_version`."""
now = datetime.utcnow()
if self.version_cache_timestamp and self.version_cache_timestamp + self.version_cache_timeout > now:
return self.version_cache_value # we have a cached value
self.version_cache_value = self.get_api_version()
if self.minimum_version and self.version_cache_value < self.minimum_version:
raise NotSupportedError('%s requires ejabberd >= %s' % (self.__class__.__name__,
self.minimum_version))
self.version_cache_timestamp = now
return self.version_cache_value
def get_api_version(self):
"""Get the API version used by this backend.
Note that this function is usually not invoked directly but through
:py:attr:`~xmpp_backends.base.XmppBackendBase.api_version`.
The value returned by this function is used by various backends to determine how to call various API
backends and/or how to parse th data returned by them. Backends generally assume that this function is
always working and return the correct value.
If your backend implementation cannot get this value, it should be passed via the constructor and
statically returned for the livetime of the instance.
"""
raise NotImplementedError
def user_exists(self, username, domain):
"""Verify that the given user exists.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:return: ``True`` if the user exists, ``False`` if not.
:rtype: bool
"""
raise NotImplementedError
def user_sessions(self, username, domain):
"""Get a list of all current sessions for the given user.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
        :return: A list of :py:class:`~xmpp_backends.base.UserSession` objects describing the user sessions.
:rtype: list of :py:class:`~xmpp_backends.base.UserSession`
"""
raise NotImplementedError
def stop_user_session(self, username, domain, resource, reason=''):
"""Stop a specific user session, identified by its resource.
A resource uniquely identifies a connection by a specific client.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param resource: The resource of the connection
:type resource: str
"""
raise NotImplementedError
def create_user(self, username, domain, password, email=None):
"""Create a new user.
:param username: The username of the new user.
:type username: str
:param domain: The domain of the new user.
:type domain: str
:param password: The password of the new user.
:param email: The email address provided by the user.
"""
raise NotImplementedError
def create_reservation(self, username, domain, email=None):
"""Reserve a new account.
This method is called when a user account should be reserved, meaning that the account can no longer
be registered by anybody else but the user cannot yet log in either. This is useful if e.g. an email
confirmation is still pending.
The default implementation calls :py:func:`~xmpp_backends.base.XmppBackendBase.create_user` with a
random password.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param email: The email address provided by the user. Note that at this point it is not confirmed.
You are free to ignore this parameter.
"""
password = self.get_random_password()
        self.create_user(username=username, domain=domain, password=password, email=email)
def confirm_reservation(self, username, domain, password, email=None):
"""Confirm a reservation for a username.
The default implementation just calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` and
optionally :py:func:`~xmpp_backends.base.XmppBackendBase.set_email`.
"""
self.set_password(username=username, domain=domain, password=password)
if email is not None:
self.set_email(username=username, domain=domain, email=email)
def check_password(self, username, domain, password):
"""Check the password of a user.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param password: The password to check.
:type password: str
:return: ``True`` if the password is correct, ``False`` if not.
:rtype: bool
"""
raise NotImplementedError
def set_password(self, username, domain, password):
"""Set the password of a user.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param password: The password to set.
:type password: str
"""
raise NotImplementedError
def get_last_activity(self, username, domain):
"""Get the last activity of the user.
The datetime object returned should be a naive datetime object representing the time in UTC.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:return: A naive datetime object in UTC representing the last activity.
:rtype: datetime
"""
raise NotImplementedError
def set_last_activity(self, username, domain, status='', timestamp=None):
"""Set the last activity of the user.
.. NOTE::
If your backend requires a Unix timestamp (seconds since 1970-01-01), you can use the
:py:func:`~xmpp_backends.base.XmppBackendBase.datetime_to_timestamp` convenience function to
convert it to an integer.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param status: The status text.
:type status: str
:param timestamp: A datetime object representing the last activity. If the object is not
timezone-aware, assume UTC. If ``timestamp`` is ``None``, assume the current date and time.
:type timestamp: datetime
"""
raise NotImplementedError
def block_user(self, username, domain):
"""Block the specified user.
The default implementation calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` with a
random password.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
"""
self.set_password(username, domain, self.get_random_password())
def set_email(self, username, domain, email):
"""Set the email address of a user."""
raise NotImplementedError
def check_email(self, username, domain, email):<|fim▁hole|> :param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
"""
raise NotImplementedError
def expire_reservation(self, username, domain):
"""Expire a username reservation.
This method is called when a reservation expires. The default implementation just calls
:py:func:`~xmpp_backends.base.XmppBackendBase.remove_user`. This is fine if you do not override
:py:func:`~xmpp_backends.base.XmppBackendBase.create_reservation`.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
"""
self.remove_user(username, domain)
def message_user(self, username, domain, subject, message):
"""Send a message to the given user.
:param username: The username of the user.
:type username: str
:param domain: The domain of the user.
:type domain: str
:param subject: The subject of the message.
:param message: The content of the message.
"""
pass
def all_users(self, domain):
"""Get all users for a given domain.
:param domain: The domain of interest.
:type domain: str
:return: A set of all users. The usernames do not include the domain, so ``[email protected]`` will
just be ``"user"``.
:rtype: set of str
"""
raise NotImplementedError
def all_domains(self):
"""List of all domains used by this backend.
:return: List of all domains served by this backend.
:rtype: list of str
"""
raise NotImplementedError
def all_user_sessions(self):
"""List all current user sessions.
        :return: A list of :py:class:`~xmpp_backends.base.UserSession` objects for all sessions.
:rtype: list of :py:class:`~xmpp_backends.base.UserSession`
"""
raise NotImplementedError
def remove_user(self, username, domain):
"""Remove a user.
This method is called when the user explicitly wants to remove her/his account.
:param username: The username of the new user.
:type username: str
:param domain: The domain of the user.
:type domain: str
"""
raise NotImplementedError
def stats(self, stat, domain=None):
"""Get statistical value about the XMPP server.
Minimal statistics that should be supported is ``"registered_users"`` and ``"online_users"``. The
specific backend might support additional stats.
        :param stat: The name of the statistic.
:type stat: str
        :param domain: Limit the statistic to the given domain. If not given, return statistics
            about all users.
:type domain: str
:return: The current value of the requested statistic.
:rtype: int
"""
raise NotImplementedError
class EjabberdBackendBase(XmppBackendBase):
"""Base class for ejabberd related backends.
This class overwrites a few methods common to all ejabberd backends.
"""
minimum_version = (14, 7)
def parse_version_string(self, version):
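        """Parse a dotted version string into a tuple of ints.

        >>> EjabberdBackendBase().parse_version_string('16.09')
        (16, 9)
        """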
return tuple(int(t) for t in version.split('.'))
def parse_status_string(self, data):
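        """Extract the version tuple from the output of ejabberd's ``status`` command.

        A sketch with a hypothetical status line (the exact wording varies
        between releases):

        >>> EjabberdBackendBase().parse_status_string('ejabberd 16.09-2 is running in that node')
        (16, 9)
        """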
match = re.search(r'([^ ]*) is running in that node', data)
if not match:
raise BackendError('Could not determine API version.')
return self.parse_version_string(match.groups()[0].split('-', 1)[0])
def has_usable_password(self, username, domain):
"""Always return ``True``.
In ejabberd there is no such thing as a "banned" account or an unusable password. Even ejabberd's
``ban_account`` command only sets a random password that the user could theoretically guess.
"""
return True
def set_email(self, username, domain, email):
"""Not yet implemented."""
pass
def check_email(self, username, domain, email):
"""Not yet implemented."""
pass
def parse_connection_string(self, connection):
"""Parse string as returned by the ``connected_users_info`` or ``user_sessions_info`` API calls.
>>> EjabberdBackendBase().parse_connection_string('c2s_tls')
(0, True, False)
>>> EjabberdBackendBase().parse_connection_string('c2s_compressed_tls')
(0, True, True)
>>> EjabberdBackendBase().parse_connection_string('http_bind')
(2, None, None)
:param connection: The connection string as returned by the ejabberd APIs.
:type connection: str
:return: A tuple representing the conntion type, if it is encrypted and if it uses XMPP stream
compression.
:rtype: tuple
"""
# TODO: Websockets, HTTP Polling
if connection == 'c2s_tls':
return CONNECTION_XMPP, True, False
elif connection == 'c2s_compressed_tls':
return CONNECTION_XMPP, True, True
elif connection == 'http_bind':
return CONNECTION_HTTP_BINDING, None, None
elif connection == 'c2s':
return CONNECTION_XMPP, False, False
log.warn('Could not parse connection string "%s"', connection)
return CONNECTION_UNKNOWN, True, True
def parse_ip_address(self, ip_address):
"""Parse an address as returned by the ``connected_users_info`` or ``user_sessions_info`` API calls.
Example::
>>> EjabberdBackendBase().parse_ip_address('192.168.0.1') # doctest: +FORCE_TEXT
IPv4Address('192.168.0.1')
>>> EjabberdBackendBase().parse_ip_address('::FFFF:192.168.0.1') # doctest: +FORCE_TEXT
IPv4Address('192.168.0.1')
>>> EjabberdBackendBase().parse_ip_address('::1') # doctest: +FORCE_TEXT
IPv6Address('::1')
:param ip_address: An IP address.
:type ip_address: str
:return: The parsed IP address.
:rtype: `ipaddress.IPv6Address` or `ipaddress.IPv4Address`.
"""
if ip_address.startswith('::FFFF:'):
ip_address = ip_address[7:]
return ipaddress.ip_address(ip_address)<|fim▁end|> | """Check the email address of a user.
**Note:** Most backends don't implement this feature.
|
<|file_name|>rtdeps.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains the linkage attributes to all runtime dependencies of
//! the standard library. This varies per-platform, but these libraries are
//! necessary for running libstd.
<|fim▁hole|>
// All platforms need to link to rustrt
#[cfg(not(test))]
#[link(name = "rust_builtin", kind = "static")]
extern {}
// LLVM implements the `frem` instruction as a call to `fmod`, which lives in
// libm. Hence, we must explicitly link to it.
//
// On Linux, librt and libdl are indirect dependencies via std,
// and binutils 2.22+ won't add them automatically
#[cfg(all(target_os = "linux", not(target_env = "musl")))]
#[link(name = "dl")]
#[link(name = "pthread")]
extern {}
#[cfg(target_os = "android")]
#[link(name = "dl")]
#[link(name = "log")]
extern {}
#[cfg(target_os = "freebsd")]
#[link(name = "execinfo")]
#[link(name = "pthread")]
extern {}
#[cfg(any(target_os = "dragonfly",
target_os = "bitrig",
target_os = "openbsd"))]
#[link(name = "pthread")]
extern {}
#[cfg(target_os = "macos")]
#[link(name = "System")]
extern {}
#[cfg(target_os = "ios")]
#[link(name = "System")]
extern {}<|fim▁end|> | #![unstable(feature = "std_misc")] |
<|file_name|>products-edit.js<|end_file_name|><|fim▁begin|>var EcommerceProductsEdit = function () {
var handleImages = function() {
// see http://www.plupload.com/
var uploader = new plupload.Uploader({
runtimes : 'html5,html4',
browse_button : document.getElementById('tab_images_uploader_pickfiles'), // you can pass in id...
        container: document.getElementById('tab_images_uploader_container'), // ... or DOM Element itself
url : "assets/ajax/product-images.php",
filters : {
max_file_size : '10mb',
mime_types: [
{title : "Image files", extensions : "jpg,gif,png"},
]
},
multipart_params: {'oper': "addproductimages"},
// Flash settings
flash_swf_url : 'assets/global/plugins/plupload/js/Moxie.swf',
// Silverlight settings
silverlight_xap_url : 'assets/global/plugins/plupload/js/Moxie.xap',
init: {
PostInit: function() {
$('#tab_images_uploader_filelist').html("");
$('#tab_images_uploader_uploadfiles').click(function() {
uploader.start();
return false;
});
$('#tab_images_uploader_filelist').on('click', '.added-files .remove', function(){
uploader.removeFile($(this).parent('.added-files').attr("id"));
$(this).parent('.added-files').remove();
});
},
BeforeUpload: function(up, file) {
},
FilesAdded: function(up, files) {
plupload.each(files, function(file) {
$('#tab_images_uploader_filelist').append('<div class="alert col-md-6 col-sm-12 alert-warning added-files" id="uploaded_file_' + file.id + '">' + file.name + '(' + plupload.formatSize(file.size) + ') <span class="status label label-info"></span> <a href="javascript:;" style="margin-top:0px" class="remove pull-right btn btn-xs red"><i class="fa fa-times"></i> </a></div>');
});
},
UploadProgress: function(up, file) {
$('#uploaded_file_' + file.id + ' > .status').html(file.percent + '%');
},
FileUploaded: function(up, file, response) {
var response = $.parseJSON(response.response);
if (response.error && response.error == 'no') {
var $uplaod_path = "../images/products/";
                    var newfile = response.newfilename.trim(); // uploaded file's unique name. Here you can collect uploaded file names and submit an ajax request to your server side script to process the uploaded files and update the images table
if(newfile != ""){
$image_names = $("#image-names").val();
$img_lists = new Array();
$img_lists.push(newfile);
if($image_names != ""){
$img_lists = $image_names.split("::::");
$img_lists.push(newfile);
}
$("#image-names").val($img_lists.join("::::"));
$('#uploaded_file_' + file.id + ' > .status').removeClass("label-info").addClass("label-success").html('<i class="fa fa-check"></i> Done'); // set successfull upload
var imgContaint = '<div class="col-md-3 product-image-div"><div class=mt-overlay-1><div class=item-image><img alt="'+newfile+'"src="'+$uplaod_path+newfile+'"></div><div class=mt-overlay><ul class=mt-info><li><a class="btn btn-outline green" href="'+$uplaod_path+newfile+'"><i class=icon-magnifier></i></a><li><a class="btn btn-outline btn-product-image-delete red" href=javascript:; data-image="'+newfile+'"><i class="fa fa-trash-o"></i></a></ul></div></div></div>';
$('#Product-iamge-list').append(imgContaint);
}
} else {
$('#uploaded_file_' + file.id + ' > .status').removeClass("label-info").addClass("label-danger").html('<i class="fa fa-warning"></i> Failed'); // set failed upload
Metronic.alert({type: 'danger', message: response.msg, closeInSeconds: 10, icon: 'warning'});
}
},
Error: function(up, err) {
Metronic.alert({type: 'danger', message: err.message, closeInSeconds: 10, icon: 'warning'});
}
}
});
uploader.init();
// delete images();
        // variant image handling
var optgroupContainer = $("#optiongroup-containner");
optgroupContainer.on("click", ".option-img-upload", function(e){
e.preventDefault();
$(this).closest('.mt-overlay-1').find(".option-img-upload-input").trigger("click");
});
optgroupContainer.on("change", ".option-img-upload-input", function(e){
e.preventDefault();
var $fileInput = $(this);
var fileInputImageContainer = $(this).closest('.mt-overlay-1');
var el = $fileInput.closest(".portlet").children(".portlet-body");
var $oper = 'saveoptionimage';
//over initialization
Metronic.blockUI({ target: el, animate: true, overlayColor: '#000000' });
var formData = new FormData();
formData.append('oper', $oper);
formData.append('file', $fileInput[0].files[0]);
$.ajax({
url: "assets/ajax/ajax1.php",
data: formData,
method: "post",
contentType: false,
processData: false,
dataType: 'json',
success : function(response){
if(response.error == "no"){
var d = new Date();
fileInputImageContainer.find("img").attr('src', "../images/products/"+response.filename+"?"+d.getTime());
fileInputImageContainer.find('input[name^="product-option-img"]').val(response.filename);
$fileInput.val('');
}else{
Metronic.alert({type: 'danger', message: response.msg, closeInSeconds: 10, icon: 'warning'});
}
}
});
Metronic.unblockUI(el);
});
}
var initComponents = function(){
var summerEditer = $('#product-description');
summerEditer.summernote({
height: 150, // set editor height
minHeight: 100, // set minimum height of editor
maxHeight: 300, // set maximum height of editor
placeholder: 'Product Description here...',
toolbar: [
['style', ['bold', 'italic', 'underline', 'clear']],
['font', ['superscript', 'subscript']],
['color', ['color']],
['para', ['ul', 'ol', 'paragraph']]
]
});
$('.note-editable').on('blur', function() {
if($(summerEditer.summernote('code')).text().length > 20){
$('.summernote-error').hide();
}else{
$('.summernote-error').show();
}
});
$.fn.select2.defaults.set("theme", "bootstrap");
$.fn.select2.defaults.set("id", function(object){ return object.text; });
$.fn.select2.defaults.set("tags", true);
/*$.fn.select2.defaults.set("createTag", function (params) { return { id: params.term, text: params.term, newOption: true}});
$.fn.select2.defaults.set("createSearchChoice", function(term, data){
if ( $(data).filter( function() {
return term.localeCompare(this.text)===0; //even if the this.text is undefined it works
}).length===0) {
if(confirm("Are you do you want to add item.")){
return {id:term, text:term};}
}
}) */
        // case-insensitive matcher
$.fn.select2.defaults.set("matcher", function(params, data) {
// If there are no search terms, return all of the data
if ($.trim(params.term) === '') {
return data;
}
// `params.term` should be the term that is used for searching
// `data.text` is the text that is displayed for the data object
if (data.text.toLowerCase().indexOf(params.term.toLowerCase()) > -1) {
return data;
}
// Return `null` if the term should not be displayed
return null;
});
// non casesensitive tags creater
$.fn.select2.defaults.set("createTag", function(params) {
var term = $.trim(params.term);
if(term === "") { return null; }
var optionsMatch = false;
this.$element.find("option").each(function() {
// if(this.value.toLowerCase().indexOf(term.toLowerCase()) > -1) { // for caompare values
if($(this).text().toLowerCase().indexOf(term.toLowerCase()) > -1) { // for caompare option text
optionsMatch = true;
}
});
if(optionsMatch) {
return null;
}
return {id: term, text: term, tag:true};
});
$('#product-category').select2({placeholder:"Select Category"});
$('#product-brand').select2({placeholder:"Select Manufacturer"});
$("#product-collection").select2().on("select2:select", function(e){
if(e.params.data.tag == true){
$this = $(this);
var el = $this.closest(".portlet").children(".portlet-body");
Metronic.blockUI({ target: el, animate: true, overlayColor: '#000000' });
$.post("assets/ajax/ajax1.php", {"oper": "saverandomcollection", "collection-name": $.trim(e.params.data.text)}, function(data){
Metronic.unblockUI(el);
if(data.error == "no"){
$('<option value="' + e.params.data.id + '">' + e.params.data.text + '</option>').appendTo($this);
}
}, "json");
}
});
$("#product-tags").select2();
var removeArrayItem = function (array, item){
for(var i in array){
if(array[i]==item){
array.splice(i,1);
break;
}
}
}
$("#Product-iamge-list").on("click", ".btn-product-image-delete", function(){
var $this = $(this);
if(confirm("Are you sure you want to remove image")){
var $image_container = $this.closest(".product-image-div");
var $img_name = $this.data("image");
var el = $(this).closest(".portlet").children(".portlet-body");
Metronic.blockUI({ target: el, animate: true, overlayColor: '#000000' });
$.post( "assets/ajax/product-images.php", {'product-image':$img_name, 'oper':'deleteproductimages'},function(data){
data = jQuery.parseJSON(data);
if(data.error =="no"){
$image_container.fadeOut(300, function(){ $(this).remove();});
var $image_names = $("#image-names").val();
var $img_lists = new Array();
if($image_names != ""){
$img_lists = $image_names.split("::::");
removeArrayItem($img_lists, $img_name);
}
$("#image-names").val($img_lists.join("::::"));
}
Metronic.unblockUI(el);
});
}
});
// product attribuets
var addCustomAttrVal = function(){
$('select[name^="product-attributes-value"]').select2().on("select2:select", function(e){
$this = $(this);
$attribute_id = $.trim($this.data("attribute"));
if(e.params.data.tag == true && $attribute_id > 0){
var el = $this.closest(".portlet").children(".portlet-body");
Metronic.blockUI({ target: el, animate: true, overlayColor: '#000000' });
$.post("assets/ajax/ajax1.php", {"oper": "savenewattributevalue", "attribute-value": $.trim(e.params.data.text), "attribute-id":$attribute_id}, function(data){
Metronic.unblockUI(el);
if(data.error == "no"){
$('<option value="' + e.params.data.id + '">' + e.params.data.text + '</option>').appendTo($this);
}
}, "json");
}
});
}
$("#product-category").on('change', function(){
$(".categoey-name-span").html($(this).find(":selected").html());
});
$(".categoey-name-span").html($("#product-category").find(":selected").html());
$("#btn-attribute-load").on("click", function(e){
e.preventDefault();
var filled_field = false;
$('input[name^="product-attributes-value"]').each(function() {
if($.trim($(this).val())){
filled_field = true;
return false;
}
});
var Confirm = true;
            if(filled_field) Confirm = confirm("Previous specification data will be erased. Are you sure you want to reload specification data?");
if(Confirm){
var el = $(this).closest(".portlet").children(".portlet-body");
Metronic.blockUI({ target: el, animate: true, overlayColor: '#000000' });
$.post( "assets/ajax/ajax1.php", {'category-id':$("#product-category").val(), 'oper':'getattributebycategory'},function(data){
data = $.parseJSON(data);
if(data.error =="no"){
var $_html = "";
if(!$.isEmptyObject(data.attributeCollection)){
$i = 0;
$.each(data.attributeCollection, function(index, attributeGroup){
$_html += '<tr><td colspan="2" class="text-danger text-center"><strong>' + attributeGroup.name + '</strong></td></tr>';
$.each(attributeGroup.attributes, function(indexJ, attribute){
$_html += '<tr><td class="text-right"><label class="control-label input-sm">'+attribute.text +' : </label></td><input type="hidden" name="product-attributes-id['+$i+']" value="' + attribute.id + '"/><td>'
if(attribute.isfilter > 0){
$_html += '<select data-attribute="' + attribute.id + '" style="width:100%;" name="product-attributes-value['+$i+']" class="form-control input-sm">';
var filterArray = attribute.filters.split(",");
$.each(filterArray, function(index, filter){
$_html += '<option value = "'+filter+'">'+filter+'</option>';
});
$_html += "</select>";
}else{
$_html += '<input type="type" name="product-attributes-value['+$i+']" class="form-control input-sm">';
}
$_html += '</td></tr>';
$i++;
});
});
}else{
$_html = '<tr><td colspan="2" class="text-center">No Attributes Found. </td></tr>';
}
$("#attribute-list-table tbody").html($_html);
addCustomAttrVal();
}
Metronic.unblockUI(el);
});
}
});
addCustomAttrVal();
$("img", "#Product-iamge-list").error(function() {
$this = $(this);
$this.error = null;
$this.attr("src", "http://placehold.it/400?text=image")
});
$('[data-toggle="tooltip"]').tooltip();
        // product variants
var optiongroupMaster = $("#optiongroupmaster").html();
var optgroupContainer = $("#optiongroup-containner");
var createCombinations = function($elements){
var CombinationArray = {};
var $i = 0;
$elements.each(function(index, element){
var SelectedOptGgroup = $(element).select2("data");
if(SelectedOptGgroup.length > 0){
var temp_array = {};
$.each(SelectedOptGgroup, function(index, data){
temp_array[index] = {text:data.text, id:data.id};
});
CombinationArray[$i++] = temp_array;
}
});
var $totalCombinations = {};
var i=0, k=0;
var combinations = [];
$.each(CombinationArray, function(index1, varients){
if(i== 0){
//combinations = varients;
$.each(varients, function(index, varient){
combinations.push(varient);
});
}else{
k = 0;
tempCombination = [];
$.each(combinations, function(index2, combination){
$.each(varients, function(index3, varient){
tempCombination[k] = [];
if(i == 1){
tempCombination[k].push(combination);
}else{
$.each(combination, function(index4, subCombination){
tempCombination[k].push(subCombination);
});
}
tempCombination[k].push(varient);
k++;
});
});
combinations = tempCombination;
}
i++;
});
return combinations;
}
var loadCombination = function(){
var $combinations = createCombinations($(".product-options-select2", optgroupContainer));
var $_html = "";
$.each($combinations, function(index, combination){
$_html += '<tr><td class="text-center">';
var combination_id = [];
if(Array.isArray(combination)){
combination_length = combination.length;
$.each(combination, function(index1, varient){
$_html += '<label class="label label-sm label-success lh2"><strong>'+varient.text+'</strong></label>';
combination_id.push(varient.id);
if(index1+1 < combination_length) $_html += " X ";
});
}else{
$_html += '<label class="label label-sm label-success lh2"><strong>'+combination.text+'</strong></label>';
combination_id.push(combination.id);
}
var comb_id_text = combination_id.join("-")
$_html += '<input type="hidden" name="combination-id[]" value="'+comb_id_text+'"></td><td><input type="text" name="combination-qty['+comb_id_text+']" placeholder="Quantity" class="form-control input-sm"></td><td><input type="text" name="combination-price['+comb_id_text+']" placeholder="Price" class="form-control input-sm"></td><!---<td><button type="button" class="btn btn-sm red btn-combination-delete">Delete</button></td>---></tr>';
});
$("tbody", "#verient-form-table").html($_html);
}
var has_img_html = '';
var insertImageDiv = function(id, text){
return '<div class="col-md-3 col-sm-6 text-center" id="selected-option-'+id+'" ><div class="mt-overlay-1"><div class="item-image"><img src="http://placehold.it/400?text='+text+'"></div><div class="mt-overlay"><ul class="mt-info"><li><input type="file" class="option-img-upload-input display-hide"><input type="hidden" name="product-option-img['+id+']"><a class="btn green btn-outline" href="javascript:;"><i class="icon-magnifier"></i></a></li><li><a class="btn yellow btn-outline option-img-upload" href="javascript:;"><i class="fa fa-upload"></i></a></li></ul></div></div><div class="bg-blue label-full">'+text+'</div></div>';
}
$(".product-options-select2", optgroupContainer).select2({tags : false});
$("#add_attrbut_btn").on("click", function(){
optgroupContainer.append(optiongroupMaster);
var otgrouplength = optgroupContainer.find(".optiongroups").length - 1;
var lastOptgroup = optgroupContainer.find(".optiongroups:last");
lastOptgroup.find(".product-options-select2:last").select2({tags: false})
.on("change", function(e) {
var $this = $(this);
e.preventDefault();
loadCombination();
})
.on("select2:select", function(e){
var $this = $(this);
if($this.closest(".optiongroups").find('.product-optgroup-select option:selected').data("type") == "image")
$this.closest(".optiongroups").find(".optgroup-image-div .swatches").append(insertImageDiv(e.params.data.id, e.params.data.text));
})
.on("select2:unselect", function(e){
var $this = $(this);
$this.closest(".optiongroups").find(".optgroup-image-div .swatches").find("#selected-option-"+e.params.data.id).remove();
})
});
optgroupContainer.find(".product-options-select2").select2()
.on("change", function(e) {
e.preventDefault();
loadCombination();
})
.on("select2:select", function(e){
var $this = $(this);
if($this.closest(".optiongroups").find('.product-optgroup-select option:selected').data("type") == "image")
$this.closest(".optiongroups").find(".optgroup-image-div .swatches").append(insertImageDiv(e.params.data.id, e.params.data.text));
})
.on("select2:unselect", function(e){
var $this = $(this);
$this.closest(".optiongroups").find(".optgroup-image-div .swatches").find("#selected-option-"+e.params.data.id).remove();
});
optgroupContainer.on('click', ".optiongroup-delete-btn" , function(){
$(this).closest(".optiongroups").fadeOut(300, function(){ $(this).remove(); loadCombination();});
});
optgroupContainer.on("change", '.product-optgroup-select', function(){
var $this = $(this);
var $found = false;
optgroupContainer.find(".product-optgroup-select").each(function(index, optgroup_select){
if($this.val() == $(optgroup_select).val() && !$this.is($(optgroup_select))){
$found = true;
return false; // stop iterating once a duplicate is found
}
});
if($found){
Metronic.alert({type: 'danger', message: "This varient is already selected", closeInSeconds: 4, icon: 'warning'});
$this.val("").trigger("change");
return;
}
var optionGroupSelect = $this.closest(".optiongroups").find('.product-options-select2');
optionGroupSelect.select2("val", "");
if($.trim($this.val()) > 0){
$.post("assets/ajax/ajax1.php", {"option-group-id": $this.val(), "oper": "getoptionbyoptiongroup"}, function(data){
data = $.parseJSON(data);
if(data.error == "no"){
var $_html = "";
$.each(data.options, function(index, option){
$_html += '<option value="'+option.id+'">'+option.text+'</option>';
});
optionGroupSelect.html($_html);
$this.closest(".optiongroups").find(".optgroup-image-div .swatches").html("");
}
});
}
if($this.find("option:selected").data("type") == "image"){
$this.closest(".optiongroups").find(".optgroup-image-btn").removeClass("display-hide");
$this.closest(".optiongroups").find(".optgroup-image-div").collapse("show");
}else{
$this.closest(".optiongroups").find(".optgroup-image-btn").addClass("display-hide");
$this.closest(".optiongroups").find(".optgroup-image-div").collapse("hide");
}
});
$("#verient-form-table tbody").on("click", ".btn-combination-delete",function(){
$(this).closest("tr").fadeOut(300, function(){ $(this).remove()});
});
optgroupContainer.on("click", ".optgroup-image-btn",function(e){
e.preventDefault();
$(this).closest(".optiongroups").find(".optgroup-image-div").collapse("toggle");
});
}
var handleForms = function() {
$.validator.setDefaults({
highlight: function(element) {
$(element).closest('.form-group').addClass('has-error');
},
unhighlight: function(element) {
$(element).closest('.form-group').removeClass('has-error');
},
errorElement: 'span',
errorClass: 'help-block',<|fim▁hole|> } else {
error.insertAfter(element);
}
}
});
var product_form_validator = $("#product-form").validate();
$("#tab_images_uploader_pickfiles").on('click', function(){
$(".alert-product-image").hide();
});
$("#product-form").on('submit', function(e){
e.preventDefault();
$(".alert-product-image").hide();
if(product_form_validator.valid() === true){
var $this = $(this);
var el = $this.closest(".portlet").children(".portlet-body");
var image_vals = $("#image-names").val().trim();
var $productDescription = $('#product-description').summernote('code');
if($($productDescription).text().length < 20){ // summernote minimum-length validation
$('.summernote-error').show();
$('#product-description').summernote('focus');
Metronic.unblockUI(el);
return false;
}
if(image_vals == "" || image_vals.indexOf(".") < 5){ // image valiadation
$(".alert-product-image").fadeIn("300").show();
var $target = $('html,body');
$target.animate({scrollTop: $target.height()}, 1000);
Metronic.unblockUI(el);
return false;
}
var $data = $this.serializeArray(); // convert form to array
$data.push({name: "product-description", value: $productDescription});
var optionGroups = $(".optiongroups", "#optiongroup-containner");
if(optionGroups.length > 0){
optionGroups.each(function(index, optiongroup){
$data.push({name: "product-optgroup["+index+"]", value: $(optiongroup).find(".product-optgroup-select").val()});
$data.push({name: "product-opttype["+index+"]", value: $(optiongroup).find(".product-optgroup-select option:selected").data("type")});
$data.push({name: "product-options["+index+"]", value: $(optiongroup).find(".product-options-select2").select2("val")});
});
}
//Metronic.blockUI({ target: el, animate: true, overlayColor: '#000000' });
$.post( "assets/ajax/ajax1.php", $data, function(data){
data = jQuery.parseJSON(data);
if(data.error =="no"){
var ClickBtnVal = $("[type=submit][clicked=true]", $this).data("value");
if(ClickBtnVal == "save-exit"){
location.href="products.php?Saved=Successfully";
}else{
Metronic.alert({type: 'success', message: data.msg, closeInSeconds: 5, icon: 'check'});
}
}else{
Metronic.alert({type: 'danger', message: data.msg, closeInSeconds: 5, icon: 'warning'});
}
Metronic.unblockUI(el);
});
}
});
$("form button[type=submit], form input[type=submit]").click(function() {
$("button[type=submit], input[type=submit]", $(this).parents("form")).removeAttr("clicked");
$(this).attr("clicked", "true");
});
}
return {
//main function to initiate the module
init: function () {
handleImages();
initComponents();
handleForms();
}
};
}();<|fim▁end|> | errorPlacement: function(error, element) {
if(element.parent('.input-group').length) {
error.insertAfter(element.parent()); |
<|file_name|>FunnelChart.tsx<|end_file_name|><|fim▁begin|>/**
* @fileOverview Funnel Chart
*/
import { generateCategoricalChart } from './generateCategoricalChart';
import { Funnel } from '../numberAxis/Funnel';<|fim▁hole|>export const FunnelChart = generateCategoricalChart({
chartName: 'FunnelChart',
GraphicalChild: Funnel,
validateTooltipEventTypes: ['item'],
defaultTooltipEventType: 'item',
axisComponents: [],
defaultProps: {
layout: 'centric',
},
} as CategoricalChartOptions);<|fim▁end|> | import { CategoricalChartOptions } from '../util/types';
|
<|file_name|>ShutdownHooks.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2002-2015, the original author or authors.
*
* This software is distributable under the BSD license. See the terms of the
* BSD license in the documentation provided with this software.
*
* http://www.opensource.org/licenses/bsd-license.php
*/
package jline.internal;
import java.util.ArrayList;
import java.util.List;
import static jline.internal.Preconditions.checkNotNull;
/**
* Manages the JLine shutdown-hook thread and tasks to execute on shutdown.
*
* @author <a href="mailto:[email protected]">Jason Dillon</a>
* @since 2.7
*/
public class ShutdownHooks
{
public static final String JLINE_SHUTDOWNHOOK = "jline.shutdownhook";
private static final boolean enabled = Configuration.getBoolean(JLINE_SHUTDOWNHOOK, true);
private static final List<Task> tasks = new ArrayList<Task>();
private static Thread hook;
public static synchronized <T extends Task> T add(final T task) {
checkNotNull(task);
// If not enabled ignore
if (!enabled) {
Log.debug("Shutdown-hook is disabled; not installing: ", task);
return task;
}
// Install the hook thread if needed
if (hook == null) {
hook = addHook(new Thread("JLine Shutdown Hook")
{
@Override
public void run() {
runTasks();
}
});
}
// Track the task
Log.debug("Adding shutdown-hook task: ", task);
tasks.add(task);
return task;
}
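    // A minimal usage sketch (caller-side names are assumptions, not part of
    // this class): register a cleanup task and drop it once it is unneeded.
    //
    //   ShutdownHooks.Task restore = ShutdownHooks.add(new ShutdownHooks.Task() {
    //       public void run() throws Exception {
    //           terminal.restore(); // 'terminal' is a hypothetical resource
    //       }
    //   });
    //   // later, if the cleanup is no longer needed:
    //   ShutdownHooks.remove(restore);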
private static synchronized void runTasks() {
Log.debug("Running all shutdown-hook tasks");
<|fim▁hole|> for (Task task : tasks.toArray(new Task[tasks.size()])) {
Log.debug("Running task: ", task);
try {
task.run();
}
catch (Throwable e) {
Log.warn("Task failed", e);
}
}
tasks.clear();
}
private static Thread addHook(final Thread thread) {
Log.debug("Registering shutdown-hook: ", thread);
try {
Runtime.getRuntime().addShutdownHook(thread);
}
catch (AbstractMethodError e) {
// JDK 1.3+ only method. Bummer.
Log.debug("Failed to register shutdown-hook", e);
}
return thread;
}
public static synchronized void remove(final Task task) {
checkNotNull(task);
// ignore if not enabled or hook never installed
if (!enabled || hook == null) {
return;
}
// Drop the task
tasks.remove(task);
// If there are no more tasks, then remove the hook thread
if (tasks.isEmpty()) {
removeHook(hook);
hook = null;
}
}
private static void removeHook(final Thread thread) {
Log.debug("Removing shutdown-hook: ", thread);
try {
Runtime.getRuntime().removeShutdownHook(thread);
}
catch (AbstractMethodError e) {
// JDK 1.3+ only method. Bummer.
Log.debug("Failed to remove shutdown-hook", e);
}
catch (IllegalStateException e) {
// The VM is shutting down, not a big deal; ignore
}
}
/**
* Essentially a {@link Runnable} which allows running to throw an exception.
*/
public static interface Task
{
void run() throws Exception;
}
}<|fim▁end|> | // Iterate through copy of tasks list |
<|file_name|>sndhdr.py<|end_file_name|><|fim▁begin|>"""Routines to help recognizing sound files.
Function whathdr() recognizes various types of sound file headers.
It understands almost all headers that SOX can decode.
The return tuple contains the following items, in this order:
- file type (as SOX understands it)
- sampling rate (0 if unknown or hard to decode)
- number of channels (0 if unknown or hard to decode)
- number of frames in the file (-1 if unknown or hard to decode)
- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW
If the file doesn't have a recognizable type, it returns None.
If the file can't be opened, OSError is raised.
To compute the total time, divide the number of frames by the
sampling rate (a frame contains a sample for each channel).
Function what() calls whathdr(). (It used to also use some
heuristics for raw data, but this doesn't work very well.)
Finally, the function test() is a simple main program that calls
what() for all files mentioned on the argument list. For directory
arguments it calls what() for all files in that directory. Default
argument is "." (testing all files in the current directory). The
option -r tells it to recurse down directories found inside
explicitly given directories.
"""
# The file structure is top-down except that the test program and its
# subroutine come last.
__all__ = ['what', 'whathdr']
from collections import namedtuple
SndHeaders = namedtuple('SndHeaders',
'filetype framerate nchannels nframes sampwidth')
SndHeaders.filetype.__doc__ = ("""The value for type indicates the data type
and will be one of the strings 'aifc', 'aiff', 'au','hcom',
'sndr', 'sndt', 'voc', 'wav', '8svx', 'sb', 'ub', or 'ul'.""")
SndHeaders.framerate.__doc__ = ("""The sampling_rate will be either the actual
value or 0 if unknown or difficult to decode.""")
SndHeaders.nchannels.__doc__ = ("""The number of channels or 0 if it cannot be
determined or if the value is difficult to decode.""")
SndHeaders.nframes.__doc__ = ("""The value for frames will be either the number
of frames or -1.""")
SndHeaders.sampwidth.__doc__ = ("""Either the sample size in bits or
'A' for A-LAW or 'U' for u-LAW.""")
def what(filename):
"""Guess the type of a sound file."""
res = whathdr(filename)
return res
def whathdr(filename):
"""Recognize sound headers."""
with open(filename, 'rb') as f:
h = f.read(512)
for tf in tests:
res = tf(h, f)
if res:
return SndHeaders(*res)
return None
#-----------------------------------#
# Subroutines per sound header type #
#-----------------------------------#
tests = []
def test_aifc(h, f):
import aifc
if not h.startswith(b'FORM'):
return None
if h[8:12] == b'AIFC':
fmt = 'aifc'
elif h[8:12] == b'AIFF':
fmt = 'aiff'
else:
return None
f.seek(0)
try:
a = aifc.open(f, 'r')
except (EOFError, aifc.Error):
return None
return (fmt, a.getframerate(), a.getnchannels(),
a.getnframes(), 8 * a.getsampwidth())
tests.append(test_aifc)
def test_au(h, f):
if h.startswith(b'.snd'):
func = get_long_be
elif h[:4] in (b'\0ds.', b'dns.'):
func = get_long_le
else:
return None
filetype = 'au'
hdr_size = func(h[4:8])
data_size = func(h[8:12])
encoding = func(h[12:16])
rate = func(h[16:20])
nchannels = func(h[20:24])
sample_size = 1 # default
if encoding == 1:
sample_bits = 'U'
elif encoding == 2:
sample_bits = 8
elif encoding == 3:
sample_bits = 16
sample_size = 2
else:
sample_bits = '?'
frame_size = sample_size * nchannels
if frame_size:
nframe = data_size / frame_size
else:
nframe = -1
return filetype, rate, nchannels, nframe, sample_bits
tests.append(test_au)
def test_hcom(h, f):
if h[65:69] != b'FSSD' or h[128:132] != b'HCOM':
return None
divisor = get_long_be(h[144:148])
if divisor:
rate = 22050 / divisor
else:
rate = 0
return 'hcom', rate, 1, -1, 8
tests.append(test_hcom)
def test_voc(h, f):
if not h.startswith(b'Creative Voice File\032'):
return None
sbseek = get_short_le(h[20:22])
rate = 0
if 0 <= sbseek < 500 and h[sbseek] == 1:
ratecode = 256 - h[sbseek+4]
if ratecode:
rate = int(1000000.0 / ratecode)
return 'voc', rate, 1, -1, 8
tests.append(test_voc)
def test_wav(h, f):
import wave
# 'RIFF' <len> 'WAVE' 'fmt ' <len>
if not h.startswith(b'RIFF') or h[8:12] != b'WAVE' or h[12:16] != b'fmt ':
return None<|fim▁hole|> w = wave.openfp(f, 'r')
except (EOFError, wave.Error):
return None
return ('wav', w.getframerate(), w.getnchannels(),
w.getnframes(), 8*w.getsampwidth())
tests.append(test_wav)
def test_8svx(h, f):
if not h.startswith(b'FORM') or h[8:12] != b'8SVX':
return None
# Should decode it to get #channels -- assume always 1
return '8svx', 0, 1, 0, 8
tests.append(test_8svx)
def test_sndt(h, f):
if h.startswith(b'SOUND'):
nsamples = get_long_le(h[8:12])
rate = get_short_le(h[20:22])
return 'sndt', rate, 1, nsamples, 8
tests.append(test_sndt)
def test_sndr(h, f):
if h.startswith(b'\0\0'):
rate = get_short_le(h[2:4])
if 4000 <= rate <= 25000:
return 'sndr', rate, 1, -1, 8
tests.append(test_sndr)
#-------------------------------------------#
# Subroutines to extract numbers from bytes #
#-------------------------------------------#
def get_long_be(b):
return (b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3]
def get_long_le(b):
return (b[3] << 24) | (b[2] << 16) | (b[1] << 8) | b[0]
def get_short_be(b):
return (b[0] << 8) | b[1]
def get_short_le(b):
return (b[1] << 8) | b[0]
#--------------------#
# Small test program #
#--------------------#
def test():
import sys
recursive = 0
if sys.argv[1:] and sys.argv[1] == '-r':
del sys.argv[1:2]
recursive = 1
try:
if sys.argv[1:]:
testall(sys.argv[1:], recursive, 1)
else:
testall(['.'], recursive, 1)
except KeyboardInterrupt:
sys.stderr.write('\n[Interrupted]\n')
sys.exit(1)
def testall(list, recursive, toplevel):
import sys
import os
for filename in list:
if os.path.isdir(filename):
print(filename + '/:', end=' ')
if recursive or toplevel:
print('recursing down:')
import glob
names = glob.glob(os.path.join(filename, '*'))
testall(names, recursive, 0)
else:
print('*** directory (use -r) ***')
else:
print(filename + ':', end=' ')
sys.stdout.flush()
try:
print(what(filename))
except OSError:
print('*** not found ***')
if __name__ == '__main__':
test()<|fim▁end|> | f.seek(0)
try: |
<|file_name|>testes_notificacao.py<|end_file_name|><|fim▁begin|># coding=utf-8
# ---------------------------------------------------------------
# Developer: Arannã Sousa Santos
# Month: 12
# Year: 2015
# Project: pagseguro_xml
# e-mail: [email protected]
# ---------------------------------------------------------------
import logging
from pagseguro_xml.notificacao import ApiPagSeguroNotificacao_v3, CONST_v3
logger = logging.basicConfig(level=logging.DEBUG)
PAGSEGURO_API_AMBIENTE = u'sandbox'
PAGSEGURO_API_EMAIL = u'[email protected]'<|fim▁hole|>
CHAVE_NOTIFICACAO = u'AA0000-AA00A0A0AA00-AA00AA000000-AA0000' # this key is from production
api = ApiPagSeguroNotificacao_v3(ambiente=CONST_v3.AMBIENTE.SANDBOX)
PAGSEGURO_API_TOKEN = PAGSEGURO_API_TOKEN_PRODUCAO
ok, retorno = api.consulta_notificacao_transacao_v3(PAGSEGURO_API_EMAIL, PAGSEGURO_API_TOKEN, CHAVE_NOTIFICACAO)
if ok:
print u'-' * 50
print retorno.xml
print u'-' * 50
for a in retorno.alertas:
print a
else:
print u'Motivo do erro:', retorno<|fim▁end|> | PAGSEGURO_API_TOKEN_PRODUCAO = u''
PAGSEGURO_API_TOKEN_SANDBOX = u'' |
<|file_name|>test-03.logfile.js<|end_file_name|><|fim▁begin|>'use strict';
var
lab = require('lab'),
describe = lab.describe,
it = lab.it,
demand = require('must'),
bole = require('bole'),
fs = require('fs'),
LogOutput = require('../lib/output-logfile'),
mkdirp = require('mkdirp'),
path = require('path'),
rimraf = require('rimraf')
;
var tmpdir = './tmp';
describe('logfile output', function()
{
var output;
var mockopts = { path: path.join(tmpdir, 'foo.log'), name: 'test-1' };
lab.before(function(done)
{
mkdirp(tmpdir, done);
});
it('demands an options object', function(done)
{
function shouldThrow() { return new LogOutput(); }
shouldThrow.must.throw(/options/);
done();
});
it('demands a name object', function(done)
{
function shouldThrow() { return new LogOutput({ path: '../tmp'}); }
shouldThrow.must.throw(/name/);
done();
});
it('can be constructed', function(done)
{
output = new LogOutput(mockopts);
output.must.be.an.object();
output.must.be.instanceof(LogOutput);
done();
});
it('creates a logger client', function(done)
{
output.must.have.property('client');
output.client.must.be.truthy();
output.client.must.have.property('info');
output.client.info.must.be.a.function();
done();
});
it('emits to its logfile', function(done)
{
output.write({ test: 'yes'}, function()
{
fs.readFile(mockopts.path, function(err, data)
{
data = data.toString('utf8');
var first = data.split('\n')[0];
var written = JSON.parse(first);
written.must.be.an.object();<|fim▁hole|> written.level.must.equal('info');
written.name.must.equal('test-1');
written.test.must.equal('yes');
done();
});
});
});
it('the path option is optional', function(done)
{
var consoleOut = new LogOutput({ name: 'test-2' });
consoleOut.write({ test: 'yes'}, function()
{
done();
});
});
it('has a useful toString() implementation', function(done)
{
var str = output.toString();
str.must.equal('[ logfile @ tmp/foo.log ]');
done();
});
lab.after(function(done)
{
rimraf(tmpdir, done);
});
});<|fim▁end|> | |
<|file_name|>k.py<|end_file_name|><|fim▁begin|>n = int(input())
st = [(-1, -2)]  # stack of (start index, height); the sentinel entry is never popped
s = 0
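# Largest rectangle in a histogram via a monotonic stack: each entry records
# where a run of bars at least that tall began. E.g. for the hypothetical
# input "2 1 2" the answer is 3 (height 1 spanning all three bars).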
for i, h in enumerate(map(int, input().split() + [' -1'])):
if h > st[-1][1]:
st.append((i, h))<|fim▁hole|> while st[-1][1] >= h:
r = st.pop()
s = max(s, (i - r[0]) * r[1])
st.append((r[0], h))
print(s)<|fim▁end|> | else: |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from flask import render_template, flash, redirect, request, url_for, abort
from flask_login import login_user, logout_user, login_required, current_user
from . import loans
from forms import LoanApplicationForm, ApproveLoan, RepayLoan
from ..models import db
from ..models import Loan, User
from datetime import date
@loans.route('/new_loan', methods=['GET', 'POST'])
@login_required
def request_loan():
if not current_user.is_borrower:
abort(403)
elif not current_user.is_approved:
abort(404)
elif current_user.is_owing:
flash("You cannot request a new loan if you're still due!")
return redirect(url_for('loans.view'))
elif current_user.has_requested_loan:
flash("You cannot request a new loan if your last loan hasn't been approved!")
return redirect(url_for('loans.view'))
else:
form = LoanApplicationForm()
if form.validate_on_submit():
loan = Loan(loan_amt=form.loan_amt.data,
user=current_user._get_current_object())
if loan.loan_amt > loan.user.max_credit_amt:
flash('You can only borrow to a maximum of %s' %
loan.user.max_credit_amt)
return redirect(url_for('loans.request_loan'))
loan.is_requested = True
loan.user.has_requested_loan = True
db.session.add(loan)
db.session.commit()
flash(
'Success. Your loan application has been submitted. View it below.')
return redirect(url_for('loans.view'))
return render_template('loans/request_loan.html',
form=form, title="New Loan")
@loans.route('/view_history')
@login_required
def view():
if not current_user.is_borrower:
abort(403)<|fim▁hole|> else:
loans = (Loan.query
.filter(Loan.user_id == current_user.id)
.order_by(Loan.requested_on.desc())
).all()
return render_template('loans/view.html',
loans=loans, title="My Loan Reqests")
@loans.route('/view_payment_history')
@login_required
def view_payment_history():
if not current_user.is_borrower:
abort(403)
if not current_user.is_approved:
abort(404)
else:
loans = (Loan.query
.filter(Loan.user_id == current_user.id)
.order_by(Loan.requested_on.desc())
).all()
return render_template('loans/view-payments.html',
loans=loans, title="My Loan Reqests")
'''Views for repaying a loan once the borrower is creditworthy.'''
@loans.route('/repay/loan/<id>', methods=['GET', 'POST'])
@login_required
def repay_loan(id):
if not current_user.is_borrower:
abort(403)
loan = Loan.query.filter_by(id=id).first()
if loan is None:
abort(404)
if not loan.is_approved:
flash("You cannot repay a loan that hasn't been approved")
return redirect(url_for('loans.view'))
else:
form = RepayLoan()
if current_user.is_borrower and form.validate_on_submit():
loan.my_funds = form.my_funds.data
flash('Your payment has been received. Please wait while we confirm it.')
return redirect(url_for('loans.view'))
return render_template('loans/repay-loan.html', form=form, loan=loan)
@loans.route('/clear/loan/balance/<id>', methods=['GET', 'POST'])
@login_required
def clear_loan_balance(id):
if not current_user.is_borrower:
abort(403)
loan = Loan.query.filter_by(id=id).first()
if loan is None:
abort(404)
if not loan.is_approved:
flash("You cannot repay a loan that hasn't been approved")
return redirect(url_for('loans.view'))
form = RepayLoan()
if current_user.is_borrower and form.validate_on_submit():
loan.my_funds = form.my_funds.data
flash('Your payment has been received. Please wait while we confirm it.')
return redirect(url_for('loans.view'))
return render_template('loans/repay-loan.html', form=form, loan=loan)<|fim▁end|> | if not current_user.is_approved:
abort(404) |
<|file_name|>BibColoringPreferencePage.java<|end_file_name|><|fim▁begin|>/*
* $Id$
*
* Copyright (c) 2004-2005 by the TeXlapse Team.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package net.sourceforge.texlipse.properties;
import net.sourceforge.texlipse.TexlipsePlugin;
import net.sourceforge.texlipse.bibeditor.BibColorProvider;
import org.eclipse.jface.preference.ColorFieldEditor;
import org.eclipse.jface.preference.FieldEditorPreferencePage;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPreferencePage;
/**
* The page to set syntax highlighting colors.
*
* @author kimmo
*/
public class BibColoringPreferencePage
extends FieldEditorPreferencePage
implements IWorkbenchPreferencePage {
/**
* Creates an instance of the "syntax highlighting colors" -preference page.
*/
public BibColoringPreferencePage() {
super(GRID);
setPreferenceStore(TexlipsePlugin.getDefault().getPreferenceStore());
setDescription(TexlipsePlugin.getResourceString("preferenceBibColorPageDescription"));
}
/**
* Creates the property editing UI components of this page.
*/
protected void createFieldEditors() {
addField(new ColorFieldEditor(BibColorProvider.DEFAULT, TexlipsePlugin.getResourceString("preferenceBibColorTextLabel"), getFieldEditorParent()));
addField(new ColorFieldEditor(BibColorProvider.TYPE, TexlipsePlugin.getResourceString("preferenceBibColorTypeLabel"), getFieldEditorParent()));
addField(new ColorFieldEditor(BibColorProvider.KEYWORD, TexlipsePlugin.getResourceString("preferenceBibColorKeywordLabel"), getFieldEditorParent()));
addField(new ColorFieldEditor(BibColorProvider.STRING, TexlipsePlugin.getResourceString("preferenceBibColorStringLabel"), getFieldEditorParent()));
// addField(new ColorFieldEditor(BibColorProvider.MULTI_LINE_COMMENT, TexlipsePlugin.getResourceString("preferenceBibColorMLCommentLabel"), getFieldEditorParent()));
addField(new ColorFieldEditor(BibColorProvider.SINGLE_LINE_COMMENT, TexlipsePlugin.getResourceString("preferenceBibColorCommentLabel"), getFieldEditorParent()));
}
/**
* Nothing to do.<|fim▁hole|> }
}<|fim▁end|> | */
public void init(IWorkbench workbench) { |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub mod math;
pub mod system;
pub mod renderer;
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
}
#[test]
fn fs_tests() {
use system::filesystem;
use system::config;<|fim▁hole|> // read random file
let s = filesystem::read_file("data/config.json");
// read and decode JSON config file
let conf = config::Config::new("data/config.json");
let fwinwidth = conf.get_f64("iWindowWidth");
let fwinheight = conf.get_u64("iWindowHeight");
println!("read window size : {} {}", fwinwidth, fwinheight);
}
}<|fim▁end|> | |
<|file_name|>imtools.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import logging
import inspect
import sys
import numpy as np
#from matplotlib import _cntr as cntr
#from contours.core import shapely_formatter as shapely_fmt
#from contours.quad import QuadContourGenerator
from astropy.coordinates import Angle
from astropy import constants as c
from astropy import wcs
from astropy.io import fits
from matplotlib.patheffects import withStroke
class Polygon:
"""
Generic polygon class.
Note: code based on:
http://code.activestate.com/recipes/578381-a-point-in-polygon-program-sw-sloan-algorithm/
Parameters
----------
x : array
A sequence of nodal x-coords.
y : array
A sequence of nodal y-coords.
"""
def __init__(self, x, y):
self.logger = logging.getLogger(__name__)
self.logger.info("Creating Polygon")
if len(x) != len(y):
raise IndexError('x and y must be equally sized.')
self.x = np.asfarray(x)
self.y = np.asfarray(y)
# Closes the polygon if were open
x1, y1 = x[0], y[0]
xn, yn = x[-1], y[-1]
if x1 != xn or y1 != yn:
self.x = np.concatenate((self.x, [x1]))
self.y = np.concatenate((self.y, [y1]))
# Anti-clockwise coordinates
if _det(self.x, self.y) < 0:
self.x = self.x[::-1]
self.y = self.y[::-1]
def get_vertices(self):
"""
Returns the vertices of the polygon as a 2xNvert list.
"""
return [[i,j] for i,j in zip(self.x, self.y)]
def is_inside(self, xpoint, ypoint, smalld=1e-12):
"""
Check if point is inside a general polygon.
An improved version of the algorithm of Nordbeck and Rydstedt.
REF: SLOAN, S.W. (1985): A point-in-polygon program. Adv. Eng.
Software, Vol 7, No. 1, pp 45-47.
Parameters
----------
xpoint : array or float
The x-coord of the point to be tested.
ypoint : array or float
The y-coords of the point to be tested.
smalld : float
Tolerance within which point is considered to be on a side.
Returns
-------
mindst : array or float
The distance from the point to the nearest point of the polygon:
If mindst < 0 then point is outside the polygon.
If mindst = 0 then point is on a side of the polygon.
If mindst > 0 then point is inside the polygon.
"""
xpoint = np.asfarray(xpoint)
ypoint = np.asfarray(ypoint)
# Scalar to array
if xpoint.shape is tuple():
xpoint = np.array([xpoint], dtype=float)
ypoint = np.array([ypoint], dtype=float)
scalar = True
else:
scalar = False
# Check consistency
if xpoint.shape != ypoint.shape:
raise IndexError('x and y must be equally sized.')
# If snear = True: Dist to nearest side < nearest vertex
# If snear = False: Dist to nearest vertex < nearest side
snear = np.ma.masked_all(xpoint.shape, dtype=bool)
# Initialize arrays
mindst = np.ones_like(xpoint, dtype=float) * np.inf
j = np.ma.masked_all(xpoint.shape, dtype=int)
x = self.x
y = self.y
n = len(x) - 1 # Number of sides/vertices defining the polygon
# Loop over each side defining polygon
for i in range(n):
d = np.ones_like(xpoint, dtype=float) * np.inf
# Start of side has coords (x1, y1)
# End of side has coords (x2, y2)
# Point has coords (xpoint, ypoint)
x1 = x[i]
y1 = y[i]
x21 = x[i+1] - x1
y21 = y[i+1] - y1
x1p = x1 - xpoint
y1p = y1 - ypoint
# Points on infinite line defined by
# x = x1 + t * (x1 - x2)
# y = y1 + t * (y1 - y2)
# where
# t = 0 at (x1, y1)
# t = 1 at (x2, y2)
# Find where normal passing through (xpoint, ypoint) intersects
# infinite line
t = -(x1p * x21 + y1p * y21) / (x21 ** 2 + y21 ** 2)
tlt0 = t < 0
tle1 = (0 <= t) & (t <= 1)
# Normal intersects side
d[tle1] = ((x1p[tle1] + t[tle1] * x21) ** 2 +
(y1p[tle1] + t[tle1] * y21) ** 2)
# Normal does not intersects side
# Point is closest to vertex (x1, y1)
# Compute square of distance to this vertex
d[tlt0] = x1p[tlt0] ** 2 + y1p[tlt0] ** 2
# Store distances
mask = d < mindst
mindst[mask] = d[mask]
j[mask] = i
# Point is closer to (x1, y1) than any other vertex or side
snear[mask & tlt0] = False
# Point is closer to this side than to any other side or vertex
snear[mask & tle1] = True
if np.ma.count(snear) != snear.size:
raise IndexError('Error computing distances')
mindst **= 0.5
# Point is closer to its nearest vertex than its nearest side, check if
# nearest vertex is concave.
# If the nearest vertex is concave then point is inside the polygon,
# else the point is outside the polygon.
jo = j.copy()
jo[j==0] -= 1
area = _det([x[j+1], x[j], x[jo-1]], [y[j+1], y[j], y[jo-1]])
mindst[~snear] = np.copysign(mindst, area)[~snear]
# Point is closer to its nearest side than to its nearest vertex, check
# if point is to left or right of this side.
# If point is to left of side it is inside polygon, else point is
# outside polygon.
area = _det([x[j], x[j+1], xpoint], [y[j], y[j+1], ypoint])
mindst[snear] = np.copysign(mindst, area)[snear]
# Point is on side of polygon
mindst[np.fabs(mindst) < smalld] = 0
# If input values were scalar then the output should be too
if scalar:
mindst = float(mindst)
return mindst
def make_mask(self, shape, **kwargs):
"""
Creates a mask of a given shape using the Polygon as boundaries.
All points inside the Polygon will have a value of 1.
:param shape: Shape of the output mask.
:type shape: tuple
:returns: Mask of the Polygon.
:rtype: array
"""
mask = np.zeros(shape)
xmax = int(round(max(self.x)))
xmin = int(round(min(self.x)))
ymax = int(round(max(self.y)))
ymin = int(round(min(self.y)))
for j in range(ymax - ymin):
for i in range(xmax - xmin):
if self.is_inside(i+xmin, j+ymin, **kwargs) >= 0:
self.logger.debug("Point ({0},{1}) ".format(i+xmin,j+ymin) +
"is inside the Polygon")
mask[j+ymin,i+xmin] = 1
return mask
def _det(xvert, yvert):
"""
Compute twice the area of the triangle defined by points using the
determinant formula.
Parameters
----------
xvert : array
A vector of nodal x-coords.
yvert : array
A vector of nodal y-coords.
Returns
-------
area : float
Twice the area of the triangle defined by the points:
area is positive if points define polygon in anticlockwise order.
area is negative if points define polygon in clockwise order.
area is zero if at least two of the points are coincident or if
all points are collinear.
"""
xvert = np.asfarray(xvert)
yvert = np.asfarray(yvert)
x_prev = np.concatenate(([xvert[-1]], xvert[:-1]))
y_prev = np.concatenate(([yvert[-1]], yvert[:-1]))
return np.sum(yvert * x_prev - xvert * y_prev, axis=0)
def beam_area_pix(head):
"""
Computes the beam area in pixels.
It uses an approximation accurate to
within 5%.
K. Rohlfs and T.L. Wilson, 'Tools of Radio Astronomy', third revised and enlarged edition, 1996, Springer, page 190-191.
:param head: Image header.
:type head: Fits header
:returns: Number of pixels inside the beam.
:rtype: float
"""
return 1.133*float(head['BMAJ'])*float(head['BMIN'])/(abs(head['CDELT1'])*abs(head['CDELT2']))
def beam_area(head):
"""
Computes the beam area in steradians.
It uses an approximation accurate to
within 5%.
K. Rohlfs and T.L. Wilson, 'Tools of Radio Astronomy', third revised and enlarged edition, 1996, Springer, page 190-191.
:param head: Image header.
:type head: Fits header
:returns: Beam area in sr.
:rtype: float
"""
return np.pi/(4.*np.log(2.))*np.deg2rad(float(head['BMAJ']))*np.deg2rad(float(head['BMIN']))
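# Quick sanity check (illustrative numbers only): a circular 10" beam,
# i.e. BMAJ = BMIN = 10/3600 deg ~ 4.85e-5 rad, gives
# pi/(4 ln 2) * (4.85e-5)**2 ~ 2.7e-9 sr.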
def check_ascending(ra, dec, vel, verbose=False):
"""
Check if the RA, DEC and VELO axes of a cube are in ascending order.
It returns a step for every axes which will make it go in ascending order.
:param ra: RA axis.
:param dec: DEC axis.
:param vel: Velocity axis.
:returns: Step for RA, DEC and velocity.
:rtype: int,int,int
"""
if vel[0] > vel[1]:
vs = -1
if verbose:
print("Velocity axis is inverted.")
else:
vs = 1
if ra[0] > ra[1]:
rs = -1
if verbose:
print("RA axis is inverted.")
else:
rs = 1
if dec[0] > dec[1]:
ds = -1
if verbose:
print("DEC axis is inverted.")
else:
ds = 1
return rs, ds, vs
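# Hypothetical usage: flip cube axes so that all three ascend
# (FITS cube data are indexed [velocity, dec, ra]):
#
#   rs, ds, vs = check_ascending(ra, dec, vel)
#   cube = cube[::vs, ::ds, ::rs]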
def compare_headers(head1, head2):
"""
Compares the size and element width of 2 fits headers.
"""
axes = np.array([False, False, False])
for i in range(3):
if head1['CDELT{0}'.format(i+1)] == head2['CDELT{0}'.format(i+1)] \
and head1['NAXIS{0}'.format(i+1)] == head2['NAXIS{0}'.format(i+1)]:
axes[i] = True
if np.prod(axes) == 1:
return True
else:
return False
def draw_beam(header, ax, **kwargs):
"""
Draws an elliptical beam in a pywcsgrid2 axes object.
"""
bmaj = header.get("BMAJ")
bmin = header.get("BMIN")
pa = header.get("BPA")
pixx = header.get("CDELT1")
pixy = header.get("CDELT2")
ax.add_beam_size(bmaj/np.abs(pixx), bmin/np.abs(pixy), pa, loc=3, **kwargs)
def get_axis(header, axis):
"""
Constructs a cube axis.
:param header: Fits cube header.
:type header: pyfits header
:param axis: Axis to reconstruct.
:type axis: int
:returns: cube axis
:rtype: numpy array
"""
axis = str(axis)
dx = header.get("CDELT" + axis)
try:
dx = float(dx)
p0 = header.get("CRPIX" + axis)
x0 = header.get("CRVAL" + axis)
except TypeError:
dx = 1
p0 = 1
x0 = 1
n = header.get("NAXIS" + axis)
p0 -= 1 # Fits files index start at 1, not for python.
axis = np.arange(x0 - p0*dx, x0 - p0*dx + n*dx, dx)
if len(axis) > n:
axis = axis[:-1]
return axis
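# Worked sketch (hypothetical header): CRVAL=10, CRPIX=1, CDELT=0.5, NAXIS=4
# yields the axis [10.0, 10.5, 11.0, 11.5].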
def get_fits3axes(head):
"""
"""
ra = get_axis(head, 1)
de = get_axis(head, 2)
ve = get_axis(head, 3)
return ra , de, ve
#def get_contours(x, y, z, levs, segment=0, verbose=False):
# """
# Creates an array with the contour vertices.
# """
#
# c = QuadContourGenerator.from_rectilinear(x, y, z, shapely_fmt)
# <|fim▁hole|># for i,l in enumerate(levs):
# #res = c.trace(l)
# res = c.filled_contour(min=1-l, max=None)
# if res:
# nseg = len(res) // 2
# segments.append(res[:nseg][segment])
# if verbose:
# print(res[:nseg][segment])
# else:
# pass
#
# # Where should we add missing corners?
# #if x.max() in np.asarray(segments)[0][:,0] \
# #and x.max() not in np.asarray(segments)[1][:,0]:
# #if np.asarray(segments)[1][:,1]
# #segments[1].append()
#
# return np.asarray(segments)
def K2Jy(head, freq=0):
"""
Computes the conversion factor Jy/K.
:param head: Image header.
:type head: Fits header
:returns: Factor to convert K to Jy.
:rtype: float
"""
omega = beam_area(head)
if freq == 0:
try:
freq = head['RESTFREQ']
except KeyError:
fcol = [s for s in head.keys() if "FREQ" in s]
freq = head[fcol[0]]
k2jy = 2.*c.k_B.cgs.value/np.power(c.c.cgs.value/freq, 2.)*omega/1e-23
return k2jy
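# Illustrative use (header keywords assumed to be present):
#
#   k2jy = K2Jy(head)          # falls back to the first FREQ-like keyword if RESTFREQ is absent
#   flux_jy = t_kelvin * k2jy  # scale a brightness temperature to Jy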
def read_casa_polys(filename, image=None, wcs=None):
"""
Reads casa region file and returns Polygon objects.
Code adapted from FACTOR
https://github.com/lofar-astron/factor/blob/reimage/factor/scripts/make_clean_mask.py
https://github.com/lofar-astron/factor/commit/667b77e8690e9536a61afbe3ed673a3e16889bb1
:param filename: Path to file containing the casa region.
:type filename: string
:param image: pyrap.images.image object, with properly defined coordsys.
:type image: pyrap.images.image
:returns: list of Polygons.
:rtype: Polygon
"""
logger = logging.getLogger(__name__)
with open(filename, 'r') as f:
lines = f.readlines()
polys = []
for line in lines:
if line.startswith('poly'):
poly_str_temp = line.split('[[')[1]
poly_str = poly_str_temp.split(']]')[0]
poly_str_list = poly_str.split('], [')
ra = []
dec = []
for pos in poly_str_list:
RAstr, Decstr = pos.split(',')
ra.append(Angle(RAstr, unit='hourangle').to('deg').value)
dec.append(Angle(Decstr.replace('.', ':', 2), unit='deg').to('deg').value)
# Convert to image-plane Polygon
xvert = []
yvert = []
for RAvert, Decvert in zip(np.array(ra), np.array(dec)):
if image:
try:
pixels = image.topixel([0, 1, Decvert*np.pi/180.0,
RAvert*np.pi/180.0])
except:
pixels = image.topixel([1, 1, Decvert*np.pi/180.0,
RAvert*np.pi/180.0])
xvert.append(pixels[2]) # x -> Dec
yvert.append(pixels[3]) # y -> RA
elif wcs:
pixels = wcs.all_world2pix([[RAvert,
Decvert]], 0)[0]
xvert.append(pixels[0])
yvert.append(pixels[1])
polys.append(Polygon(xvert, yvert))
elif line.startswith('ellipse'):
ell_str_temp = line.split('[[')[1]
if '], 0.0' not in ell_str_temp and '], 90.0' not in ell_str_temp:
logger.error('Only position angles of 0.0 and 90.0 are supported for CASA '
'regions of type "ellipse"')
sys.exit(1)
if '], 0.0' in ell_str_temp:
ell_str = ell_str_temp.split('], 0.0')[0]
pa = 0
else:
ell_str = ell_str_temp.split('], 90.0')[0]
pa = 90
ell_str_list = ell_str.split('], [')
# Ellipse center
RAstr, Decstr = ell_str_list[0].split(',')
ra_center = Angle(RAstr, unit='hourangle').to('deg').value
dec_center = Angle(Decstr.replace('.', ':', 2), unit='deg').to('deg').value
if image:
pixels = image.topixel([0, 1, dec_center*np.pi/180.0,
ra_center*np.pi/180.0])
x_center = pixels[2] # x -> Dec
y_center = pixels[3] # y -> RA
elif wcs:
pixels = wcs.all_world2pix([[ra_center, dec_center]], 0)[0]
x_center = pixels[0] # x -> Dec
y_center = pixels[1] # y -> RA
# Ellipse semimajor and semiminor axes
a_str, b_str = ell_str_list[1].split(',')
a_deg = float(a_str.split('arcsec')[0])/3600.0
b_deg = float(b_str.split('arcsec')[0])/3600.0
if image:
pixels1 = image.topixel([0, 1, (dec_center-a_deg/2.0)*np.pi/180.0,
ra_center*np.pi/180.0])
a_pix1 = pixels1[2]
pixels2 = image.topixel([0, 1, (dec_center+a_deg/2.0)*np.pi/180.0,
ra_center*np.pi/180.0])
a_pix2 = pixels2[2]
elif wcs:
pixels1 = wcs.all_world2pix([[ra_center, dec_center-a_deg/2.0]], 0)[0]
a_pix1 = pixels1[1]
pixels2 = wcs.all_world2pix([[ra_center, dec_center+a_deg/2.0]], 0)[0]
a_pix2 = pixels2[1]
a_pix = abs(a_pix2 - a_pix1)
ex = []
ey = []
for th in range(0, 360, 1):
if pa == 0:
# semimajor axis is along x-axis
ex.append(a_pix * np.cos(th * np.pi / 180.0)
+ x_center) # x -> Dec
ey.append(a_pix * b_deg / a_deg * np.sin(th * np.pi / 180.0) + y_center) # y -> RA
elif pa == 90:
# semimajor axis is along y-axis
ex.append(a_pix * b_deg / a_deg * np.cos(th * np.pi / 180.0)
+ x_center) # x -> Dec
ey.append(a_pix * np.sin(th * np.pi / 180.0) + y_center) # y -> RA
polys.append(Polygon(ex, ey))
elif line.startswith('box'):
poly_str_temp = line.split('[[')[1]
poly_str = poly_str_temp.split(']]')[0]
poly_str_list = poly_str.split('], [')
ra = []
dec = []
for pos in poly_str_list:
RAstr, Decstr = pos.split(',')
ra.append(Angle(RAstr, unit='hourangle').to('deg').value)
dec.append(Angle(Decstr.replace('.', ':', 2), unit='deg').to('deg').value)
ra.insert(1, ra[0])
dec.insert(1, dec[1])
ra.append(ra[2])
dec.append(dec[0])
# Convert to image-plane Polygon
xvert = []
yvert = []
for RAvert, Decvert in zip(np.array(ra), np.array(dec)):
if image:
try:
pixels = image.topixel([0, 1, Decvert*np.pi/180.0,
RAvert*np.pi/180.0])
except:
pixels = image.topixel([1, 1, Decvert*np.pi/180.0,
RAvert*np.pi/180.0])
xvert.append(pixels[2]) # x -> Dec
yvert.append(pixels[3]) # y -> RA
elif wcs:
pixels = wcs.all_world2pix([[RAvert, Decvert]], 0)[0]
xvert.append(pixels[0])
yvert.append(pixels[1])
polys.append(Polygon(xvert, yvert))
elif line.startswith('#'):
pass
else:
logger.error('Only CASA regions of type "poly", "box", or "ellipse" are supported')
sys.exit(1)
return polys
def remove_nans(contours, indx=0):
"""
Removes NaN elements from a contour list produced with get_contours().
"""
mask = np.isnan(contours[indx][:,0])
contours[indx] = contours[indx][~mask]
return contours
def sector_mask(shape, centre, radius, angle_range):
"""
Return a boolean mask for a circular sector. The start/stop angles in
`angle_range` should be given in clockwise order.
"""
x,y = np.ogrid[:shape[0],:shape[1]]
cx,cy = centre
tmin,tmax = np.deg2rad(angle_range)
# ensure stop angle > start angle
if tmax < tmin:
tmax += 2*np.pi
# convert cartesian --> polar coordinates
r2 = (x-cx)*(x-cx) + (y-cy)*(y-cy)
theta = np.arctan2(x-cx, y-cy) - tmin
# wrap angles between 0 and 2*pi
theta %= (2*np.pi)
# circular mask
circmask = r2 <= radius*radius
# angular mask
anglemask = theta <= (tmax-tmin)
return circmask*anglemask
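# Hypothetical usage: keep the 0-90 degree sector of a 100x100 image
# around its centre, out to a radius of 40 pixels ('image' is assumed):
#
#   mask = sector_mask(image.shape, (50, 50), 40, (0, 90))
#   image[~mask] = 0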
def set_tick_bkgd(ax, color, lw, alpha):
"""
Applies a contour of color to the tick labels of ax.
"""
labs = ax.cax.get_yaxis().get_ticklabels()
for lab in labs:
lab.set_path_effects([withStroke(foreground=color, linewidth=lw)])
lab.set_alpha(alpha)
def set_wcs(head):
"""
Build a WCS object given the
spatial header parameters.
"""
# Create a new WCS object.
# The number of axes must be set from the start.
w = wcs.WCS(naxis=2)
w.wcs.crpix = [head['CRPIX1'], head['CRPIX2']]
w.wcs.cdelt = [head['CDELT1'], head['CDELT2']]
w.wcs.crval = [head['CRVAL1'], head['CRVAL2']]
w.wcs.ctype = [head['CTYPE1'], head['CTYPE2']]
return w<|fim▁end|> | # #c = cntr.Cntr(x, y, z)
#
# segments = [] |
<|file_name|>feed_parse_extractDlscanlationsCom.py<|end_file_name|><|fim▁begin|>def extractDlscanlationsCom(item):
'''
Parser for 'dlscanlations.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None<|fim▁hole|>
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False<|fim▁end|> | |
<|file_name|>packing.js<|end_file_name|><|fim▁begin|>var global = require('../../global');
module.exports = function (packing, offset) {
var items = [].concat.apply([], packing.items);
var iso = "FM.FP-GJ-15-003";
// var number = packing.code;
// var colorName = packing.colorName;
var orderType = (packing.orderType || "").toString().toLowerCase() === "printing" ? "Printing" : "Finishing";
var locale = global.config.locale;
var buyerName = packing.buyerName ? packing.buyerName : "";
var colorType = packing.colorType ? packing.colorType : "";
var construction = packing.construction ? packing.construction : "";
var buyerAddress = packing.buyerAddress ? packing.buyerAddress : "";
var moment = require('moment');
moment.locale(locale.name);
var footerStack = [];
var footerStackValue = [];
var footerStackDivide = [];
if ((packing.orderType || "").toString().toLowerCase() === "solid") {
footerStack = ['Buyer', "Jenis Order", "Jenis Warna", 'Konstruksi', 'Tujuan'];
footerStackValue = [buyerName, orderType, colorType, construction, buyerAddress];
footerStackDivide = [':', ":", ":", ':', ':'];
} else if ((packing.orderType || "").toString().toLowerCase() === "printing") {
footerStack = ['Buyer', "Jenis Order", 'Konstruksi', 'Design/Motif', 'Tujuan'];
footerStackValue = [buyerName, orderType, construction, packing.designNumber && packing.designCode ? `${packing.designNumber} - ${packing.designCode}` : "", buyerAddress];
footerStackDivide = [':', ":", ":", ':', ':'];
} else {
footerStack = ['Buyer', "Jenis Order", 'Konstruksi', 'Tujuan'];
footerStackValue = [buyerName, orderType, construction, buyerAddress];
footerStackDivide = [':', ":", ":", ':'];
}
var header = [{
columns: [{
columns: [{
width: '*',
stack: [{
text: 'BON PENYERAHAN PRODUKSI',
style: ['size15'],
alignment: "center"
}]
}]
}]
}];
var line = [{
canvas: [{
type: 'line',
x1: 0,
y1: 5,
x2: 555,
y2: 5,
lineWidth: 0.5
}
]
}];
var subheader = [{
columns: [{
columns: [{
width: '*',
stack: [{
text: iso,
style: ['size09', 'bold'],
alignment: "right"
}
]
}]
}]
}];
var subheader2 = [{
columns: [{
width: '60%',
columns: [{
width: '*',
stack: ['Kepada Yth. Bagian Penjualan ', `Bersama ini kami kirimkan hasil produksi: Inspeksi ${orderType}`],
}],
style: ['size08']
}
,
{
width: '5%',
text: ''
},
{
width: '40%',
columns: [{
width: '40%',
stack: ['No', 'Sesuai No Order'],
}, {
width: '5%',
stack: [':', ':'],
}, {
width: '*',
stack: [packing.code, packing.productionOrderNo],
}],
style: ['size08']
}
]
}];
var thead = [{
text: 'NO',
style: 'tableHeader'
},
{
text: 'BARANG',
style: 'tableHeader'
},
{
text: `Jumlah (${packing.packingUom})`,
style: 'tableHeader'
},
{
text: 'Panjang (Meter)',
style: 'tableHeader'
},
{
text: 'Panjang Total (Meter)',
style: 'tableHeader'
},
{
text: 'Berat Total (Kg)',
style: 'tableHeader'
},
{
text: 'Keterangan',
style: 'tableHeader'
}
];
var gradeItem = "";
var totalJumlah = 0;
var totalBerat = 0;
var totalPanjang = 0;
var totalPanjangTotal = 0;
var totalBeratTotal = 0;
var tbody = items.map(function (item, index) {
// if (item.grade.toLowerCase() == "a" || item.grade.toLowerCase() == "b" || item.grade.toLowerCase() == "c") {
if (item.grade.toLowerCase() == "a") {
gradeItem = "BQ";
} else {
gradeItem = "BS";
}
totalJumlah += item.quantity;
totalBerat += item.weight;
totalPanjang += item.length;
totalPanjangTotal += item.length * item.quantity;
totalBeratTotal += item.weight * item.quantity;
return [{
text: (index + 1).toString() || '',
style: ['size08', 'center']
},
{
text: packing.colorName + ' ' + item.lot + ' ' + item.grade + ' ' + gradeItem,
style: ['size08', 'center']
},
{
text: item.quantity,
style: ['size08', 'center']
},
{
text: item.length,
style: ['size08', 'center']
},
{
text: (item.length * item.quantity).toFixed(2),
style: ['size08', 'center']
},
{
text: (item.weight * item.quantity).toFixed(2),
style: ['size08', 'center']
},
{
text: item.remark,
style: ['size08', 'center']
}
];
});
var tfoot = [[{
text: " ",
style: ['size08', 'center']
}, {
text: "Total",
style: ['size08', 'center']
}, {
text: totalJumlah.toFixed(2),
style: ['size08', 'center']
}, {
text: totalPanjang.toFixed(2),
style: ['size08', 'center']
}, {
text: totalPanjangTotal.toFixed(2),
style: ['size08', 'center']
}, {
text: totalBeratTotal.toFixed(2),
style: ['size08', 'center']
}, "",]];
tbody = tbody.length > 0 ? tbody : [
[{
text: "tidak ada barang",
style: ['size08', 'center'],
colSpan: 6
}, "", "", "", "", "", ""]
];
var table = [{
table: {
widths: ['5%', '35%', '10%', '10%', '10%', '10%', '20%'],
headerRows: 1,
body: [].concat([thead], tbody, tfoot),
}
}];
var footer = [{
stack: [{
columns: [{
columns: [{
width: '15%',
stack: footerStack
}, {
width: '2%',
stack: footerStackDivide
}, {
width: '*',
stack: footerStackValue
}]
}]
}
],
style: ['size08']
},
];
var footer2 = ['\n', {
columns: [{
width: '25%',
stack: ['\n', 'Diterima oleh:', '\n\n\n\n', '( )'],
style: ['center']
},
{
width: '25%',
stack: [],
},
{
width: '25%',
stack: [],
},
{
width: '25%',
stack: [`Sukoharjo, ${moment(packing.date).add(offset, 'h').format(locale.date.format)} `, 'Diserahkan oleh :', '\n\n\n\n', `( ${packing._createdBy} )`],
style: ['center']
}],
style: ['size08']
}];
var packingPDF = {
pageSize: 'A5',
pageOrientation: 'landscape',
pageMargins: 20,
// content: [].concat(header, line, subheader, subheader2, table, footer),
content: [].concat(header, line, subheader, subheader2, table, footer, footer2),
styles: {
size06: {
fontSize: 8
},
size07: {
fontSize: 9
},
size08: {
fontSize: 10
},
size09: {
fontSize: 11
},
size10: {
fontSize: 12
},
size15: {
fontSize: 17
},
size30: {
fontSize: 32
},
bold: {
bold: true
},
center: {
alignment: 'center'
},
left: {
alignment: 'left'
},
right: {
alignment: 'right'<|fim▁hole|> },
justify: {
alignment: 'justify'
},
tableHeader: {
bold: true,
fontSize: 10,
color: 'black',
alignment: 'center'
}
}
};
return packingPDF;
}<|fim▁end|> | |
<|file_name|>Chat.java<|end_file_name|><|fim▁begin|>/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smack.chat;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import org.jivesoftware.smack.SmackException.NotConnectedException;
import org.jivesoftware.smack.StanzaCollector;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smack.util.StringUtils;
import org.jxmpp.jid.EntityJid;
/**
* A chat is a series of messages sent between two users. Each chat has a unique
* thread ID, which is used to track which messages are part of a particular
* conversation. Some messages are sent without a thread ID, and some clients
* don't send thread IDs at all. Therefore, if a message without a thread ID
* arrives it is routed to the most recently created Chat with the message
* sender.
*
* @author Matt Tucker<|fim▁hole|>
private final ChatManager chatManager;
private final String threadID;
private final EntityJid participant;
private final Set<ChatMessageListener> listeners = new CopyOnWriteArraySet<>();
/**
* Creates a new chat with the specified user and thread ID.
*
* @param chatManager the chatManager the chat will use.
* @param participant the user to chat with.
* @param threadID the thread ID to use.
*/
Chat(ChatManager chatManager, EntityJid participant, String threadID) {
if (StringUtils.isEmpty(threadID)) {
throw new IllegalArgumentException("Thread ID must not be null");
}
this.chatManager = chatManager;
this.participant = participant;
this.threadID = threadID;
}
/**
* Returns the thread id associated with this chat, which corresponds to the
* <tt>thread</tt> field of XMPP messages. This method may return <tt>null</tt>
* if no thread ID is associated with this Chat.
*
* @return the thread ID of this chat.
*/
public String getThreadID() {
return threadID;
}
/**
* Returns the name of the user the chat is with.
*
* @return the name of the user the chat is occurring with.
*/
public EntityJid getParticipant() {
return participant;
}
/**
* Sends the specified text as a message to the other chat participant.
* This is a convenience method for:
*
* <pre>
* Message message = chat.createMessage();
* message.setBody(messageText);
* chat.sendMessage(message);
* </pre>
*
* @param text the text to send.
* @throws NotConnectedException
* @throws InterruptedException
*/
public void sendMessage(String text) throws NotConnectedException, InterruptedException {
Message message = new Message();
message.setBody(text);
sendMessage(message);
}
/**
* Sends a message to the other chat participant. The thread ID, recipient,
* and message type of the message will automatically be set to those of this chat.
*
* @param message the message to send.
* @throws NotConnectedException
* @throws InterruptedException
*/
public void sendMessage(Message message) throws NotConnectedException, InterruptedException {
// Force the recipient, message type, and thread ID since the user elected
// to send the message through this chat object.
message.setTo(participant);
message.setType(Message.Type.chat);
message.setThread(threadID);
chatManager.sendMessage(this, message);
}
/**
* Adds a stanza(/packet) listener that will be notified of any new messages in the
* chat.
*
* @param listener a stanza(/packet) listener.
*/
public void addMessageListener(ChatMessageListener listener) {
if (listener == null) {
return;
}
// TODO these references should be weak.
listeners.add(listener);
}
public void removeMessageListener(ChatMessageListener listener) {
listeners.remove(listener);
}
/**
* Closes the Chat and removes all references to it from the {@link ChatManager}. The chat will
* be unusable when this method returns, so it's recommended to drop all references to the
* instance right after calling {@link #close()}.
*/
public void close() {
chatManager.closeChat(this);
listeners.clear();
}
/**
* Returns an unmodifiable set of all of the listeners registered with this chat.
*
* @return an unmodifiable set of all of the listeners registered with this chat.
*/
public Set<ChatMessageListener> getListeners() {
return Collections.unmodifiableSet(listeners);
}
/**
* Creates a {@link org.jivesoftware.smack.StanzaCollector} which will accumulate the Messages
* for this chat. Always cancel StanzaCollectors when finished with them as they will accumulate
* messages indefinitely.
*
* @return the StanzaCollector which returns Messages for this chat.
*/
public StanzaCollector createCollector() {
return chatManager.createStanzaCollector(this);
}
/**
* Delivers a message directly to this chat, which will add the message
* to the collector and deliver it to all listeners registered with the
* Chat. This is used by the XMPPConnection class to deliver messages
* without a thread ID.
*
* @param message the message.
*/
void deliver(Message message) {
// Because the collector and listeners are expecting a thread ID with
// a specific value, set the thread ID on the message even though it
// probably never had one.
message.setThread(threadID);
for (ChatMessageListener listener : listeners) {
listener.processMessage(this, message);
}
}
@Override
public String toString() {
return "Chat [(participant=" + participant + "), (thread=" + threadID + ")]";
}
@Override
public int hashCode() {
int hash = 1;
hash = hash * 31 + threadID.hashCode();
hash = hash * 31 + participant.hashCode();
return hash;
}
@Override
public boolean equals(Object obj) {
return obj instanceof Chat
&& threadID.equals(((Chat) obj).getThreadID())
&& participant.equals(((Chat) obj).getParticipant());
}
}<|fim▁end|> | * @deprecated use <code>org.jivesoftware.smack.chat2.Chat</code> from <code>smack-extensions</code> instead.
*/
@Deprecated
public class Chat { |
<|file_name|>privateStaticNameShadowing.ts<|end_file_name|><|fim▁begin|>// @target: es2015
class X {
<|fim▁hole|> constructor() {
X.#m();
}
static #m() {
const X: any = {}; // shadow the class
const _a: any = {}; // shadow the first generated var
X.#m(); // Should check with X as the receiver with _b as the class constructor
return 1;
}
}<|fim▁end|> | static #f = X.#m();
|
<|file_name|>IndexManager.java<|end_file_name|><|fim▁begin|>package listener;
/**
* Created by pengshu on 2016/11/11.<|fim▁hole|> */
public class IndexManager implements EntryListener {
/**
* A blog post was created.
*
* @param entryevent
*/
@Override
public void entryAdded(EntryEvent entryevent) {
System.out.println("IndexManager 处理 博客文章被创建事件。");
}
/**
* A blog post was deleted.
*
* @param entryevent
*/
@Override
public void entryDeleted(EntryEvent entryevent) {
System.out.println("IndexManager 处理 博客文章被删除事件。");
}
/**
* 博客文章被修改
*
* @param entryevent
*/
@Override
public void entryModified(EntryEvent entryevent) {
System.out.println("IndexManager 处理 博客文章被修改事件。");
}
}<|fim▁end|> | |
<|file_name|>MultiHandler.java<|end_file_name|><|fim▁begin|>/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* MultiHandler.java
* Copyright (C) 2017 University of Waikato, Hamilton, NZ
*/
package adams.core.logging;
import java.util.logging.Handler;
import java.util.logging.LogRecord;
/**
* Combines multiple handlers.
*
* @author FracPete (fracpete at waikato dot ac dot nz)
* @version $Revision$
*/
public class MultiHandler
extends AbstractLogHandler {
/** the logging handlers to use. */
protected Handler[] m_Handlers;
/**
* Initializes the members.
*/
@Override
protected void initialize() {
super.initialize();
setHandlers(new Handler[0]);
}
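// Illustrative setup (not part of the original file): fan a single log
// record out to several java.util.logging handlers. The handler instances
// below are hypothetical; FileHandler's constructor may throw IOException.
//
//     MultiHandler multi = new MultiHandler();
//     multi.addHandler(new java.util.logging.ConsoleHandler());
//     multi.addHandler(new java.util.logging.FileHandler("%t/app.log"));
//     java.util.logging.Logger.getLogger("app").addHandler(multi);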
/**
* Sets the handlers to use.
*
* @param value the handlers
*/
public void setHandlers(Handler[] value) {
m_Handlers = value;<|fim▁hole|> /**
* Returns the current handlers.
*
* @return the handlers
*/
public Handler[] getHandlers() {
return m_Handlers;
}
/**
* Adds the specified handler.
*
* @param value the handler
*/
public void addHandler(Handler value) {
Handler[] handlers;
int i;
handlers = new Handler[m_Handlers.length + 1];
for (i = 0; i < m_Handlers.length; i++)
handlers[i] = m_Handlers[i];
handlers[handlers.length - 1] = value;
m_Handlers = handlers;
}
/**
* Removes the specified handler.
*
* @param index the handler index
*/
public void removeHandler(int index) {
Handler[] handlers;
int i;
int n;
handlers = new Handler[m_Handlers.length - 1];
n = 0;
for (i = 0; i < m_Handlers.length; i++) {
if (i == index)
continue;
handlers[n] = m_Handlers[i];
n++;
}
m_Handlers = handlers;
}
/**
* Flush any buffered output.
*/
@Override
public void flush() {
super.flush();
if (m_Handlers != null) {
for (Handler h : m_Handlers)
h.flush();
}
}
/**
* Close the <tt>Handler</tt> and free all associated resources.
* <p>
* The close method will perform a <tt>flush</tt> and then close the
* <tt>Handler</tt>. After close has been called this <tt>Handler</tt>
* should no longer be used. Method calls may either be silently
* ignored or may throw runtime exceptions.
*
* @exception SecurityException if a security manager exists and if
* the caller does not have <tt>LoggingPermission("control")</tt>.
*/
@Override
public void close() throws SecurityException {
if (m_Handlers != null) {
for (Handler h : m_Handlers)
h.close();
}
super.close();
}
/**
* Publish a <tt>LogRecord</tt>.
* <p>
* The logging request was made initially to a <tt>Logger</tt> object,
* which initialized the <tt>LogRecord</tt> and forwarded it here.
* <p>
* The <tt>Handler</tt> is responsible for formatting the message, when and
* if necessary. The formatting should include localization.
*
* @param record description of the log event. A null record is
* silently ignored and is not published
*/
@Override
protected void doPublish(LogRecord record) {
for (Handler h: m_Handlers)
h.publish(record);
}
/**
* Compares the handler with itself.
*
* @param o the other handler
* @return less than 0, equal to 0, or greater than 0 if the
* handler is less, equal to, or greater than this one
*/
public int compareTo(Handler o) {
int result;
MultiHandler other;
int i;
result = super.compareTo(o);
if (result == 0) {
other = (MultiHandler) o;
result = new Integer(getHandlers().length).compareTo(other.getHandlers().length);
if (result == 0) {
for (i = 0; i < getHandlers().length; i++) {
if ((getHandlers()[i] instanceof AbstractLogHandler) && (other.getHandlers()[i] instanceof AbstractLogHandler))
result = ((AbstractLogHandler) getHandlers()[i]).compareTo(other.getHandlers()[i]);
else
result = new Integer(getHandlers()[i].hashCode()).compareTo(other.getHandlers()[i].hashCode());
if (result != 0)
break;
}
}
}
return result;
}
}<|fim▁end|> | reset();
}
|
<|file_name|>LocalUriHandler.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rsocket.transport.local;
import io.rsocket.transport.ClientTransport;
import io.rsocket.transport.ServerTransport;
import io.rsocket.uri.UriHandler;
import java.net.URI;
import java.util.Optional;
<|fim▁hole|> public Optional<ClientTransport> buildClient(URI uri) {
if ("local".equals(uri.getScheme())) {
return Optional.of(LocalClientTransport.create(uri.getSchemeSpecificPart()));
}
return UriHandler.super.buildClient(uri);
}
@Override
public Optional<ServerTransport> buildServer(URI uri) {
if ("local".equals(uri.getScheme())) {
return Optional.of(LocalServerTransport.create(uri.getSchemeSpecificPart()));
}
return UriHandler.super.buildServer(uri);
}
}<|fim▁end|> | public class LocalUriHandler implements UriHandler {
@Override |
<|file_name|>adalogo.py<|end_file_name|><|fim▁begin|>width = 75
height = 75
data = [
0x00,0x00,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x01,0xf0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x03,0xf0,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x03,0xf8,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x07,0xf8,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x0f,0xf8,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x1f,0xfc,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x1f,0xfc,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x3f,0xfc,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x7f,0xfe,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x7f,0xfe,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0xff,0xfe,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x01,0xff,0xff,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x03,0xff,0xff,0x00,0x00,0x00,0x00,<|fim▁hole|> 0x00,0x00,0x00,0x03,0xff,0xff,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x07,0xff,0xff,0x80,0x00,0x00,0x00,
0x00,0x00,0x00,0x07,0xff,0xff,0x80,0x00,0x00,0x00,
0x00,0x00,0x00,0x07,0xff,0xff,0x80,0x00,0x00,0x00,
0x00,0x00,0x00,0x0f,0xff,0xff,0x80,0x00,0x00,0x00,
0x00,0x00,0x00,0x0f,0xff,0xff,0x80,0x00,0x00,0x00,
0x7f,0xff,0xfc,0x0f,0xff,0xff,0x80,0x00,0x00,0x00,
0xff,0xff,0xff,0x0f,0xff,0xff,0x80,0x00,0x00,0x00,
0xff,0xff,0xff,0xcf,0xff,0xff,0x80,0x00,0x00,0x00,
0xff,0xff,0xff,0xef,0xff,0xff,0x80,0x00,0x00,0x00,
0x7f,0xff,0xff,0xf7,0xff,0xff,0x80,0x00,0x00,0x00,
0x3f,0xff,0xff,0xff,0xfb,0xff,0x00,0x00,0x00,0x00,
0x3f,0xff,0xff,0xff,0xf1,0xff,0x3f,0xf0,0x00,0x00,
0x1f,0xff,0xff,0xff,0xf1,0xfe,0xff,0xfe,0x00,0x00,
0x0f,0xff,0xff,0xff,0xf1,0xff,0xff,0xff,0xc0,0x00,
0x0f,0xff,0xff,0xff,0xe1,0xff,0xff,0xff,0xf8,0x00,
0x07,0xff,0xff,0xff,0xe1,0xff,0xff,0xff,0xff,0x00,
0x03,0xff,0xff,0xff,0xe1,0xff,0xff,0xff,0xff,0xc0,
0x01,0xff,0xff,0x3f,0xe1,0xff,0xff,0xff,0xff,0xe0,
0x01,0xff,0xfe,0x07,0xe3,0xff,0xff,0xff,0xff,0xe0,
0x00,0xff,0xff,0x03,0xe3,0xff,0xff,0xff,0xff,0xe0,
0x00,0x7f,0xff,0x00,0xf7,0xff,0xff,0xff,0xff,0xc0,
0x00,0x3f,0xff,0xc0,0xff,0xc0,0x7f,0xff,0xff,0x80,
0x00,0x1f,0xff,0xf0,0xff,0x00,0x3f,0xff,0xff,0x00,
0x00,0x0f,0xff,0xff,0xff,0x00,0x7f,0xff,0xfc,0x00,
0x00,0x07,0xff,0xff,0xff,0x01,0xff,0xff,0xf8,0x00,
0x00,0x01,0xff,0xff,0xff,0xff,0xff,0xff,0xf0,0x00,
0x00,0x00,0x7f,0xff,0xff,0xff,0xff,0xff,0xc0,0x00,
0x00,0x00,0x1f,0xfc,0x7f,0xff,0xff,0xff,0x80,0x00,
0x00,0x00,0x7f,0xf8,0x78,0xff,0xff,0xfe,0x00,0x00,
0x00,0x00,0xff,0xf0,0x78,0x7f,0xff,0xfc,0x00,0x00,
0x00,0x01,0xff,0xe0,0xf8,0x7f,0xff,0xf0,0x00,0x00,
0x00,0x03,0xff,0xc0,0xf8,0x3f,0xdf,0xc0,0x00,0x00,
0x00,0x07,0xff,0xc1,0xfc,0x3f,0xe0,0x00,0x00,0x00,
0x00,0x07,0xff,0x87,0xfc,0x1f,0xf0,0x00,0x00,0x00,
0x00,0x0f,0xff,0xcf,0xfe,0x1f,0xf8,0x00,0x00,0x00,
0x00,0x0f,0xff,0xff,0xff,0x1f,0xf8,0x00,0x00,0x00,
0x00,0x1f,0xff,0xff,0xff,0x1f,0xfc,0x00,0x00,0x00,
0x00,0x1f,0xff,0xff,0xff,0xff,0xfc,0x00,0x00,0x00,
0x00,0x1f,0xff,0xff,0xff,0xff,0xfe,0x00,0x00,0x00,
0x00,0x3f,0xff,0xff,0xff,0xff,0xfe,0x00,0x00,0x00,
0x00,0x3f,0xff,0xff,0xff,0xff,0xfe,0x00,0x00,0x00,
0x00,0x3f,0xff,0xff,0x3f,0xff,0xfe,0x00,0x00,0x00,
0x00,0x7f,0xff,0xff,0x3f,0xff,0xfe,0x00,0x00,0x00,
0x00,0x7f,0xff,0xff,0x3f,0xff,0xfe,0x00,0x00,0x00,
0x00,0x7f,0xff,0xfe,0x3f,0xff,0xfe,0x00,0x00,0x00,
0x00,0xff,0xff,0xfc,0x1f,0xff,0xfe,0x00,0x00,0x00,
0x00,0xff,0xff,0xf8,0x1f,0xff,0xfe,0x00,0x00,0x00,
0x00,0xff,0xff,0xe0,0x0f,0xff,0xfe,0x00,0x00,0x00,
0x01,0xff,0xff,0x80,0x07,0xff,0xfe,0x00,0x00,0x00,
0x01,0xff,0xfc,0x00,0x03,0xff,0xfe,0x00,0x00,0x00,
0x01,0xff,0xe0,0x00,0x01,0xff,0xfe,0x00,0x00,0x00,
0x01,0xff,0x00,0x00,0x00,0xff,0xfe,0x00,0x00,0x00,
0x00,0xf8,0x00,0x00,0x00,0x7f,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x1f,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x0f,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x07,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x01,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x00,0x00
]<|fim▁end|> | |
<|file_name|>pmod_led8.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from . import Pmod_DevMode
from . import PMOD_SWCFG_DIOALL
from . import PMOD_DIO_BASEADDR
from . import PMOD_DIO_TRI_OFFSET
from . import PMOD_DIO_DATA_OFFSET
from . import PMOD_CFG_DIO_ALLOUTPUT
from . import PMOD_NUM_DIGITAL_PINS
__author__ = "Graham Schelle, Giuseppe Natale, Yun Rock Qu"
__copyright__ = "Copyright 2016, Xilinx"
__email__ = "[email protected]"
class Pmod_LED8(Pmod_DevMode):
"""This class controls a single LED on the LED8 Pmod.
The Pmod LED8 (PB 200-163) has eight high-brightness LEDs. Each LED can be
individually illuminated from a logic high signal.
Attributes<|fim▁hole|> ----------
microblaze : Pmod
Microblaze processor instance used by this module.
iop_switch_config :list
Microblaze processor IO switch configuration (8 integers).
index : int
Index of the pin on LED8, starting from 0.
"""
def __init__(self, mb_info, index):
"""Return a new instance of a LED object.
Parameters
----------
mb_info : dict
A dictionary storing Microblaze information, such as the
IP name and the reset name.
index: int
The index of the pin in a Pmod, starting from 0.
"""
if index not in range(PMOD_NUM_DIGITAL_PINS):
raise ValueError("Valid pin indexes are 0 - {}."
.format(PMOD_NUM_DIGITAL_PINS-1))
super().__init__(mb_info, PMOD_SWCFG_DIOALL)
self.index = index
self.start()
self.write_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_TRI_OFFSET,
PMOD_CFG_DIO_ALLOUTPUT)
def toggle(self):
"""Flip the bit of a single LED.
Note
----
The LED will be turned off if it is on. Similarly, it will be turned
on if it is off.
Returns
-------
None
"""
curr_val = self.read_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET)
new_val = curr_val ^ (0x1 << self.index)
self._set_leds_values(new_val)
def on(self):
"""Turn on a single LED.
Returns
-------
None
"""
curr_val = self.read_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET)
new_val = curr_val | (0x1 << self.index)
self._set_leds_values(new_val)
def off(self):
"""Turn off a single LED.
Returns
-------
None
"""
curr_val = self.read_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET)
new_val = curr_val & (0xff ^ (0x1 << self.index))
self._set_leds_values(new_val)
def write(self, value):
"""Set the LED state according to the input value
Note
----
This method does not take into account the current LED state.
Parameters
----------
value : int
Turn on the LED if value is 1; turn it off if value is 0.
Returns
-------
None
"""
if value not in (0, 1):
raise ValueError("LED8 can only write 0 or 1.")
if value:
self.on()
else:
self.off()
def read(self):
"""Retrieve the LED state.
Returns
-------
int
The data (0 or 1) read out from the selected pin.
"""
curr_val = self.read_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET)
return (curr_val >> self.index) & 0x1
def _set_leds_values(self, value):
"""Set the state for all the LEDs.
Note
----
Should not be used directly. User should rely on toggle(), on(),
off(), write(), and read() instead.
Parameters
----------
value : int
The state of all the LEDs encoded in one single value
Returns
-------
None
"""
self.write_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET, value)<|fim▁end|> | |
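# Illustrative usage (not part of the original file): drive pin 0 of the
# LED8 Pmod. The mb_info dictionary is hypothetical and board-specific.
#
#     led = Pmod_LED8(mb_info, 0)
#     led.on()
#     led.toggle()       # LED is now off again
#     print(led.read())  # -> 0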
<|file_name|>RedirectForwardSpec.js<|end_file_name|><|fim▁begin|>var helper = require("../../specRuntime/testHelper"),
Browser = require("zombie");
describe("A redirect page", () => {
helper.startServerBeforeAll(__filename, [
"./TemporaryRedirectPage",
"./TemporaryRedirectWithDocumentPage",
"./PermanentRedirectPage",
"./PermanentRedirectWithDocumentPage",
"./FinalPage",
]);
helper.stopServerAfterAll();
describe("redirects temporarily to the right page", () => {
helper.testWithDocument("/temporaryRedirect", (document) => {
expect(document.location.pathname).toMatch("/final");
});
});
describe("contains the correct HTML after temp redirect", () => {
helper.testWithDocument("/temporaryRedirect", (document) => {
expect(document.querySelector("#main").innerHTML).toMatch("FinalPage");
expect(document.querySelector("body").innerHTML).not.toMatch(/TemporaryRedirectPage/);
});
});
it("gets the right status code for a temp redirect", (done) => {
var browser = new Browser();
browser.silent = true;
browser.on("redirect", (request, response, redirectRequest) => { //eslint-disable-line no-unused-vars
expect(response.status).toBe(302);
done();
});
browser.visit(`http://localhost:${helper.getPort()}/temporaryRedirect`);
});
it("gets the right body for a temp redirect", done => {
(new Browser).on("redirect", (req, res) => {
res.text().then(text => {
expect(text).toMatch('<p>Found. Redirecting to <a href="/final">/final</a></p>');
expect(text).not.toMatch('TemporaryRedirectPage');
done();
});
})
.visit(`http://localhost:${helper.getPort()}/temporaryRedirect`);
});
it("gets the right body for a temp redirect with document", done => {
(new Browser).on("redirect", (req, res) => {
res.text().then(text => {
expect(text).not.toMatch('<p>Found. Redirecting to <a href="/final">/final</a></p>');
expect(text).toMatch('TemporaryRedirectWithDocumentPage');
done();
});
})
.visit(`http://localhost:${helper.getPort()}/temporaryRedirectWithDocument`);
});
describe("redirects temporarily to the right page with document", () => {
helper.testWithDocument("/temporaryRedirectWithDocument", (document) => {
expect(document.location.pathname).toMatch("/final");
});
});
describe("redirects permanently to the right page", () => {
helper.testWithDocument("/permanentRedirect", (document) => {
expect(document.location.pathname).toMatch("/final");
});
});
describe("contains the correct HTML after permanent redirect", () => {
helper.testWithDocument("/permanentRedirect", (document) => {
expect(document.querySelector("#main").innerHTML).toMatch("FinalPage");
expect(document.querySelector("body").innerHTML).not.toMatch(/PermanentRedirectPage/);
});
});
it("gets the right status code for a permanent redirect", (done) => {
var browser = new Browser();
browser.silent = true;
browser.on("redirect", (request, response, redirectRequest) => { //eslint-disable-line no-unused-vars
expect(response.status).toBe(301);
done();
});
browser.visit(`http://localhost:${helper.getPort()}/permanentRedirect`);
});<|fim▁hole|> expect(text).toMatch('<p>Moved Permanently. Redirecting to <a href="/final">/final</a></p>');
expect(text).not.toMatch('PermanentRedirectPage');
done();
});
})
.visit(`http://localhost:${helper.getPort()}/permanentRedirect`);
});
it("gets the right body for a permanent redirect with document", done => {
(new Browser).on("redirect", (req, res) => {
res.text().then(text => {
expect(text).not.toMatch('<p>Moved Permanently. Redirecting to <a href="/final">/final</a></p>');
expect(text).toMatch('PermanentRedirectWithDocumentPage');
done();
});
})
.visit(`http://localhost:${helper.getPort()}/permanentRedirectWithDocument`);
});
describe("redirects permanently to the right page with document", () => {
helper.testWithDocument("/permanentRedirectWithDocument", (document) => {
expect(document.location.pathname).toMatch("/final");
});
});
});
describe("A forward page", () => {
helper.startServerBeforeAll(__filename, [
"./FinalPage",
"./ForwardPage",
]);
helper.stopServerAfterAll();
describe("does NOT change its URL", () => {
helper.testWithDocument("/forward", (document) => {
expect(document.location.pathname).toMatch("/forward");
});
});
describe("contains the correct HTML after forward", () => {
helper.testWithDocument("/forward", (document) => {
expect(document.querySelector("#main").innerHTML).toMatch("FinalPage");
expect(document.querySelector("body").innerHTML).not.toMatch(/ForwardPage/);
});
});
it ("gets a 200 status code and doesn't redirect", (done) => {
var browser = new Browser();
browser.silent = true;
browser.on("redirect", (request, response, redirectRequest) => { //eslint-disable-line no-unused-vars
fail("Forward page redirected when it shouldn't have.");
done();
});
browser.visit(`http://localhost:${helper.getPort()}/forward`).then(() => {
expect(browser.resources[0].response.status).toBe(200);
done();
});
});
});<|fim▁end|> |
it("gets the right body for a permanent redirect", done => {
(new Browser).on("redirect", (req, res) => {
res.text().then(text => { |
<|file_name|>dirac-rss-list-status.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
dirac-rss-list-status
Script that dumps the DB information for the elements into the standard output.
It returns information concerning the StatusType and Status attributes.
Usage:
dirac-rss-list-status
--element= Element family to be Synchronized ( Site, Resource or Node )
--elementType= ElementType narrows the search; None if default
--name= ElementName; None if default
--tokenOwner= Owner of the token; None if default
--statusType= StatusType; None if default
--status= Status; None if default
Verbosity:
-o LogLevel=LEVEL NOTICE by default, levels available: INFO, DEBUG, VERBOSE..
"""
from DIRAC import gLogger, exit as DIRACExit, version
from DIRAC.Core.Base import Script
from DIRAC.ResourceStatusSystem.Client import ResourceStatusClient
from DIRAC.Core.Utilities.PrettyPrint import printTable
__RCSID__ = '$Id:$'
subLogger = None
switchDict = {}
def registerSwitches():
'''
Registers all switches that can be used while calling the script from the
command line interface.
'''
switches = (
( 'element=', 'Element family to be Synchronized ( Site, Resource or Node )' ),
( 'elementType=', 'ElementType narrows the search; None if default' ),
( 'name=', 'ElementName; None if default' ),
( 'tokenOwner=', 'Owner of the token; None if default' ),
( 'statusType=', 'StatusType; None if default' ),
( 'status=', 'Status; None if default' ),
)
for switch in switches:
Script.registerSwitch( '', switch[ 0 ], switch[ 1 ] )
def registerUsageMessage():
'''
Takes the script __doc__ and adds the DIRAC version to it
'''
hLine = ' ' + '='*78 + '\n'
usageMessage = hLine
usageMessage += ' DIRAC %s\n' % version
usageMessage += __doc__
usageMessage += '\n' + hLine
Script.setUsageMessage( usageMessage )
def parseSwitches():
'''
Parses the arguments passed by the user
'''
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()
if args:
subLogger.error( "Found the following positional args '%s', but we only accept switches" % args )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
switches = dict( Script.getUnprocessedSwitches() )
# Default values
switches.setdefault( 'elementType', None )
switches.setdefault( 'name', None )
switches.setdefault( 'tokenOwner', None )
switches.setdefault( 'statusType', None )
switches.setdefault( 'status', None )
if 'element' not in switches:
subLogger.error( "element Switch missing" )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
if not switches[ 'element' ] in ( 'Site', 'Resource', 'Node' ):
subLogger.error( "Found %s as element switch" % switches[ 'element' ] )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
subLogger.debug( "The switches used are:" )
map( subLogger.debug, switches.iteritems() )
return switches
#...............................................................................
def getElements():
'''
Given the switches, gets a list of elements with their respective statustype
and status attributes.
'''
rssClient = ResourceStatusClient.ResourceStatusClient()
meta = { 'columns' : [] }
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
if switchDict[ key[0].lower() + key[1:] ] is None:
meta[ 'columns' ].append( key )
elements = rssClient.selectStatusElement(
switchDict[ 'element' ], 'Status',
name = switchDict[ 'name' ].split(',') if switchDict['name'] else None,
statusType = switchDict[ 'statusType' ].split(',') if switchDict['statusType'] else None,
status = switchDict[ 'status' ].split(',') if switchDict['status'] else None,
elementType = switchDict[ 'elementType' ].split(',') if switchDict['elementType'] else None,
tokenOwner = switchDict[ 'tokenOwner' ].split(',') if switchDict['tokenOwner'] else None,
meta = meta )
return elements
def tabularPrint( elementsList ):
'''
Prints the list of elements on a tabular
'''
subLogger.notice( '' )
subLogger.notice( 'Selection parameters:' )
subLogger.notice( ' %s: %s' % ( 'element'.ljust( 15 ), switchDict[ 'element' ] ) )
titles = []
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
keyT = key[0].lower() + key[1:]
if switchDict[ keyT ] is None:
titles.append( key )
else:
subLogger.notice( ' %s: %s' % ( key.ljust( 15 ), switchDict[ keyT ] ) )
subLogger.notice( '' )
subLogger.notice( printTable( titles, elementsList, printOut = False,
numbering = False, columnSeparator = ' | ' ) )
#...............................................................................
def run():
'''
Main function of the script
'''
elements = getElements()
if not elements[ 'OK' ]:
subLogger.error( elements )
DIRACExit( 1 )
elements = elements[ 'Value' ]
tabularPrint( elements )
#...............................................................................
if __name__ == "__main__":
subLogger = gLogger.getSubLogger( __file__ )
#Script initialization
registerSwitches()
registerUsageMessage()
switchDict = parseSwitches()
#Run script
run()
<|fim▁hole|> DIRACExit( 0 )
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF<|fim▁end|> | #Bye |
<|file_name|>11.4.1-5-a-27-s.js<|end_file_name|><|fim▁begin|>/// Copyright (c) 2012 Ecma International. All rights reserved.
/**
* @path ch11/11.4/11.4.1/11.4.1-5-a-27-s.js
* @description Strict Mode - TypeError is thrown after deleting a property, calling preventExtensions, and attempting to reassign the property
* @onlyStrict
*/
function testcase() {
"use strict";
var a = {x:0, get y() { return 0;}};
delete a.x;
Object.preventExtensions(a);
try {<|fim▁hole|> } catch (e) {
return e instanceof TypeError;
}
}
runTestCase(testcase);<|fim▁end|> | a.x = 1;
return false; |
<|file_name|>interfaces.go<|end_file_name|><|fim▁begin|>package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"net/http"
)
// InterfacesClient is the network Client
type InterfacesClient struct {
ManagementClient
}
// NewInterfacesClient creates an instance of the InterfacesClient client.
func NewInterfacesClient(subscriptionID string) InterfacesClient {
return NewInterfacesClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewInterfacesClientWithBaseURI creates an instance of the InterfacesClient client.
func NewInterfacesClientWithBaseURI(baseURI string, subscriptionID string) InterfacesClient {
return InterfacesClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates or updates a network interface. This method may poll for completion. Polling can be canceled
// by passing the cancel channel argument. The channel will be used to cancel polling and any outstanding HTTP
// requests.
//
// resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface.
// parameters is parameters supplied to the create or update network interface operation.
func (client InterfacesClient) CreateOrUpdate(resourceGroupName string, networkInterfaceName string, parameters Interface, cancel <-chan struct{}) (<-chan Interface, <-chan error) {
resultChan := make(chan Interface, 1)
errChan := make(chan error, 1)
go func() {
var err error
var result Interface
defer func() {
if err != nil {
errChan <- err
}
resultChan <- result
close(resultChan)
close(errChan)
}()
req, err := client.CreateOrUpdatePreparer(resourceGroupName, networkInterfaceName, parameters, cancel)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
resp, err := client.CreateOrUpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "CreateOrUpdate", resp, "Failure sending request")
return
}
result, err = client.CreateOrUpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "CreateOrUpdate", resp, "Failure responding to request")
}
}()
return resultChan, errChan
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client InterfacesClient) CreateOrUpdatePreparer(resourceGroupName string, networkInterfaceName string, parameters Interface, cancel <-chan struct{}) (*http.Request, error) {
pathParameters := map[string]interface{}{
"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-03-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsJSON(),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{Cancel: cancel})
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client),
azure.DoPollForAsynchronous(client.PollingDelay))
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client InterfacesClient) CreateOrUpdateResponder(resp *http.Response) (result Interface, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusCreated, http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified network interface. This method may poll for completion. Polling can be canceled by
// passing the cancel channel argument. The channel will be used to cancel polling and any outstanding HTTP requests.
//
// resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface.
func (client InterfacesClient) Delete(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (<-chan autorest.Response, <-chan error) {
resultChan := make(chan autorest.Response, 1)
errChan := make(chan error, 1)
go func() {
var err error
var result autorest.Response
defer func() {
if err != nil {
errChan <- err
}
resultChan <- result
close(resultChan)
close(errChan)
}()
req, err := client.DeletePreparer(resourceGroupName, networkInterfaceName, cancel)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Delete", nil, "Failure preparing request")
return
}
resp, err := client.DeleteSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Delete", resp, "Failure sending request")
return
}
result, err = client.DeleteResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Delete", resp, "Failure responding to request")
}
}()
return resultChan, errChan
}
// DeletePreparer prepares the Delete request.
func (client InterfacesClient) DeletePreparer(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (*http.Request, error) {
pathParameters := map[string]interface{}{
"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-03-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{Cancel: cancel})
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) DeleteSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client),
azure.DoPollForAsynchronous(client.PollingDelay))
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client InterfacesClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusNoContent, http.StatusAccepted, http.StatusOK),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets information about the specified network interface.
//
// resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface.
// expand is expands referenced resources.
func (client InterfacesClient) Get(resourceGroupName string, networkInterfaceName string, expand string) (result Interface, err error) {
req, err := client.GetPreparer(resourceGroupName, networkInterfaceName, expand)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client InterfacesClient) GetPreparer(resourceGroupName string, networkInterfaceName string, expand string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-03-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(expand) > 0 {
queryParameters["$expand"] = autorest.Encode("query", expand)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{})
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client InterfacesClient) GetResponder(resp *http.Response) (result Interface, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// GetEffectiveRouteTable gets all route tables applied to a network interface. This method may poll for completion.
// Polling can be canceled by passing the cancel channel argument. The channel will be used to cancel polling and any
// outstanding HTTP requests.
//
// resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface.
func (client InterfacesClient) GetEffectiveRouteTable(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (<-chan EffectiveRouteListResult, <-chan error) {
resultChan := make(chan EffectiveRouteListResult, 1)
errChan := make(chan error, 1)
go func() {
var err error
var result EffectiveRouteListResult
defer func() {
if err != nil {
errChan <- err
}
resultChan <- result
close(resultChan)
close(errChan)
}()
req, err := client.GetEffectiveRouteTablePreparer(resourceGroupName, networkInterfaceName, cancel)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetEffectiveRouteTable", nil, "Failure preparing request")
return
}
resp, err := client.GetEffectiveRouteTableSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetEffectiveRouteTable", resp, "Failure sending request")
return
}
result, err = client.GetEffectiveRouteTableResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetEffectiveRouteTable", resp, "Failure responding to request")
}
}()
return resultChan, errChan
}
// GetEffectiveRouteTablePreparer prepares the GetEffectiveRouteTable request.
func (client InterfacesClient) GetEffectiveRouteTablePreparer(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (*http.Request, error) {
pathParameters := map[string]interface{}{
"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-03-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{Cancel: cancel})
}
// GetEffectiveRouteTableSender sends the GetEffectiveRouteTable request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) GetEffectiveRouteTableSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client),
azure.DoPollForAsynchronous(client.PollingDelay))
}
// GetEffectiveRouteTableResponder handles the response to the GetEffectiveRouteTable request. The method always
// closes the http.Response Body.
func (client InterfacesClient) GetEffectiveRouteTableResponder(resp *http.Response) (result EffectiveRouteListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// GetVirtualMachineScaleSetNetworkInterface get the specified network interface in a virtual machine scale set.
//
// resourceGroupName is the name of the resource group. virtualMachineScaleSetName is the name of the virtual machine
// scale set. virtualmachineIndex is the virtual machine index. networkInterfaceName is the name of the network
// interface. expand is expands referenced resources.
func (client InterfacesClient) GetVirtualMachineScaleSetNetworkInterface(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string, networkInterfaceName string, expand string) (result Interface, err error) {
req, err := client.GetVirtualMachineScaleSetNetworkInterfacePreparer(resourceGroupName, virtualMachineScaleSetName, virtualmachineIndex, networkInterfaceName, expand)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetVirtualMachineScaleSetNetworkInterface", nil, "Failure preparing request")
return
}
resp, err := client.GetVirtualMachineScaleSetNetworkInterfaceSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetVirtualMachineScaleSetNetworkInterface", resp, "Failure sending request")
return
}
result, err = client.GetVirtualMachineScaleSetNetworkInterfaceResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetVirtualMachineScaleSetNetworkInterface", resp, "Failure responding to request")
}
return
}
// GetVirtualMachineScaleSetNetworkInterfacePreparer prepares the GetVirtualMachineScaleSetNetworkInterface request.
func (client InterfacesClient) GetVirtualMachineScaleSetNetworkInterfacePreparer(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string, networkInterfaceName string, expand string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"virtualmachineIndex": autorest.Encode("path", virtualmachineIndex),
"virtualMachineScaleSetName": autorest.Encode("path", virtualMachineScaleSetName),
}
const APIVersion = "2017-03-30"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(expand) > 0 {
queryParameters["$expand"] = autorest.Encode("query", expand)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),<|fim▁hole|> return preparer.Prepare(&http.Request{})
}
// GetVirtualMachineScaleSetNetworkInterfaceSender sends the GetVirtualMachineScaleSetNetworkInterface request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) GetVirtualMachineScaleSetNetworkInterfaceSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client))
}
// GetVirtualMachineScaleSetNetworkInterfaceResponder handles the response to the GetVirtualMachineScaleSetNetworkInterface request. The method always
// closes the http.Response Body.
func (client InterfacesClient) GetVirtualMachineScaleSetNetworkInterfaceResponder(resp *http.Response) (result Interface, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// List gets all network interfaces in a resource group.
//
// resourceGroupName is the name of the resource group.
func (client InterfacesClient) List(resourceGroupName string) (result InterfaceListResult, err error) {
req, err := client.ListPreparer(resourceGroupName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "List", resp, "Failure sending request")
return
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "List", resp, "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
func (client InterfacesClient) ListPreparer(resourceGroupName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-03-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{})
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListResponder(resp *http.Response) (result InterfaceListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListNextResults retrieves the next set of results, if any.
func (client InterfacesClient) ListNextResults(lastResults InterfaceListResult) (result InterfaceListResult, err error) {
req, err := lastResults.InterfaceListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "List", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "List", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "List", resp, "Failure responding to next results request")
}
return
}
// ListComplete gets all elements from the list without paging.
func (client InterfacesClient) ListComplete(resourceGroupName string, cancel <-chan struct{}) (<-chan Interface, <-chan error) {
resultChan := make(chan Interface)
errChan := make(chan error, 1)
go func() {
defer func() {
close(resultChan)
close(errChan)
}()
list, err := client.List(resourceGroupName)
if err != nil {
errChan <- err
return
}
if list.Value != nil {
for _, item := range *list.Value {
select {
case <-cancel:
return
case resultChan <- item:
// Intentionally left blank
}
}
}
for list.NextLink != nil {
list, err = client.ListNextResults(list)
if err != nil {
errChan <- err
return
}
if list.Value != nil {
for _, item := range *list.Value {
select {
case <-cancel:
return
case resultChan <- item:
// Intentionally left blank
}
}
}
}
}()
return resultChan, errChan
}
// ListAll gets all network interfaces in a subscription.
func (client InterfacesClient) ListAll() (result InterfaceListResult, err error) {
req, err := client.ListAllPreparer()
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", nil, "Failure preparing request")
return
}
resp, err := client.ListAllSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", resp, "Failure sending request")
return
}
result, err = client.ListAllResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", resp, "Failure responding to request")
}
return
}
// ListAllPreparer prepares the ListAll request.
func (client InterfacesClient) ListAllPreparer() (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-03-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{})
}
// ListAllSender sends the ListAll request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListAllSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client))
}
// ListAllResponder handles the response to the ListAll request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListAllResponder(resp *http.Response) (result InterfaceListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListAllNextResults retrieves the next set of results, if any.
func (client InterfacesClient) ListAllNextResults(lastResults InterfaceListResult) (result InterfaceListResult, err error) {
req, err := lastResults.InterfaceListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListAllSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", resp, "Failure sending next results request")
}
result, err = client.ListAllResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", resp, "Failure responding to next results request")
}
return
}
// ListAllComplete gets all elements from the list without paging.
func (client InterfacesClient) ListAllComplete(cancel <-chan struct{}) (<-chan Interface, <-chan error) {
resultChan := make(chan Interface)
errChan := make(chan error, 1)
go func() {
defer func() {
close(resultChan)
close(errChan)
}()
list, err := client.ListAll()
if err != nil {
errChan <- err
return
}
if list.Value != nil {
for _, item := range *list.Value {
select {
case <-cancel:
return
case resultChan <- item:
// Intentionally left blank
}
}
}
for list.NextLink != nil {
list, err = client.ListAllNextResults(list)
if err != nil {
errChan <- err
return
}
if list.Value != nil {
for _, item := range *list.Value {
select {
case <-cancel:
return
case resultChan <- item:
// Intentionally left blank
}
}
}
}
}()
return resultChan, errChan
}
// ListEffectiveNetworkSecurityGroups gets all network security groups applied to a network interface. This method may
// poll for completion. Polling can be canceled by passing the cancel channel argument. The channel will be used to
// cancel polling and any outstanding HTTP requests.
//
// resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface.
func (client InterfacesClient) ListEffectiveNetworkSecurityGroups(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (<-chan EffectiveNetworkSecurityGroupListResult, <-chan error) {
resultChan := make(chan EffectiveNetworkSecurityGroupListResult, 1)
errChan := make(chan error, 1)
go func() {
var err error
var result EffectiveNetworkSecurityGroupListResult
defer func() {
if err != nil {
errChan <- err
}
resultChan <- result
close(resultChan)
close(errChan)
}()
req, err := client.ListEffectiveNetworkSecurityGroupsPreparer(resourceGroupName, networkInterfaceName, cancel)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListEffectiveNetworkSecurityGroups", nil, "Failure preparing request")
return
}
resp, err := client.ListEffectiveNetworkSecurityGroupsSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListEffectiveNetworkSecurityGroups", resp, "Failure sending request")
return
}
result, err = client.ListEffectiveNetworkSecurityGroupsResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListEffectiveNetworkSecurityGroups", resp, "Failure responding to request")
}
}()
return resultChan, errChan
}
// ListEffectiveNetworkSecurityGroupsPreparer prepares the ListEffectiveNetworkSecurityGroups request.
func (client InterfacesClient) ListEffectiveNetworkSecurityGroupsPreparer(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (*http.Request, error) {
pathParameters := map[string]interface{}{
"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-03-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{Cancel: cancel})
}
// ListEffectiveNetworkSecurityGroupsSender sends the ListEffectiveNetworkSecurityGroups request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListEffectiveNetworkSecurityGroupsSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client),
azure.DoPollForAsynchronous(client.PollingDelay))
}
// ListEffectiveNetworkSecurityGroupsResponder handles the response to the ListEffectiveNetworkSecurityGroups request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListEffectiveNetworkSecurityGroupsResponder(resp *http.Response) (result EffectiveNetworkSecurityGroupListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListVirtualMachineScaleSetNetworkInterfaces gets all network interfaces in a virtual machine scale set.
//
// resourceGroupName is the name of the resource group. virtualMachineScaleSetName is the name of the virtual machine
// scale set.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfaces(resourceGroupName string, virtualMachineScaleSetName string) (result InterfaceListResult, err error) {
req, err := client.ListVirtualMachineScaleSetNetworkInterfacesPreparer(resourceGroupName, virtualMachineScaleSetName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", nil, "Failure preparing request")
return
}
resp, err := client.ListVirtualMachineScaleSetNetworkInterfacesSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", resp, "Failure sending request")
return
}
result, err = client.ListVirtualMachineScaleSetNetworkInterfacesResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", resp, "Failure responding to request")
}
return
}
// ListVirtualMachineScaleSetNetworkInterfacesPreparer prepares the ListVirtualMachineScaleSetNetworkInterfaces request.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesPreparer(resourceGroupName string, virtualMachineScaleSetName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"virtualMachineScaleSetName": autorest.Encode("path", virtualMachineScaleSetName),
}
const APIVersion = "2017-03-30"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{})
}
// ListVirtualMachineScaleSetNetworkInterfacesSender sends the ListVirtualMachineScaleSetNetworkInterfaces request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client))
}
// ListVirtualMachineScaleSetNetworkInterfacesResponder handles the response to the ListVirtualMachineScaleSetNetworkInterfaces request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesResponder(resp *http.Response) (result InterfaceListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListVirtualMachineScaleSetNetworkInterfacesNextResults retrieves the next set of results, if any.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesNextResults(lastResults InterfaceListResult) (result InterfaceListResult, err error) {
req, err := lastResults.InterfaceListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListVirtualMachineScaleSetNetworkInterfacesSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", resp, "Failure sending next results request")
}
result, err = client.ListVirtualMachineScaleSetNetworkInterfacesResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", resp, "Failure responding to next results request")
}
return
}
// ListVirtualMachineScaleSetNetworkInterfacesComplete gets all elements from the list without paging.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesComplete(resourceGroupName string, virtualMachineScaleSetName string, cancel <-chan struct{}) (<-chan Interface, <-chan error) {
resultChan := make(chan Interface)
errChan := make(chan error, 1)
go func() {
defer func() {
close(resultChan)
close(errChan)
}()
list, err := client.ListVirtualMachineScaleSetNetworkInterfaces(resourceGroupName, virtualMachineScaleSetName)
if err != nil {
errChan <- err
return
}
if list.Value != nil {
for _, item := range *list.Value {
select {
case <-cancel:
return
case resultChan <- item:
// Intentionally left blank
}
}
}
for list.NextLink != nil {
list, err = client.ListVirtualMachineScaleSetNetworkInterfacesNextResults(list)
if err != nil {
errChan <- err
return
}
if list.Value != nil {
for _, item := range *list.Value {
select {
case <-cancel:
return
case resultChan <- item:
// Intentionally left blank
}
}
}
}
}()
return resultChan, errChan
}
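// Caller-side sketch (not generated code) for draining the channel pager above; an early
// exit is requested by closing `cancel`:
//
//	cancel := make(chan struct{})
//	nics, errs := client.ListVirtualMachineScaleSetNetworkInterfacesComplete("my-rg", "my-vmss", cancel)
//	for nic := range nics {
//		if nic.Name != nil {
//			fmt.Println(*nic.Name)
//		}
//	}
//	if err := <-errs; err != nil {
//		log.Fatal(err)
//	}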
// ListVirtualMachineScaleSetVMNetworkInterfaces gets information about all network interfaces in a virtual machine in
// a virtual machine scale set.
//
// resourceGroupName is the name of the resource group. virtualMachineScaleSetName is the name of the virtual machine
// scale set. virtualmachineIndex is the virtual machine index.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfaces(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string) (result InterfaceListResult, err error) {
req, err := client.ListVirtualMachineScaleSetVMNetworkInterfacesPreparer(resourceGroupName, virtualMachineScaleSetName, virtualmachineIndex)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", nil, "Failure preparing request")
return
}
resp, err := client.ListVirtualMachineScaleSetVMNetworkInterfacesSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", resp, "Failure sending request")
return
}
result, err = client.ListVirtualMachineScaleSetVMNetworkInterfacesResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", resp, "Failure responding to request")
}
return
}
// ListVirtualMachineScaleSetVMNetworkInterfacesPreparer prepares the ListVirtualMachineScaleSetVMNetworkInterfaces request.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesPreparer(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"virtualmachineIndex": autorest.Encode("path", virtualmachineIndex),
"virtualMachineScaleSetName": autorest.Encode("path", virtualMachineScaleSetName),
}
const APIVersion = "2017-03-30"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare(&http.Request{})
}
// ListVirtualMachineScaleSetVMNetworkInterfacesSender sends the ListVirtualMachineScaleSetVMNetworkInterfaces request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client,
req,
azure.DoRetryWithRegistration(client.Client))
}
// ListVirtualMachineScaleSetVMNetworkInterfacesResponder handles the response to the ListVirtualMachineScaleSetVMNetworkInterfaces request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesResponder(resp *http.Response) (result InterfaceListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListVirtualMachineScaleSetVMNetworkInterfacesNextResults retrieves the next set of results, if any.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesNextResults(lastResults InterfaceListResult) (result InterfaceListResult, err error) {
req, err := lastResults.InterfaceListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListVirtualMachineScaleSetVMNetworkInterfacesSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", resp, "Failure sending next results request")
}
result, err = client.ListVirtualMachineScaleSetVMNetworkInterfacesResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", resp, "Failure responding to next results request")
}
return
}
// ListVirtualMachineScaleSetVMNetworkInterfacesComplete gets all elements from the list without paging.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesComplete(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string, cancel <-chan struct{}) (<-chan Interface, <-chan error) {
resultChan := make(chan Interface)
errChan := make(chan error, 1)
go func() {
defer func() {
close(resultChan)
close(errChan)
}()
list, err := client.ListVirtualMachineScaleSetVMNetworkInterfaces(resourceGroupName, virtualMachineScaleSetName, virtualmachineIndex)
if err != nil {
errChan <- err
return
}
if list.Value != nil {
for _, item := range *list.Value {
select {
case <-cancel:
return
case resultChan <- item:
// Intentionally left blank
}
}
}
for list.NextLink != nil {
list, err = client.ListVirtualMachineScaleSetVMNetworkInterfacesNextResults(list)
if err != nil {
errChan <- err
return
}
if list.Value != nil {
for _, item := range *list.Value {
select {
case <-cancel:
return
case resultChan <- item:
// Intentionally left blank
}
}
}
}
}()
return resultChan, errChan
}<|fim▁end|> | autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}", pathParameters),
autorest.WithQueryParameters(queryParameters)) |
<|file_name|>CreateClusteredTableIT.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.bigquery;
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.common.collect.ImmutableList;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
public class CreateClusteredTableIT {
private final Logger log = Logger.getLogger(this.getClass().getName());
private String tableName;
private ByteArrayOutputStream bout;
private PrintStream out;<|fim▁hole|> private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME");
private static void requireEnvVar(String varName) {
assertNotNull(
"Environment variable " + varName + " is required to perform these tests.",
System.getenv(varName));
}
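// Illustrative setup (dataset name assumed): export BIGQUERY_DATASET_NAME=my_dataset before running this IT.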
@BeforeClass
public static void checkRequirements() {
requireEnvVar("BIGQUERY_DATASET_NAME");
}
@Before
public void setUp() {
tableName = "MY_CLUSTERED_TABLE_TEST" + UUID.randomUUID().toString().substring(0, 8);
bout = new ByteArrayOutputStream();
out = new PrintStream(bout);
originalPrintStream = System.out;
System.setOut(out);
}
@After
public void tearDown() {
// Clean up
DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName);
// restores print statements in the original method
System.out.flush();
System.setOut(originalPrintStream);
log.log(Level.INFO, "\n" + bout.toString());
}
@Test
public void createClusteredTable() {
Schema schema =
Schema.of(
Field.of("name", StandardSQLTypeName.STRING),
Field.of("post_abbr", StandardSQLTypeName.STRING),
Field.of("date", StandardSQLTypeName.DATE));
CreateClusteredTable.createClusteredTable(
BIGQUERY_DATASET_NAME, tableName, schema, ImmutableList.of("name", "post_abbr"));
assertThat(bout.toString()).contains("Clustered table created successfully");
}
}<|fim▁end|> | private PrintStream originalPrintStream;
|
<|file_name|>user.js<|end_file_name|><|fim▁begin|>app.service('UserService', ['$http', function($http) {
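// Illustrative controller-side usage (names assumed, not part of this file):
// UserService.getLogged(function(res) { $scope.user = res.data; });
// UserService.putPin($scope.user, function(res) { /* PIN stored */ });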
return {
getLogged: function(successCallback) {
$http.get('/api/user/logged').then(successCallback);
},
<|fim▁hole|> $http.post('/api/user/pin/', user).then(successCallback);
}
};
}]);<|fim▁end|> | putPin: function(user, successCallback) { |
<|file_name|>TimelineCtrl.ts<|end_file_name|><|fim▁begin|>module Timeline {
declare var vis;
export interface ITimelineScope extends ng.IScope {
vm: TimelineCtrl;
numberOfItems: number;
timeline: any;
datePickerOptions: any;
datePickerDate: Date;
}
/** Interface for the timeline configuration, may be part of the {csComp.Services.IFeatureType} or {csComp.Services.IProjectLayer}. */
export interface ITimelineConfig {
/** Group (row/lane) to use */
group?: string;
/** Property to use as the group (row/lane) */
groupProperty?: string;
/** CSS class to use for the group */
groupClass?: string;
/** Property to use as the CSS class for the group */
groupClassProperty?: string;
/** CSS class to use for the timeline item */
class?: string;
/** Property to use as the CSS class for the timeline item */
classProperty?: string;
/** Property that contains the start time (as stringified Date) */
startTimeProperty?: string;
/** Property that contains the end time (as stringified Date) */
endTimeProperty?: string;
/** Property that contains the content (text that appears inside the timeline item) */<|fim▁hole|> export interface ITimelineItem {
/** Feature ID */
id?: any;
/** Layer ID */
layerId?: string;
/** Content to show in the timeline item (html or string) */
content?: string;
/** Start time */
start?: Date;
/** End time */
end?: Date;
group?: string;
/** CSS group class name */
groupClass?: string;
/** CSS timeline item class name */
className?: string;
}
/** Interface to talk to the timeline items in the timeline, of type vis.DataSet. */
export interface IDataSet {
/** Add one or more timeline items. */
add(items: ITimelineItem | ITimelineItem[]);
/** Removes an item from the timeline. */
remove(items: ITimelineItem | ITimelineItem[]);
/** Returns the ids of all timeline items. */
getIds(): string[];
/** Get all timeline items. */
get(): ITimelineItem[];
/** Clears the timeline items. */
clear();
forEach(calback: (item: ITimelineItem) => void);
}
export class TimelineCtrl {
private scope: ITimelineScope;
private locale = 'en-us';
private timelineGroups: IDataSet = new vis.DataSet();
/** Holds the timeline items, is databound to the timeline. */
private timelineItems: IDataSet = new vis.DataSet();
// $inject annotation.
// It provides $injector with information about dependencies to be injected into constructor
// it is better to have it close to the constructor, because the parameters must match in count and type.
// See http://docs.angularjs.org/guide/di
public static $inject = [
'$scope',
'layerService',
'mapService',
'messageBusService',
'TimelineService'
];
public focusDate: Date;
public line1: string;
public line2: string;
public startDate: Date;
public endDate: Date;
public timer: any;
public isPlaying: boolean;
public showControl: boolean;
public isPinned: boolean = true;
public activeDateRange: csComp.Services.DateRange;
public options: any;
public expandButtonBottom = 52;
public datePickerBottom = 120;
public items = new vis.DataSet();
private debounceUpdate: Function;
private debounceSetItems: Function;
private ids: string[] = [];
// dependencies are injected via AngularJS $injector
// controller's name is registered in Application.ts and specified from ng-controller attribute in index.html
constructor(
private $scope: ITimelineScope,
private $layerService: csComp.Services.LayerService,
private $mapService: csComp.Services.MapService,
private $messageBusService: csComp.Services.MessageBusService,
private TimelineService: Timeline.ITimelineService
) {
this.loadLocales();
this.options = {
'width': '100%',
'editable': false,
'margin': 0,
'height': 54,
'moveable': false,
'zoomMax': 172800000000,
'zoomMin': 3600000
//'layout': 'box'
};
this.debounceUpdate = _.debounce(this.updateFeatures, 500);
this.debounceSetItems = _.debounce((items) => { this.addItems(items); }, 500);
$scope.$watch("datePickerDate", (d: string) => {
if (typeof d !== 'undefined') {
var date = new Date(d);
this.updateTimeline(date, new Date(date.getTime() + 1000 * 60 * 60 * 24));
}
})
$scope.vm = this;
$scope.datePickerOptions = {
customClass: this.getDayClass,
minDate: new Date(2015, 1, 1),
maxDate: new Date()
};
this.$messageBusService.subscribe('dashboard-main', (s: string, data: any) => {
if (s === 'activated') {
this.updatePanelHeights();
this.updateTimelineHeight();
}
});
this.$messageBusService.subscribe('project', (s: string, data: any) => {
setTimeout(() => {
this.$scope.timeline.setItems(this.timelineItems);
this.$scope.timeline.setGroups(this.timelineGroups);
// set min/max zoom levels if available
if (this.activeDateRange !== null) {
if (!_.isUndefined(this.activeDateRange.zoomMax)) this.$scope.timeline.options['zoomMax'] = this.activeDateRange.zoomMax;
if (!_.isUndefined(this.activeDateRange.zoomMin)) this.$scope.timeline.options['zoomMin'] = this.activeDateRange.zoomMin;
}
this.updateFocusTime();
this.updateDragging();
this.myTimer();
if (this.activeDateRange && this.activeDateRange.isLive) this.goLive();
}, 0);
});
this.initTimeline();
this.$messageBusService.subscribe('timeline', (s: string, data: any) => { this.update(s, data); });
this.$messageBusService.subscribe('feature', (s: string, feature: csComp.Services.IFeature) => {
if (s === 'onFeatureSelect' && feature) {
if (this.ids.indexOf(feature.id) !== -1) {
this.$scope.timeline.setSelection(feature.id);
}
}
});
//$scope.focusDate = $layerService.project.timeLine.focusDate();
// Options for the timeline
this.$messageBusService.subscribe('language', (s: string, newLanguage: string) => {
switch (s) {
case 'newLanguage':
this.initTimeline();
break;
}
});
this.$messageBusService.subscribe('layer', (title: string, layer: csComp.Services.IProjectLayer) => {
switch (title) {
case 'timelineUpdated':
this.addTimelineItemsInLayer(layer);
break;
case 'activated':
this.addTimelineItemsInLayer(layer);
break;
case 'deactivate':
this.removeTimelineItemsInLayer(layer);
break;
}
});
}
public updateTimeline(start: Date, end: Date) {
var d = this.$layerService.project.activeDashboard;
if (d.showTimeline && (d.timeline || this.$layerService.project.timeLine)) {
//console.log('checkTimeline: dashboard has timeline');
var t = (d.timeline) ? d.timeline : this.$layerService.project.timeLine;
t.start = start.getTime();
t.end = end.getTime();
this.$messageBusService.publish('timeline', 'updateTimerange', t);
}
}
private getDayClass(data) {
var date = data.date,
mode = data.mode;
if (mode === 'day') {
// computed but never used: the custom day-highlight styling is stubbed out
var dayToCheck = new Date(date).setHours(0, 0, 0, 0);
}
return ''; // no custom CSS class is applied yet
}
/** Check whether the layer contains timeline items, and if so, add them to the timeline. */
private addTimelineItemsInLayer(layer: csComp.Services.IProjectLayer) {
if (!layer.timeAware || !layer.data || !layer.data.features) return;
var layerConfig = layer.timelineConfig;
var items: ITimelineItem[] = [];
layer.data.features.forEach((f: csComp.Services.IFeature) => {
let props = f.properties;
let featureConfig = f.fType.timelineConfig;
if (!featureConfig && !layerConfig) return;
let classProp = (featureConfig && featureConfig.classProperty) || (layerConfig && layerConfig.classProperty);
let groupClassProp = (featureConfig && featureConfig.groupClassProperty) || (layerConfig && layerConfig.groupClassProperty);
let contentProp = (featureConfig && featureConfig.contentProperty) || (layerConfig && layerConfig.contentProperty);
let startProp = (featureConfig && featureConfig.startTimeProperty) || (layerConfig && layerConfig.startTimeProperty);
let endProp = (featureConfig && featureConfig.endTimeProperty) || (layerConfig && layerConfig.endTimeProperty);
let groupProp = (featureConfig && featureConfig.groupProperty) || (layerConfig && layerConfig.groupProperty);
let timelineItem = <ITimelineItem>{
id: f.id,
layerId: layer.id,
className: props.hasOwnProperty(classProp) ? props[classProp] : (featureConfig && featureConfig.class) || (layerConfig && layerConfig.class),
groupClass: props.hasOwnProperty(groupClassProp) ? props[groupClassProp] : (featureConfig && featureConfig.groupClass) || (layerConfig && layerConfig.groupClass),
group: props.hasOwnProperty(groupProp) ? props[groupProp] : (featureConfig && featureConfig.group) || (layerConfig && layerConfig.group) || '',
start: props.hasOwnProperty(startProp) ? props[startProp] : null,
end: props.hasOwnProperty(endProp) ? props[endProp] : null,
type: props.hasOwnProperty('type') ? props['type'] : null,
content: props.hasOwnProperty(contentProp) ? props[contentProp] : ''
};
if (timelineItem.start) items.push(timelineItem);
});
this.addItems(items);
}
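// Illustrative timelineConfig consumed by the mapping above (keys are the real interface
// fields; the values are invented), e.g. on a layer or feature type in the project JSON:
// "timelineConfig": {
// "group": "incidents",
// "classProperty": "severity",
// "startTimeProperty": "reported",
// "endTimeProperty": "resolved",
// "contentProperty": "title"
// }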
/** Remove all timeline items that could be found in this layer. */
private removeTimelineItemsInLayer(layer) {
if (!layer.timeAware || !layer.data || !layer.data.features) return;
var deleteItems: ITimelineItem[] = [];
this.timelineItems.forEach(item => {
if (item.layerId !== layer.id) return;
deleteItems.push(item);
});
this.deleteItems(deleteItems);
}
/** Update the groups, most likely after certain items have been added or deleted */
private updateGroups() {
this.timelineGroups.clear();
var groups: string[] = [];
this.timelineItems.forEach(item => {
if (groups.indexOf(item.group) >= 0) return;
groups.push(item.group);
this.timelineGroups.add(<ITimelineItem>{
className: item.groupClass,
content: item.group,
id: item.group,
title: item.group
});
});
}
private update(s, data) {
switch (s) {
case 'updateTimerange':
this.$scope.timeline.setWindow(data.start, data.end);
this.updateFocusTime();
break;
case 'loadProjectTimeRange':
if (typeof this.$layerService.project === 'undefined'
|| this.$layerService.project === null
|| typeof this.$layerService.project.timeLine === 'undefined'
|| this.$layerService.project.timeLine === null) return;
this.$scope.timeline.setWindow(this.$layerService.project.timeLine.start, this.$layerService.project.timeLine.end);
this.updateFocusTime();
break;
case 'setFocus':
this.setFocusContainerDebounce(data);
break;
case 'updateFeatures':
this.debounceUpdate();
break;
case 'setItems':
this.debounceSetItems(data);
break;
case 'setGroups':
this.setGroups(data);
break;
}
}
private setFocusContainerDebounce = _.debounce((data) => {
this.updateFocusTimeContainer(data);
//console.log(`Moved timeline and focuscontainer to ${data}`);
}, 300, true);
private addItems(items: ITimelineItem[]) {
if (!items) return;
let its = [];
items.forEach(i => {
if (this.timelineItems.getIds().indexOf(i.id) === -1) its.push(i);
});
this.timelineItems.add(its);
this.updateGroups();
}
private deleteItems(items: ITimelineItem[]) {
if (!items) return;
this.timelineItems.remove(items);
this.updateGroups();
}
private setGroups(groups: ITimelineItem[]) {
if (!groups || groups.length === 1) return;
this.timelineGroups.add(groups);
//var gs = new vis.DataSet(groups);
//this.$scope.timeline.setGroups(gs);
}
private updateFeatures() {
//console.log('timeline: updating features');
//this.items = [];
//this.$scope.timeline.redraw();
var temp: string[] = [];
var hasChanged = false;
// check for new items
this.$layerService.project.features.forEach((f: csComp.Services.IFeature) => {
hasChanged = true;
if (f.layer.showOnTimeline && f.properties.hasOwnProperty('date')) {
temp.push(f.id);
if (this.ids.indexOf(f.id) === -1) {
var t = { id: f.id, group: 'all', content: f.properties['Name'], start: new Date(f.properties['date']) };
this.items.update(t);
this.ids.push(f.id);
}
}
});
// check for old items
this.ids.forEach((s) => {
hasChanged = true;
if (temp.indexOf(s) === -1) {
// remove item
this.items.remove(s);
this.ids = this.ids.filter((t) => s !== t);
}
});
//this.$scope.timeline.setItems(i);
if (hasChanged) this.$scope.timeline.redraw();
}
private initTimeline() {
var container = document.getElementById('timeline');
// Remove old timeline before initializing a new one
while (container.firstChild) {
container.removeChild(container.firstChild);
}
this.$layerService.timeline = this.$scope.timeline = new vis.Timeline(container, this.items, this.options);
this.$scope.timeline.addCustomTime(this.focusDate, '1');
this.$scope.timeline.on('timechange', (res) => {
console.log(res.time);
});
this.$layerService.timeline.redraw();
if (this.$layerService.project && this.activeDateRange !== null) {
this.$scope.timeline.setWindow(this.activeDateRange.start, this.activeDateRange.end);
if (this.activeDateRange && this.activeDateRange.isLive) this.goLive();
}
this.updateDragging();
this.updateFocusTime();
this.$scope.timeline.on('select', (properties) => {
if (properties.items && properties.items.length > 0) {
var id = properties.items[0];
var f = this.$layerService.findFeatureById(id);
if (f) {
this.$layerService.selectFeature(f);
} else if (this.$layerService.project.eventTab) {
this.$messageBusService.publish('eventtab', 'zoomto', { id: id });
}
}
});
this.$scope.timeline.addEventListener('rangechange', _.throttle((prop) => this.onRangeChanged(prop), 200));
//this.addEventListener('featureschanged', _.throttle((prop) => this.updateFeatures(), 200));
}
public selectDate() {
}
public updateDragging() {
if (this.activeDateRange && this.activeDateRange.isLive) {
(<any>$('#focustimeContainer')).draggable('disable');
} else {
(<any>$('#focustimeContainer')).draggable({
axis: 'x',
containment: 'parent',
drag: _.throttle(() => this.updateFocusTime(), 200)
});
(<any>$('#focustimeContainer')).draggable('enable');
}
}
public expandToggle() {
this.activeDateRange.isExpanded = !this.activeDateRange.isExpanded;
this.updateTimelineHeight();
// this.options.margin = {};
// this.options.margin['item'] = (this.expanded) ? 65 : 0;
this.updatePanelHeights();
}
private updateTimelineHeight() {
this.options.moveable = this.activeDateRange.ismoveable;
this.options.height = (this.activeDateRange.isExpanded) ? this.activeDateRange.expandHeight : 54;
this.expandButtonBottom = (this.activeDateRange.isExpanded) ? this.activeDateRange.expandHeight - 1 : 52;
this.datePickerBottom = this.expandButtonBottom + 170;
this.$layerService.timeline.setOptions(this.options);
this.$layerService.timeline.redraw();
}
private updatePanelHeights() {
this.activeDateRange = (this.$layerService.project.activeDashboard.timeline) ? this.$layerService.project.activeDashboard.timeline : this.$layerService.project.timeLine;
var height = (this.activeDateRange.isExpanded && this.$layerService.project.activeDashboard.showTimeline) ? this.activeDateRange.expandHeight : 54;
$('.leftpanel-container').css('bottom', height + 20);
$('.rightpanel').css('bottom', height);
}
private throttleTimeSpanUpdate = _.debounce(this.triggerTimeSpanUpdated, 1000);
/**
* trigger a debounced timespan updated message on the message bus
*/
private triggerTimeSpanUpdated() {
this.$messageBusService.publish('timeline', 'timeSpanUpdated', '');
}
/**
* time span was updated by timeline control
*/
public onRangeChanged(prop) {
this.updateFocusTime();
this.throttleTimeSpanUpdate();
}
public start() {
this.stop();
this.isPlaying = true;
if (this.timer) this.timer = null;
this.timer = setInterval(() => { this.myTimer(); }, 500);
}
public goLive() {
this.stop();
this.activeDateRange.isLive = true;
this.isPlaying = false;
if (this.activeDateRange.isLive) {
this.myTimer();
this.start();
}
this.updateDragging();
}
public stopLive() {
if (!this.activeDateRange) return;
this.stop();
this.activeDateRange.isLive = false;
this.isPlaying = false;
this.updateDragging();
}
public myTimer() {
var tl = this.$scope.timeline;
if (this.activeDateRange.isLive) {
var pos = tl._toScreen(new Date());
$('#focustimeContainer').css('left', pos - 65);
if (this.isPinned)
tl.moveTo(new Date(), { animation: { duration: 500, easingFunction: 'linear' } });
this.updateFocusTime();
} else if (this.isPlaying) {
var w = tl.getWindow();
var dif = (w.end.getTime() - w.start.getTime()) / 200;
tl.setWindow(w.start.getTime() + dif, w.end.getTime() + dif, { animation: { duration: 500, easingFunction: 'linear' } });
//tl.move(0.005);
this.updateFocusTime();
}
}
public mouseEnter() {
this.updateFocusTime();
if (!isNaN(this.focusDate.getTime())) {
this.showControl = true;
}
}
public mouseLeave() {
if (!this.isPlaying) this.showControl = false;
}
public pin() {
this.isPinned = true;
}
public unPin() {
this.isPinned = false;
}
public pinToNow() {
this.isPinned = true;
this.start();
}
public stop() {
this.isPlaying = false;
if (this.timer) clearInterval(this.timer);
}
public timelineSelect() {
}
public updateFocusTimeContainer(time: Date) {
this.$scope.timeline.moveTo(time);
this.$scope.timeline.redraw();
if (this.$scope.$$phase !== '$apply' && this.$scope.$$phase !== '$digest') { this.$scope.$apply(); }
let screenPos = this.$scope.timeline._toScreen(time);
$('#focustimeContainer').css('left', screenPos - $('#focustimeContainer').width() / 2);
}
public updateFocusTime() {
if (!this.$layerService.project) return;
//if (!this.$mapService.timelineVisible) return;
setTimeout(() => {
var tl = this.$scope.timeline;
tl.showCustomTime = true;
// typeof this.$layerService.project === 'undefined'
// ? tl.setCustomTime(new Date())
// : tl.setCustomTime(this.$layerService.project.timeLine.focusDate());
//var end = $("#timeline").width;
var range = this.$scope.timeline.getWindow();
//tl.calcConversionFactor();
var pos = $('#focustimeContainer').position().left + $('#focustimeContainer').width() / 2;
if (this.activeDateRange.isLive) {
this.focusDate = new Date();
} else {
this.focusDate = new Date(this.$scope.timeline._toTime(pos));
}
this.startDate = range.start; //new Date(range.start); //this.$scope.timeline.screenToTime(0));
this.endDate = range.end; //new Date(this.$scope.timeline.screenToTime(end));
if (this.activeDateRange != null) {
this.activeDateRange.setFocus(this.focusDate, this.startDate, this.endDate);
this.$layerService.project.timeLine.setFocus(this.focusDate, this.startDate, this.endDate);
var month = (<any>this.focusDate).toLocaleString(this.locale, { month: 'long' });
switch (this.activeDateRange.zoomLevelName) {
case 'decades':
this.line1 = this.focusDate.getFullYear().toString();
this.line2 = '';
break;
case 'years':
this.line1 = this.focusDate.getFullYear().toString();
this.line2 = month;
break;
case 'weeks':
this.line1 = this.focusDate.getFullYear().toString();
this.line2 = moment(this.focusDate).format('DD') + ' ' + month;
break;
case 'milliseconds':
this.line1 = moment(this.focusDate).format('MM - DD - YYYY');
this.line2 = moment(this.focusDate).format('HH:mm:ss.SSS');
break;
default:
this.line1 = moment(this.focusDate).format('MM - DD - YYYY');
this.line2 = moment(this.focusDate).format('HH:mm:ss');
}
}
if (this.$scope.$$phase !== '$apply' && this.$scope.$$phase !== '$digest') { this.$scope.$apply(); }
this.$messageBusService.publish('timeline', 'focusChange', this.focusDate);
tl.setCustomTime(this.focusDate, "1");
}, 0);
//this.$layerService.focusTime = new Date(this.timelineCtrl.screenToTime(centerX));
}
/**
* Load the locales: instead of loading them from the original timeline-locales.js distribution,
* add them here so you don't need to add another js dependency.
* @seealso: http://almende.github.io/chap-links-library/downloads.html
*/
loadLocales() {
if (typeof vis === 'undefined') {
vis = {};
vis.locales = {};
} else if (typeof vis.locales === 'undefined') {
vis.locales = {};
}
// English ===================================================
vis.locales['en'] = {
'MONTHS': ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'],
'MONTHS_SHORT': ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
'DAYS': ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'],
'DAYS_SHORT': ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'],
'ZOOM_IN': 'Zoom in',
'ZOOM_OUT': 'Zoom out',
'MOVE_LEFT': 'Move left',
'MOVE_RIGHT': 'Move right',
'NEW': 'New',
'CREATE_NEW_EVENT': 'Create new event'
};
vis.locales['en_US'] = vis.locales['en'];
vis.locales['en_UK'] = vis.locales['en'];
// French ===================================================
vis.locales['fr'] = {
'MONTHS': ['Janvier', 'Février', 'Mars', 'Avril', 'Mai', 'Juin', 'Juillet', 'Août', 'Septembre', 'Octobre', 'Novembre', 'Décembre'],
'MONTHS_SHORT': ['Jan', 'Fev', 'Mar', 'Avr', 'Mai', 'Jun', 'Jul', 'Aou', 'Sep', 'Oct', 'Nov', 'Dec'],
'DAYS': ['Dimanche', 'Lundi', 'Mardi', 'Mercredi', 'Jeudi', 'Vendredi', 'Samedi'],
'DAYS_SHORT': ['Dim', 'Lun', 'Mar', 'Mer', 'Jeu', 'Ven', 'Sam'],
'ZOOM_IN': 'Zoomer',
'ZOOM_OUT': 'Dézoomer',
'MOVE_LEFT': 'Déplacer à gauche',
'MOVE_RIGHT': 'Déplacer à droite',
'NEW': 'Nouveau',
'CREATE_NEW_EVENT': 'Créer un nouvel évènement'
};
vis.locales['fr_FR'] = vis.locales['fr'];
vis.locales['fr_BE'] = vis.locales['fr'];
vis.locales['fr_CA'] = vis.locales['fr'];
// German ===================================================
vis.locales['de'] = {
'MONTHS': ['Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember'],
'MONTHS_SHORT': ['Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez'],
'DAYS': ['Sonntag', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag'],
'DAYS_SHORT': ['Son', 'Mon', 'Die', 'Mit', 'Don', 'Fre', 'Sam'],
'ZOOM_IN': 'Vergrößern',
'ZOOM_OUT': 'Verkleinern',
'MOVE_LEFT': 'Nach links verschieben',
'MOVE_RIGHT': 'Nach rechts verschieben',
'NEW': 'Neu',
'CREATE_NEW_EVENT': 'Neues Ereignis erzeugen'
};
vis.locales['de_DE'] = vis.locales['de'];
vis.locales['de_CH'] = vis.locales['de'];
// Dutch =====================================================
vis.locales['nl'] = {
'MONTHS': ['januari', 'februari', 'maart', 'april', 'mei', 'juni', 'juli', 'augustus', 'september', 'oktober', 'november', 'december'],
'MONTHS_SHORT': ['jan', 'feb', 'mrt', 'apr', 'mei', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec'],
'DAYS': ['zondag', 'maandag', 'dinsdag', 'woensdag', 'donderdag', 'vrijdag', 'zaterdag'],
'DAYS_SHORT': ['zo', 'ma', 'di', 'wo', 'do', 'vr', 'za'],
'ZOOM_IN': 'Inzoomen',
'ZOOM_OUT': 'Uitzoomen',
'MOVE_LEFT': 'Naar links',
'MOVE_RIGHT': 'Naar rechts',
'NEW': 'Nieuw',
'CREATE_NEW_EVENT': 'Nieuwe gebeurtenis maken'
};
vis.locales['nl_NL'] = vis.locales['nl'];
vis.locales['nl_BE'] = vis.locales['nl'];
}
}
}<|fim▁end|> | contentProperty?: string;
}
/** Interface for every group and timeline item. */ |
<|file_name|>print_needed_variables.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# print_needed_variables.py
#
# Copyright (C) 2014, 2015 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU General Public License v2
#
import os
import sys
if __name__ == '__main__' and __package__ is None:<|fim▁hole|> dir_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if dir_path != '/usr':
sys.path.insert(1, dir_path)
from kano_profile.badges import load_badge_rules
from kano.utils import write_json, uniqify_list
all_rules = load_badge_rules()
variables_needed = dict()
for category, subcats in all_rules.iteritems():
for subcat, items in subcats.iteritems():
for item, rules in items.iteritems():
targets = rules['targets']
for target in targets:
app = target[0]
variable = target[1]
variables_needed.setdefault(app, list()).append(variable)
for key in variables_needed.iterkeys():
variables_needed[key] = uniqify_list(variables_needed[key])
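# Resulting JSON maps app name -> badge-rule variables, e.g. (names invented):
# {"make-art": ["challenge_no"], "make-snake": ["highscore"]}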
write_json('variables_needed.json', variables_needed, False)<|fim▁end|> | |
<|file_name|>sample.wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for server_proj project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another<|fim▁hole|>
"""
import os
import site
site.addsitedir('/path/to/spacescout_builds/server_proj/lib/python2.6/site-packages')
site.addsitedir('/path/to/spacescout_builds/server_proj')
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server_proj.settings")
os.environ["DJANGO_SETTINGS_MODULE"] = "server_proj.settings"
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)<|fim▁end|> | framework. |
<|file_name|>user_blackbox_test.go<|end_file_name|><|fim▁begin|>package controller_test
import (
"context"
"path/filepath"
"testing"
"github.com/fabric8-services/fabric8-wit/app/test"
"github.com/fabric8-services/fabric8-wit/gormtestsupport"
errs "github.com/pkg/errors"
uuid "github.com/satori/go.uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/fabric8-services/fabric8-wit/configuration"
"github.com/fabric8-services/fabric8-wit/controller"
"github.com/fabric8-services/fabric8-wit/resource"
testjwt "github.com/fabric8-services/fabric8-wit/test/jwt"
testrecorder "github.com/fabric8-services/fabric8-wit/test/recorder"
tf "github.com/fabric8-services/fabric8-wit/test/testfixture"
"github.com/goadesign/goa"
"github.com/stretchr/testify/suite"
)
type UserControllerTestSuite struct {
gormtestsupport.DBTestSuite
config controller.UserControllerConfiguration
}
type UserControllerConfig struct {
url string
}
func (t UserControllerConfig) GetAuthServiceURL() string {
return t.url
}
func (t UserControllerConfig) GetAuthShortServiceHostName() string {
return ""
}
func (t UserControllerConfig) GetCacheControlUser() string {
return ""
}
func (t UserControllerConfig) IsAuthorizationEnabled() bool {
return false
}
func TestUserController(t *testing.T) {
resource.Require(t, resource.Database)
suite.Run(t, &UserControllerTestSuite{DBTestSuite: gormtestsupport.NewDBTestSuite()})
}
func (s *UserControllerTestSuite) NewSecuredController(options ...configuration.HTTPClientOption) (*goa.Service, *controller.UserController) {
svc := goa.New("user-controller")
userCtrl := controller.NewUserController(svc, s.GormDB, s.config, options...)
return svc, userCtrl
}
func (s *UserControllerTestSuite) NewUnsecuredController(options ...configuration.HTTPClientOption) (*goa.Service, *controller.UserController) {
svc := goa.New("user-controller")
userCtrl := controller.NewUserController(svc, s.GormDB, s.config, options...)
return svc, userCtrl
}
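// NOTE: the secured and unsecured constructors are currently identical; authorization is
// switched off via UserControllerConfig.IsAuthorizationEnabled(), so the tests differ only
// in whether a JWT-bearing context is supplied.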
func (s *UserControllerTestSuite) TestListSpaces() {
s.config = UserControllerConfig{
url: "https://auth",
}
s.T().Run("ok", func(t *testing.T) {
t.Run("user has no role in any space", func(t *testing.T) {
// given
ctx, err := testjwt.NewJWTContext("aa8bffab-c505-40b6-8e87-cd8b0fc1a0c4", "")
require.NoError(t, err)
r, err := testrecorder.New("../test/data/auth/list_spaces",
testrecorder.WithJWTMatcher("../test/jwt/public_key.pem"))
require.NoError(t, err)
defer r.Stop()
svc, userCtrl := s.NewSecuredController(configuration.WithRoundTripper(r.Transport))
// when
_, result := test.ListSpacesUserOK(t, ctx, svc, userCtrl)
// then
require.Empty(t, result.Data)
})
t.Run("user has a role in 1 space", func(t *testing.T) {
// given
tf.NewTestFixture(t, s.DB, tf.Spaces(1, func(fxt *tf.TestFixture, idx int) error {
if idx == 0 {
id, err := uuid.FromString("6c378ed7-67cf-4e09-b099-c25bf8202617")
if err != nil {
return errs.Wrapf(err, "failed to set ID for space in test fixture")
}
fxt.Spaces[idx].ID = id
fxt.Spaces[idx].Name = "space1"
}
return nil
}))
ctx, err := testjwt.NewJWTContext("bcdd0b29-123d-11e8-a8bc-b69930b94f5c", "")
require.NoError(t, err)
r, err := testrecorder.New("../test/data/auth/list_spaces",
testrecorder.WithJWTMatcher("../test/jwt/public_key.pem"))
require.NoError(t, err)
defer r.Stop()
svc, userCtrl := s.NewSecuredController(configuration.WithRoundTripper(r.Transport))
// when
_, result := test.ListSpacesUserOK(t, ctx, svc, userCtrl)
// then
require.Len(t, result.Data, 1)
assert.Equal(t, "space1", result.Data[0].Attributes.Name)
assert.NotNil(t, result.Data[0].Links.Self)
})
t.Run("user has a role in 2 spaces", func(t *testing.T) {
// given
tf.NewTestFixture(t, s.DB, tf.Spaces(2, func(fxt *tf.TestFixture, idx int) error {
if idx == 0 {
id, err := uuid.FromString("6bfa9182-dc81-4bc1-a694-c2e96ec23d3e")
if err != nil {
return errs.Wrapf(err, "failed to set ID for space in test fixture")
}
fxt.Spaces[idx].ID = id
fxt.Spaces[idx].Name = "space1"
} else if idx == 1 {
id, err := uuid.FromString("2423d75d-ae5d-4bc5-818b-8e3fa4e2167c")
if err != nil {
return errs.Wrapf(err, "failed to set ID for space in test fixture")
}
fxt.Spaces[idx].ID = id
fxt.Spaces[idx].Name = "space2"
}
return nil
}))
ctx, err := testjwt.NewJWTContext("83fdcae2-634f-4a52-958a-f723cb621700", "")
require.NoError(t, err)
r, err := testrecorder.New("../test/data/auth/list_spaces",
testrecorder.WithJWTMatcher("../test/jwt/public_key.pem"))
require.NoError(t, err)
defer r.Stop()
svc, userCtrl := s.NewSecuredController(configuration.WithRoundTripper(r.Transport))
// when
_, result := test.ListSpacesUserOK(t, ctx, svc, userCtrl)
// then
compareWithGoldenAgnostic(t, filepath.Join("test-files", "endpoints", "listspaces", "ok.res.payload.golden.json"), result)<|fim▁hole|> s.T().Run("unauthorized", func(t *testing.T) {
t.Run("missing token", func(t *testing.T) {
// given
ctx := context.Background()
r, err := testrecorder.New("../test/data/auth/list_spaces",
testrecorder.WithJWTMatcher("../test/jwt/public_key.pem"))
require.NoError(t, err)
defer r.Stop()
svc, userCtrl := s.NewUnsecuredController(configuration.WithRoundTripper(r.Transport))
// when/then
test.ListSpacesUserUnauthorized(t, ctx, svc, userCtrl)
})
})
}<|fim▁end|> | })
})
|
<|file_name|>Ekin_map.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Protein Engineering Analysis Tool DataBase (PEATDB)
# Copyright (C) 2010 Damien Farrell & Jens Erik Nielsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Contact information:
# Email: Jens.Nielsen_at_gmail.com
# Normal mail:
# Jens Nielsen
# SBBS, Conway Institute
# University College Dublin
# Dublin 4, Ireland
#
from Tkinter import *
import Pmw
import os
import numpy
class Ekin_map_annotate:
def map_datatab2structure(self):
"""If the PEATDB record has a structure, then we allow the user to map each datatab
to a specific part of the protein.
One can map a datatab to an atom, a residue, a chain, or define a structural group and map to it"""
if not self.parent:
import tkMessageBox
tkMessageBox.showinfo("No PEAT",
"This option is only available when Ekin is started from PEAT",
parent=self.ekin_win)
return
#
# Do we have a record name
#
if not self.protein:
import tkMessageBox
tkMessageBox.showinfo("No PEAT record",
"This option is only available when Ekin has been started by clicking a PEAT record",
parent=self.ekin_win)
return
#
# Is there a structure?
#
error=None
if not self.parent.data.has_key('DBinstance'):
error=1
else:
DB=self.parent.data['DBinstance'].DB
if not DB[self.protein].has_key('Structure'):
error=1
else:
print 'Trying to get PDB'
self.pdblines,X=self.parent.get_structure(self.protein,'Structure')
if not self.pdblines:
error=1
if error:
import tkMessageBox
tkMessageBox.showinfo("No Structure in PEAT",
"This option is only available when the PEAT record has a structure",
parent=self.ekin_win)
return
#
# Open the mapping window
#
mapper_win=Toplevel()
mapper_win.title('Map datatab to structure. %s - %s' %(self.protein,self.field))
self.set_geometry(self.ekin_win,mapper_win)
#
# Mapping Manager
#
row=0
Label(mapper_win,text='Mapping Manager',bg='lightblue').grid(row=row,column=0,columnspan=3,sticky='news')
row=row+1
Label(mapper_win,textvariable=self.currentdataset.get()).grid(row=row,column=0,columnspan=3,sticky='news')
#
# Headers
#
#row=row+1
#Label(mapper_win,text='Structural group type').grid(row=row,column=0,sticky='news')
#Label(mapper_win,text='Structural element').grid(row=row,column=1,sticky='news')
#Label(mapper_win,text='Datatab property').grid(row=row,column=2,sticky='news')
#
# Structural groupings for this protein
#
#if not DB[self.protein].has_key('structgroups'):
# DB[self.protein]['structgroups']={}
#structgroups=DB[self.protein]['structgroups'].keys()
#
# Load the residue definitions
#
import Protool.mutate
self.M_instance=Protool.mutate.Mutate(onlydefs=1)
self.AAdefs=self.M_instance.aadefs
#
# Struct group types
#
row=row+1
listbox_height=5
self.group_type_box = Pmw.ScrolledListBox(mapper_win,
items=['Residues','Atoms','Titratable groups'],
labelpos='nw',
label_text='Group type',
listbox_height = listbox_height,
usehullsize = 1,
hull_width = 200,
hull_height = 100,
selectioncommand=self.update_elements)
self.group_type_box.grid(row=row,column=0,columnspan=1,sticky='news')
self.group_type_box.configure(listbox_bg='white')
self.group_type_box.configure(listbox_selectmode='single')
self.group_type_box.configure(listbox_exportselection=0)
#
#
# Dropdown list of elements of each structgroup type
#
self.group_elements_box = Pmw.ScrolledListBox(mapper_win,
items=[],
labelpos='nw',
label_text='Group Elements',
listbox_height = listbox_height,
usehullsize = 1,
hull_width = 200,
hull_height = 100)
self.group_elements_box.grid(row=row,column=1,columnspan=1,sticky='news')
self.group_elements_box.configure(listbox_bg='white')
self.group_elements_box.configure(listbox_selectmode='extended')
self.group_elements_box.configure(listbox_exportselection=0)
# Parameters that we can map to structgroups
import Fitter
self.FIT=Fitter.FITTER('1 pKa 2 Chemical shifts',self)
self.dataprops=['Data source']+self.FIT.parameter_names
self.data_prop_box = Pmw.ScrolledListBox(mapper_win,
items=self.dataprops,
labelpos='nw',
label_text='Data properties',
listbox_height = listbox_height,
usehullsize = 1,
hull_width = 200,
hull_height = 100)
self.data_prop_box.grid(row=row,column=2,columnspan=1,sticky='news')
self.data_prop_box.configure(listbox_bg='white')
self.data_prop_box.configure(listbox_selectmode='extended')
self.data_prop_box.configure(listbox_exportselection=0)
#
# List of existing mappings
#
row=row+1
datatab=self.currentdataset.get()
print 'Loading this datatab in mapper',datatab
mappings=self.get_structmappings(datatab)
self.mapping_box = Pmw.ScrolledListBox(mapper_win,
items=mappings,
labelpos='nw',
label_text='Existing mappings',
listbox_height = 6,
usehullsize = 1,
hull_width = 200,
hull_height = 200)
self.mapping_box.grid(row=row,column=0,columnspan=3,sticky='news')
self.mapping_box.configure(listbox_selectmode='single')
self.mapping_box.configure(listbox_bg='white')
#
# Buttons
#
row=row+1
Button(mapper_win,text='Create mapping',bg='lightgreen',borderwidth=2, relief=GROOVE, command=self.create_mapping).grid(row=row,column=0,sticky='news',padx=2,pady=2)
Button(mapper_win,text='Delete mapping',bg='yellow',borderwidth=2, relief=GROOVE, command=self.delete_mapping).grid(row=row,column=1,sticky='news',padx=2,pady=2)
Button(mapper_win,text='Export',bg='#CFECEC',borderwidth=2, relief=GROOVE, command=self.export_dialog).grid(row=row,column=2,sticky='news',padx=2,pady=2)
row=row+1
Button(mapper_win,text='Close',borderwidth=2, relief=GROOVE,command=self.close_mapper_window).grid(row=row,column=1,columnspan=2,sticky='news',padx=2,pady=2)
#
# Structural group manager
#
#row=row+1
#Label(mapper_win,text='Structural Group Manager',bg='lightblue').grid(row=row,column=0,columnspan=3,sticky='news')
#import os, sys
#PEAT_dir=os.path.split(__file__)[0]
#sys.path.append(PEAT_dir)
#import protein_selector
#row=row+1
#SEL=protein_selector.select_residue(mapper_win,self.pdblines)
#SEL.box.grid(row=row,column=0)
##
#row=row+1
#Label(mapper_win,text='Atoms').grid(row=row,column=1)
#row=row+1
#Button(mapper_win,text='Create new structural grouping',command=self.create_new_structgroup).grid(row=row,column=0)
#Button(mapper_win,text='Add to structural grouping',command=self.add_to_structgroup).grid(row=row,column=1)
#Button(mapper_win,text='Close',command=mapper_win.destroy).grid(row=row,column=2,sticky='news')
mapper_win.rowconfigure(2,weight=1)
self.mapper_win=mapper_win
self.mapper_win.transient(master=self.ekin_win)
return
#
# ----
#
def close_mapper_window(self):
"""Close the mapping window and delete references to it"""
self.mapper_win.destroy()
if hasattr(self,"mapper_win"):
delattr(self,"mapper_win")
return
#
# ----
#
def update_elements(self):
"""Insert a new dropdown list for the element"""
#
# Get the group type
#
elements=None
group_type=self.group_type_box.getcurselection()[0]
import Protool
if group_type=='Residues':
P=Protool.structureIO()
P.parsepdb(self.pdblines)
residues=P.residues.keys()
residues.sort()
elements=[]
for res in residues:
elements.append('%s %s' %(res,P.resname(res)))
elif group_type=='Atoms':
P=Protool.structureIO()
P.parsepdb(self.pdblines)
atoms=P.atoms.keys()
for res in P.residues.keys():
resname=P.resname(res)
if self.AAdefs.has_key(resname):
defatoms=self.AAdefs[resname]['atoms']
#print defatoms
for defatom,coord,dummy in defatoms:
atom_name='%s:%s' %(res,defatom)
if not P.atoms.has_key(atom_name):
atoms.append(atom_name)
#print 'Adding',atom_name
atoms.sort()
elements=[]
for at in atoms:
elements.append(at)
elif group_type=='Titratable groups':
P=Protool.structureIO()
P.parsepdb(self.pdblines)
P.get_titratable_groups()
titgrps=P.titratable_groups.keys()
titgrps.sort()
elements=[]
for res in titgrps:
for titgrp in P.titratable_groups[res]:
name='%s %s' %(res,titgrp['name'])
elements.append(name)
else:
print 'Unknown group type',group_type
#
# Make the new dropdown list
#
if elements:
self.group_elements_box.setlist(elements)
return
#
# -----
#
def create_mapping(self):
"""Create the mapping"""
g_type=self.group_type_box.getcurselection()
if len(g_type)==0:
return
g_type=g_type[0]
g_elements=self.group_elements_box.getcurselection()
props=self.data_prop_box.getcurselection()
#
if not getattr(self,'structmappings',None):
self.structmappings={}
datatab=self.currentdataset.get()
if not self.structmappings.has_key(datatab):
self.structmappings[datatab]={}
#
# Get the dict of current mappings
#
curmappings=self.structmappings[datatab]
map_keys=curmappings.keys()
map_keys.sort()
#
# Get the number of the last mapping
#
last_num=0
if len(map_keys)>0:
last_num=map_keys[-1]
#
# Add the new mapping
#
if props and g_elements and g_type:
self.structmappings[datatab][last_num+1]={'Group type':g_type,'Group elements':g_elements,'Data property':props}
#
# Display the updated list of mappings
#
mappings=self.get_structmappings(datatab)
self.mapping_box.setlist(mappings)
return
#
# ----
#
def get_structmappings(self,datatab):
"""Get a printable list of structural mappings for this datatab"""
if not getattr(self,'structmappings',None):
return []
if self.structmappings.has_key(datatab):
map_keys=self.structmappings[datatab].keys()
map_keys.sort()
mappings=[]
for map_key in map_keys:
thismap=self.structmappings[datatab][map_key]
mappings.append('%2d: %s mapped to type "%s" elements %s' %(map_key,thismap['Data property'],thismap['Group type'],thismap['Group elements']))
else:
mappings=[]
return mappings
#
# -----
#
def delete_mapping(self):
"""Delete a structmapping"""
delete=self.mapping_box.getcurselection()
if len(delete)==0:
print 'length is zero'
return
delete=str(delete[0])
number=int(delete.split(':')[0])
print 'NUMBER',number
datatab=self.currentdataset.get()
print self.structmappings.keys()
if self.structmappings.has_key(datatab):
if self.structmappings[datatab].has_key(number):
del self.structmappings[datatab][number]
mappings=self.get_structmappings(datatab)
self.mapping_box.setlist(mappings)
return
#
# -----
#
def update_mapping_window(self):
"""Update the mapping window when we change datatabs"""
#
# Update list of current mappings
#
datatab=self.currentdataset.get()
mappings=self.get_structmappings(datatab)
self.mapping_box.setlist(mappings)
#
# Update List of parameters
#
dataprops=['Data source']+self.FIT.parameter_names
self.data_prop_box.setlist(dataprops)
return
def get_assigned(self):
"""Get all unique assigned elements from the mapping dict"""
if not getattr(self,'structmappings',None):
return []
assigned=[]
for key in self.structmappings.keys():
for val in self.structmappings[key].keys():
elements=self.structmappings[key][val]['Group elements']
for e in elements:
if not e in assigned:
assigned.append(e)
return assigned
#
# -----
#
def export_dialog(self):
if hasattr(self, 'export_win'):
if self.export_win != None :
self.export_win.deiconify()
return
self.export_win=Toplevel()
self.export_win.title('Export mappings')
self.set_geometry(self.ekin_win,self.export_win)
#self.setgeometry(self.ekin_win,self.export_win)
self.grouptype = StringVar() #group type
grptypes=['Residues','Atoms','Titratable groups','Any']
self.grouptype.set(grptypes[0])
self.assignedto = StringVar() #titratable group assigned
#self.expdataprops=['Data source']+self.FIT.parameter_names
self.expdataprops=['Data source','pK','span','offset']
self.dataprop = StringVar() #required property
self.dataprop.set(self.expdataprops[0])
elements=self.get_assigned()
elements.append('All')
elements.sort()
self.assignedto.set(elements[0])
row=0
help=Label(self.export_win,text='Select an assigned residue/element from the\n'
+'list of current mappings.\n'
+'A file will be created for the chosen group element',
bg='#CFECEC' )
help.grid(row=row,column=0,columnspan=2,sticky='news',padx=2,pady=2)
row=1
#drop down labels for grp element, data property and assignedto
Label(self.export_win,text='Assigned:').grid(row=row,column=0,sticky='news',padx=2,pady=2)
w = OptionMenu(self.export_win, self.assignedto, *elements)
w.grid(row=row,column=1,sticky='news',padx=2,pady=2)
'''row=row+1
Label(self.export_win,text='group type:').grid(row=row,column=0,sticky='news',padx=2,pady=2)
w = OptionMenu(self.export_win, self.grouptype, *grptypes)
w.grid(row=row,column=1,sticky='news',padx=2,pady=2)'''
row=row+1
Label(self.export_win,text='data property:').grid(row=row,column=0,sticky='news',padx=2,pady=2)
print self.dataprops
w = OptionMenu(self.export_win, self.dataprop, *self.expdataprops)
w.grid(row=row,column=1,sticky='news',padx=2,pady=2)
row=row+1
Button(self.export_win,text='Cancel',bg='#CFECEC',borderwidth=2, relief=GROOVE, width=10,
command=self.close_exp_dialog).grid(row=row,column=0,sticky='news',padx=2,pady=2)
Button(self.export_win,text='Go',bg='#CFECEC',borderwidth=2, relief=GROOVE, width=10,
command=self.export_as_csv).grid(row=row,column=1,sticky='news',padx=2,pady=2)
return
def close_exp_dialog(self):
if hasattr(self,'export_win'):
self.export_win.destroy()
self.export_win=None
return
def choose_savedir(self):
"""Get a directory to save to"""
import tkFileDialog, os
if self.defaultsavedir == None:
self.defaultsavedir = os.getcwd()
dirname=tkFileDialog.askdirectory(parent=self.export_win,
initialdir=self.defaultsavedir)
if not dirname:
print 'Returning'
return None
return dirname
#
# -----
#
def export_as_csv(self):
"""export struct mapping for specific filters as csv"""
#prompt user for save dir
savedir = self.choose_savedir()
if savedir==None:
return
if self.currplatform == 'Windows':
print 'using windows'
import List_Utils
#sub function for tidiness
def getexplist(assignedto):
reslist={}
reskeys=[]
for key in self.structmappings.keys():
for n in self.structmappings[key].keys():
#check if any dataprop list element contains the key eg 'pK' in pK1, pK2 etc..
datapropkey = List_Utils.elements_contain(self.structmappings[key][n]['Data property'], self.dataprop.get())
if datapropkey != None:
#try to extract the value from the ekin dataset
val = self.get_dataprop_value(key, datapropkey)
print 'found ',val,' for ', datapropkey
#print 'val: ', val
#iterate over group elements list
elements=self.structmappings[key][n]['Group elements']
for e in elements:
if assignedto in e:
reslist[key] = ([key,val])
reskeys.append(key)
if len(reslist.keys())==0:
return
#write the list to a csv file, first add heading
import string
#remove whitespace
name=string.join(assignedto.split(), '')
name=name.replace(':', '')
if self.currplatform == 'Windows':
filename = savedir+'/'+name+'.csv'
else:
filename = os.path.join(savedir, name+'.csv')
print filename
            #write the heading first and close the handle, so buffered output
            #cannot land after the csv rows appended below
            writer = open(filename, "wb")
            writer.write(assignedto+'\n')
            writer.close()
            import csv
            csvfile = open(filename, "ab")
            csvwriter = csv.writer(csvfile)
keyssorted = self.sort_by_Num(reskeys)
#print reslist
#print keyssorted
p=[];names=[]
#use key sorted mapping to list residues by number
for item in keyssorted:
k=item[1]
csvwriter.writerow(reslist[k])
p.append(reslist[k][1])
names.append(k)
            csvfile.close()
#do a plot and save to same dir as file
            try:
                import pylab
                import numpy
            except ImportError:
                return
f=pylab.figure(figsize=(10,4))
pylab.rc("font", family='serif')
a=f.add_subplot(111)
ind=numpy.arange(len(names))
a.bar(ind, p , linewidth=0.5)
a.set_xticks(ind)
a.set_ylabel(self.dataprop.get())
a.set_title(name+' assignments')
a.set_xticklabels(names, rotation='vertical', size=5)
f.savefig(savedir+'/'+name+'.png',dpi=300)
return
if self.assignedto.get() == 'All':
for a in self.get_assigned():
getexplist(a)
else:
getexplist(self.assignedto.get())
self.close_exp_dialog()
return
#
# -----
#
def get_dataprop_value(self, key, dataprop):
"""Annoying but necessary helper func to get value of assigned property
from the ekin fit data"""
tabnum = self.currentdataset.get()
if self.fitter_data.has_key(key):
fitdata = self.fitter_data[key]
else:
return None
model = fitdata['model']
#extracts index number from fit model field name
i = self.FIT.get_param_index(dataprop, model)
print tabnum, key
print fitdata, dataprop, i
        if i is not None:
val = fitdata[i]
return val
#<|fim▁hole|> def create_new_structgroup(self):
return
#
# ------
#
def add_to_structgroup(self):
return
def sort_by_Num(self, p):
"""Sort text keys by contained numbers - should be put in utils class"""
splitkeys={}
import re
        r=re.compile(r'\D')
for k in p:
splitkeys[k]=int(r.split(k)[1])
items = splitkeys.items()
items = [(v, k) for (k, v) in items]
items.sort()
return items<|fim▁end|> | # -----
# |
<|file_name|>virtual-scroll-viewport.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ListRange} from '@angular/cdk/collections';
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ElementRef,
Inject,
Input,
NgZone,
OnDestroy,
OnInit,
ViewChild,
ViewEncapsulation,
} from '@angular/core';
import {DomSanitizer, SafeStyle} from '@angular/platform-browser';
import {animationFrameScheduler, fromEvent, Observable, Subject} from 'rxjs';
import {sampleTime, take, takeUntil} from 'rxjs/operators';
import {CdkVirtualForOf} from './virtual-for-of';
import {VIRTUAL_SCROLL_STRATEGY, VirtualScrollStrategy} from './virtual-scroll-strategy';
/** Checks if the given ranges are equal. */
function rangesEqual(r1: ListRange, r2: ListRange): boolean {
return r1.start == r2.start && r1.end == r2.end;
}
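// Example (sketch): two ranges are equal only when both endpoints match, e.g.
// rangesEqual({start: 0, end: 5}, {start: 0, end: 5}) is true, while
// rangesEqual({start: 0, end: 5}, {start: 0, end: 6}) is false.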
/** A viewport that virtualizes its scrolling with the help of `CdkVirtualForOf`. */
@Component({
moduleId: module.id,
selector: 'cdk-virtual-scroll-viewport',
templateUrl: 'virtual-scroll-viewport.html',
styleUrls: ['virtual-scroll-viewport.css'],
host: {
'class': 'cdk-virtual-scroll-viewport',
'[class.cdk-virtual-scroll-orientation-horizontal]': 'orientation === "horizontal"',
'[class.cdk-virtual-scroll-orientation-vertical]': 'orientation === "vertical"',
},
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class CdkVirtualScrollViewport implements OnInit, OnDestroy {
/** Emits when the viewport is detached from a CdkVirtualForOf. */
private _detachedSubject = new Subject<void>();
/** Emits when the rendered range changes. */
private _renderedRangeSubject = new Subject<ListRange>();
/** The direction the viewport scrolls. */
@Input() orientation: 'horizontal' | 'vertical' = 'vertical';
/** The element that wraps the rendered content. */
@ViewChild('contentWrapper') _contentWrapper: ElementRef;
/** A stream that emits whenever the rendered range changes. */
renderedRangeStream: Observable<ListRange> = this._renderedRangeSubject.asObservable();
/**
* The total size of all content (in pixels), including content that is not currently rendered.
*/
_totalContentSize = 0;
/** The transform used to offset the rendered content wrapper element. */
_renderedContentTransform: SafeStyle;
/** The raw string version of the rendered content transform. */
private _rawRenderedContentTransform: string;
/** The currently rendered range of indices. */
private _renderedRange: ListRange = {start: 0, end: 0};
/** The length of the data bound to this viewport (in number of items). */
private _dataLength = 0;
/** The size of the viewport (in pixels). */
private _viewportSize = 0;
/** The pending scroll offset to be applied during the next change detection cycle. */
private _pendingScrollOffset: number | null;
  /** The currently attached CdkVirtualForOf. */
private _forOf: CdkVirtualForOf<any> | null;
/** The last rendered content offset that was set. */
private _renderedContentOffset = 0;
/**
* Whether the last rendered content offset was to the end of the content (and therefore needs to
* be rewritten as an offset to the start of the content).
*/
private _renderedContentOffsetNeedsRewrite = false;
/** Observable that emits when the viewport is destroyed. */
private _destroyed = new Subject<void>();
/** Whether there is a pending change detection cycle. */
private _isChangeDetectionPending = false;
/** A list of functions to run after the next change detection cycle. */
private _runAfterChangeDetection: Function[] = [];
constructor(public elementRef: ElementRef, private _changeDetectorRef: ChangeDetectorRef,
private _ngZone: NgZone, private _sanitizer: DomSanitizer,
@Inject(VIRTUAL_SCROLL_STRATEGY) private _scrollStrategy: VirtualScrollStrategy) {}
ngOnInit() {
// It's still too early to measure the viewport at this point. Deferring with a promise allows
// the Viewport to be rendered with the correct size before we measure. We run this outside the
// zone to avoid causing more change detection cycles. We handle the change detection loop
// ourselves instead.
this._ngZone.runOutsideAngular(() => Promise.resolve().then(() => {
this._measureViewportSize();
this._scrollStrategy.attach(this);
fromEvent(this.elementRef.nativeElement, 'scroll')
// Sample the scroll stream at every animation frame. This way if there are multiple
// scroll events in the same frame we only need to recheck our layout once.
.pipe(sampleTime(0, animationFrameScheduler), takeUntil(this._destroyed))
.subscribe(() => this._scrollStrategy.onContentScrolled());
this._markChangeDetectionNeeded();
}));
}
ngOnDestroy() {
this.detach();
this._scrollStrategy.detach();
this._destroyed.next();
// Complete all subjects
this._renderedRangeSubject.complete();
this._detachedSubject.complete();
this._destroyed.complete();
}
/** Attaches a `CdkVirtualForOf` to this viewport. */
attach(forOf: CdkVirtualForOf<any>) {
if (this._forOf) {
throw Error('CdkVirtualScrollViewport is already attached.');
}
// Subscribe to the data stream of the CdkVirtualForOf to keep track of when the data length
// changes. Run outside the zone to avoid triggering change detection, since we're managing the
// change detection loop ourselves.
this._ngZone.runOutsideAngular(() => {
this._forOf = forOf;
this._forOf.dataStream.pipe(takeUntil(this._detachedSubject)).subscribe(data => {
const newLength = data.length;
if (newLength !== this._dataLength) {
this._dataLength = newLength;
this._scrollStrategy.onDataLengthChanged();
}
});
});
}
/** Detaches the current `CdkVirtualForOf`. */
detach() {
this._forOf = null;
this._detachedSubject.next();
}
/** Gets the length of the data bound to this viewport (in number of items). */
getDataLength(): number {
return this._dataLength;
}
/** Gets the size of the viewport (in pixels). */
getViewportSize(): number {
return this._viewportSize;
}
// TODO(mmalerba): This is technically out of sync with what's really rendered until a render
// cycle happens. I'm being careful to only call it after the render cycle is complete and before
  // setting it to something else, but it's error prone and should probably be split into
  // `pendingRange` and `renderedRange`, the latter reflecting what's actually in the DOM.
/** Get the current rendered range of items. */
getRenderedRange(): ListRange {
return this._renderedRange;
}
/**
* Sets the total size of all content (in pixels), including content that is not currently
* rendered.
*/
setTotalContentSize(size: number) {
if (this._totalContentSize !== size) {
this._totalContentSize = size;
this._markChangeDetectionNeeded();
}
}
/** Sets the currently rendered range of indices. */
setRenderedRange(range: ListRange) {
if (!rangesEqual(this._renderedRange, range)) {
this._renderedRangeSubject.next(this._renderedRange = range);
this._markChangeDetectionNeeded(() => this._scrollStrategy.onContentRendered());
}
}
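  // Usage sketch (hypothetical scroll-strategy code; `itemSize` and
  // `renderedCount` are assumed values, not members of this class):
  //   const start = Math.floor(viewport.measureScrollOffset() / itemSize);
  //   viewport.setRenderedRange({start, end: start + renderedCount});
  //   viewport.setRenderedContentOffset(itemSize * start);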
/**
* Gets the offset from the start of the viewport to the start of the rendered data (in pixels).
*/
getOffsetToRenderedContentStart(): number | null {
return this._renderedContentOffsetNeedsRewrite ? null : this._renderedContentOffset;
}
/**
* Sets the offset from the start of the viewport to either the start or end of the rendered data
* (in pixels).
*/
setRenderedContentOffset(offset: number, to: 'to-start' | 'to-end' = 'to-start') {
const axis = this.orientation === 'horizontal' ? 'X' : 'Y';
let transform = `translate${axis}(${Number(offset)}px)`;
this._renderedContentOffset = offset;
if (to === 'to-end') {
// TODO(mmalerba): The viewport should rewrite this as a `to-start` offset on the next render
// cycle. Otherwise elements will appear to expand in the wrong direction (e.g.
// `mat-expansion-panel` would expand upward).
transform += ` translate${axis}(-100%)`;
this._renderedContentOffsetNeedsRewrite = true;
}
if (this._rawRenderedContentTransform != transform) {
// We know this value is safe because we parse `offset` with `Number()` before passing it
// into the string.<|fim▁hole|> if (this._renderedContentOffsetNeedsRewrite) {
this._renderedContentOffset -= this.measureRenderedContentSize();
this._renderedContentOffsetNeedsRewrite = false;
this.setRenderedContentOffset(this._renderedContentOffset);
} else {
this._scrollStrategy.onRenderedOffsetChanged();
}
});
}
}
/** Sets the scroll offset on the viewport. */
setScrollOffset(offset: number) {
// Rather than setting the offset immediately, we batch it up to be applied along with other DOM
// writes during the next change detection cycle.
this._pendingScrollOffset = offset;
this._markChangeDetectionNeeded();
}
/** Gets the current scroll offset of the viewport (in pixels). */
measureScrollOffset(): number {
return this.orientation === 'horizontal' ?
this.elementRef.nativeElement.scrollLeft : this.elementRef.nativeElement.scrollTop;
}
/** Measure the combined size of all of the rendered items. */
measureRenderedContentSize(): number {
const contentEl = this._contentWrapper.nativeElement;
return this.orientation === 'horizontal' ? contentEl.offsetWidth : contentEl.offsetHeight;
}
/**
* Measure the total combined size of the given range. Throws if the range includes items that are
* not rendered.
*/
measureRangeSize(range: ListRange): number {
if (!this._forOf) {
return 0;
}
return this._forOf.measureRangeSize(range, this.orientation);
}
/** Update the viewport dimensions and re-render. */
checkViewportSize() {
    // TODO: Clean up later when we add logic for handling content resize.
this._measureViewportSize();
this._scrollStrategy.onDataLengthChanged();
}
/** Measure the viewport size. */
private _measureViewportSize() {
const viewportEl = this.elementRef.nativeElement;
this._viewportSize = this.orientation === 'horizontal' ?
viewportEl.clientWidth : viewportEl.clientHeight;
}
/** Queue up change detection to run. */
private _markChangeDetectionNeeded(runAfter?: Function) {
if (runAfter) {
this._runAfterChangeDetection.push(runAfter);
}
// Use a Promise to batch together calls to `_doChangeDetection`. This way if we set a bunch of
// properties sequentially we only have to run `_doChangeDetection` once at the end.
if (!this._isChangeDetectionPending) {
this._isChangeDetectionPending = true;
this._ngZone.runOutsideAngular(() => Promise.resolve().then(() => {
if (this._ngZone.isStable) {
this._doChangeDetection();
} else {
this._ngZone.onStable.pipe(take(1)).subscribe(() => this._doChangeDetection());
}
}));
}
}
/** Run change detection. */
private _doChangeDetection() {
this._isChangeDetectionPending = false;
// Apply changes to Angular bindings.
this._ngZone.run(() => this._changeDetectorRef.detectChanges());
// Apply the pending scroll offset separately, since it can't be set up as an Angular binding.
if (this._pendingScrollOffset != null) {
if (this.orientation === 'horizontal') {
this.elementRef.nativeElement.scrollLeft = this._pendingScrollOffset;
} else {
this.elementRef.nativeElement.scrollTop = this._pendingScrollOffset;
}
}
for (let fn of this._runAfterChangeDetection) {
fn();
}
this._runAfterChangeDetection = [];
}
}<|fim▁end|> | this._rawRenderedContentTransform = transform;
this._renderedContentTransform = this._sanitizer.bypassSecurityTrustStyle(transform);
this._markChangeDetectionNeeded(() => { |
<|file_name|>Sbs.py<|end_file_name|><|fim▁begin|>class Sbs:
def __init__(self, sbsFilename, sbc_filename, newSbsFilename):
import xml.etree.ElementTree as ET
import Sbc
self.mySbc = Sbc.Sbc(sbc_filename)
self.sbsTree = ET.parse(sbsFilename)
self.sbsRoot = self.sbsTree.getroot()
self.XSI_TYPE = "{http://www.w3.org/2001/XMLSchema-instance}type"
self.newSbsFilename = newSbsFilename
def findPlayerBySteamID(self, steam_id):
if (steam_id == 0):
return False
print("looking for player with steamID of %s" % steam_id)
ourPlayerDict = self.mySbc.getPlayerDict()
for player in ourPlayerDict:
# print playerDict[player]['steamID']
if ourPlayerDict[player]['steamID'] == steam_id:
return ourPlayerDict[player]
# if we don't find the user
return False
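    # Usage sketch (dict shape assumed from Sbc.getPlayerDict):
    #   player = mySbs.findPlayerBySteamID('76561198000000000')
    #   if player:
    #       print("online player: %s" % player['username'])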
def giveReward(self, rewardOwner, rewardType, rewardAmount):
"""
This method will hunt down the first cargo container owned by
<Owner> matching their ingame ID, and with with "CustomName"
of "LOOT" and place the rewards in it
"""
import xml.etree.ElementTree as ET
print("trying to give %s %s units of %s" % (rewardOwner, rewardAmount, rewardType))
for sectorObjects in self.sbsRoot.iter('SectorObjects'):
for entityBase in sectorObjects.iter('MyObjectBuilder_EntityBase'):
# EntityId = entityBase.find('EntityId')
# print ("checking entityID %s" % EntityId.text)
gridSize = entityBase.find('GridSizeEnum')
                # TODO: some kind of warning if we have a reward to give, but can't find this user's LOOT container
if hasattr(gridSize, 'text'):
cubeBlocks = entityBase.find('CubeBlocks')
for myCubeBlock in cubeBlocks.iter('MyObjectBuilder_CubeBlock'):
owner = myCubeBlock.find("Owner")
EntityId = myCubeBlock.find('EntityId')
customName = myCubeBlock.find('CustomName')
if hasattr(owner, 'text') and owner.text == rewardOwner and myCubeBlock.get(self.XSI_TYPE) == "MyObjectBuilder_CargoContainer" and hasattr(customName, 'text'):
if "LOOT" in customName.text:
print("I found a cargo container owned by %s with entityID of %s and name of %s" % (owner.text, EntityId.text, customName.text))
componentContainer = myCubeBlock.find('ComponentContainer')
components = componentContainer.find('Components')
componentData = components.find('ComponentData')
component = componentData.find('Component')
items = component.find('Items')
itemCount = 0
for myInventoryItems in items.iter('MyObjectBuilder_InventoryItem'):
itemCount += 1
print("planning to add %s of %s into it as item %s" % (rewardAmount, rewardType, itemCount))
# <MyObjectBuilder_InventoryItem>
# <Amount>200</Amount>
# <PhysicalContent xsi:type="MyObjectBuilder_Ore">
# <SubtypeName>Uranium</SubtypeName> ## from rewardType
# </PhysicalContent>
# <ItemId>4</ItemId> ## from itemCount
# <AmountDecimal>200</AmountDecimal> ## from rewardAmount
# </MyObjectBuilder_InventoryItem>
# myCubeBlock.append((ET.fromstring('<MyObjectBuilder_InventoryItem><Amount>123456789</Amount></MyObjectBuilder_InventoryItem>')))
inventoryItem = ET.SubElement(items, 'MyObjectBuilder_InventoryItem')
amount = ET.SubElement(inventoryItem, 'Amount')
amount.text = str(rewardAmount)
physicalContent = ET.SubElement(inventoryItem, 'PhysicalContent')
physicalContent.set(self.XSI_TYPE, 'MyObjectBuilder_Ore')
subtypeName = ET.SubElement(physicalContent, 'SubtypeName')
subtypeName.text = rewardType
itemId = ET.SubElement(inventoryItem, 'ItemId')
itemId.text = str(itemCount)
amountDecimal = ET.SubElement(inventoryItem, 'AmountDecimal')
amountDecimal.text = str(rewardAmount)
nextItemId = component.find('nextItemId')
nextItemId.text = str(itemCount + 1)
    # FIXME: this makes a mess of the xml formatting, figure out a way to clean it up?
def removeFloaters(self):
import xml.etree.ElementTree as ET
removedCount = 0
warnCount = 0
for sectorObjects in self.sbsRoot.iter('SectorObjects'):
for entityBase in sectorObjects.iter('MyObjectBuilder_EntityBase'):
cubeGridID = entityBase.find('EntityId')
gridSizeEnum = entityBase.find('GridSizeEnum')
objectType = entityBase.get(self.XSI_TYPE)
isStatic = entityBase.find('IsStatic') # FIXME: this does not do what I thought it did. Tested with simple station, and it isn't set as static when I build it from scratch.
# TODO: only way I can see to easily fix is check for <Forward x="-0" y="-0" z="-1" /> for static things
# print cubeGridID.text if hasattr(cubeGridID, 'text') else 'not defined'
if hasattr(cubeGridID, 'text'):
print("Grid EntityID: %s " % cubeGridID.text)
else:
print("FIXME: no gridID")
# print ("\t is objectType %s" % objectType )
if hasattr(isStatic, 'text'):
# this is a base, all of our checks are null and void. Bases don't float or cost me CPU
print("\t skipping trash checks because this IsStatic")
continue
if hasattr(gridSizeEnum, 'text'):
# is a grid, small or large
gridName = entityBase.find('DisplayName').text
print("\t is a grid size %s %s" % (gridSizeEnum.text, gridName))
# if the name contains DEL.WRN
if "[DEL.WRN]" in gridName:
print("\t ALREADY HAD DEL.WRN in the NAME, GOODBYE")
sectorObjects.remove(entityBase)
removedCount += 1
else:
# it doesn't have a DEL WRN yet, lets check for our rules
# TODO: look through the whole entityBase for 6 thrusters, a power supply, and at least one block not owned by pirates
thrusterCount = 0
powerSource = 0
controlSurface = 0
gyroCount = 0
turretCount = 0
ownerCount = 0
ownedThings = 0
ownerList = []
cubeBlocks = entityBase.find('CubeBlocks')
for myCubeBlock in cubeBlocks.iter('MyObjectBuilder_CubeBlock'):
owner = myCubeBlock.find("Owner")
# subtype = myCubeBlock.find('SubtypeName')
cubeType = myCubeBlock.get(self.XSI_TYPE)
entityID = myCubeBlock.find("EntityId")
# print ("\t\tTODO: cubeType of: %s" % cubeType)
if "Thrust" in cubeType:
thrusterCount += 1
elif "Cockpit" in cubeType:
controlSurface += 1
elif "Reactor" in cubeType:
powerSource += 1
elif "SolarPanel" in cubeType:
powerSource += 1
elif "RemoteControl" in cubeType:
controlSurface += 1
elif "Gyro" in cubeType:
gyroCount += 1
elif "Turret" in cubeType:
turretCount += 1
if hasattr(owner, 'text'):
# print ("\tOwner: %s" % owner.text)
if owner.text not in ownerList:
ownerList.append(owner.text)
ownerCount += 1
ownedThings += 1 # TODO: this is how many blocks have an owner, above is distinct owners of this grid
print("\t totals: %s %s %s %s %s %s %s" % (thrusterCount, powerSource, controlSurface, gyroCount, turretCount, ownerCount, len(ownerList)))
# TODO: if it fails all my tests,
# [CHECK] set name to [DEL.WRN]
# set ShowOnHUD to True ## can't, this is per cube. Ignore this.
if (thrusterCount < 6 or controlSurface < 1 or powerSource < 1 or gyroCount < 1 or ownerCount < 1):
print("\tWARNING: THIS GRID IS DUE TO DELETE")
gridNameToUpdate = entityBase.find('DisplayName')
gridNameToUpdate.text = "[DEL.WRN]" + gridNameToUpdate.text
print("\tname is now: %s" % gridNameToUpdate.text)
warnCount += 1
for myCubeBlock in cubeBlocks.iter('MyObjectBuilder_CubeBlock'):
                            # add a DeformationRatio to every cube (right up under Owner), e.g. <DeformationRatio>.77</DeformationRatio>
deformationElement = ET.SubElement(myCubeBlock, "DeformationRatio")
deformationElement.text = ".77"
# myCubeBlock.append('DeformationRatio', '.77')
else:
if (objectType == "MyObjectBuilder_FloatingObject"):
print("\t GOODBYE")
sectorObjects.remove(entityBase)
removedCount += 1
elif (objectType == "MyObjectBuilder_ReplicableEntity"):
# print ("\t Backpack!")
backPackName = entityBase.find('Name')
if hasattr(backPackName, 'text'):
print("\t Backpackname: %s" % backPackName.text)
print("\t GOODBYE")
sectorObjects.remove(entityBase)
removedCount += 1
elif (objectType == "MyObjectBuilder_VoxelMap"):
voxelStorageName = entityBase.find('StorageName')
if hasattr(voxelStorageName, 'text'):
print("\t voxelStorageName: %s" % voxelStorageName.text)
elif (objectType == "MyObjectBuilder_Character"):
# oops, someone was online
# entityID matches CharacterEntityId in the sbc
entityID = entityBase.find('EntityId').text # steamID
print("\t looking for %s entityID in playerDict" % entityID)
thisPlayersDict = self.findPlayerBySteamID(entityID) # returns False if we didn't have this players steamID in the sbc, meaning they weren't online
if (thisPlayersDict is not False and entityID is not False):
print("\t Sorry player: %s %s" % (entityID, thisPlayersDict["username"]))
else:
print("\tFIXME: this player was online, but I don't have their steamID of %s in the sbc" % entityID)<|fim▁hole|> # tree = ET.ElementTree(sbsRoot)
# sbsRoot.attrib["xmlns:xsd"]="http://www.w3.org/2001/XMLSchema"
# tree.write(newSbsFileName, encoding='utf-8', xml_declaration=True)
return (removedCount, warnCount)
def writeFile(self):
import xml.etree.ElementTree as ET
print("writing tree out to %s" % self.newSbsFilename)
tree = ET.ElementTree(self.sbsRoot)
self.sbsRoot.attrib["xmlns:xsd"] = "http://www.w3.org/2001/XMLSchema"
tree.write(self.newSbsFilename, encoding='utf-8', xml_declaration=True)<|fim▁end|> | else:
print("\t ##### has no grid size")
# print ("writing tree out to %s" % newSbsFileName) |
<|file_name|>sitemaps.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import Sitemap
from . import models
class BlogSitemap(Sitemap):
changefreq = "daily"
priority = 0.5
def items(self):
return models.Post.objects.filter(is_draft=False)
def lastmod(self, obj):
return obj.update_time
class PageSitemap(Sitemap):
changefreq = "monthly"
priority = 0.5
def items(self):
return models.Page.objects.filter(is_draft=False)
def lastmod(self, obj):
return obj.update_time
# class CategorySitemap(Sitemap):
# changefreq = "weekly"
# priority = 0.6
# def items(self):
# return models.Category.objects.all()
# class TagSitemap(Sitemap):
# changefreq = "weekly"
# priority = 0.6
# def items(self):
# return models.Tag.objects.all()<|fim▁hole|> 'page': PageSitemap,
# 'category': CategorySitemap,
# 'tag': TagSitemap,
}<|fim▁end|> |
sitemaps = {
'blog': BlogSitemap, |
<|file_name|>problems.py<|end_file_name|><|fim▁begin|>from transitfeed import TYPE_ERROR, TYPE_WARNING, TYPE_NOTICE
from oba_rvtd_monitor.feedvalidator import LimitPerTypeProblemAccumulator
<|fim▁hole|><|fim▁end|> | class MonitoringProblemAccumulator(LimitPerTypeProblemAccumulator):
pass |
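# Hypothetical usage sketch (argument names assumed, not verified against this
# feedvalidator version): the accumulator is normally handed to a problem
# reporter so repeated problems of one type are capped during validation, e.g.
#   accumulator = MonitoringProblemAccumulator(limit_per_type=5)
#   problems = transitfeed.ProblemReporter(accumulator)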
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from urllib.parse import urlparse
<|fim▁hole|>from allauth.socialaccount.providers.oauth2.views import (
OAuth2CallbackView,
OAuth2LoginView,
)
from kuma.core.decorators import redirect_in_maintenance_mode
from kuma.core.ga_tracking import ACTION_AUTH_STARTED, CATEGORY_SIGNUP_FLOW, track_event
class KumaOAuth2LoginView(OAuth2LoginView):
def dispatch(self, request):
# TODO: Figure out a way to NOT trigger the "ACTION_AUTH_STARTED" when
# simply following the link. We've seen far too many submissions when
# curl or some browser extensions follow the link but not actually being
# users who proceed "earnestly".
# For now, to make a simple distinction between uses of `curl` and normal
# browser clicks we check that a HTTP_REFERER is actually set and comes
# from the same host as the request.
# Note! This is the same in kuma.users.providers.github.KumaOAuth2LoginView
# See https://github.com/mdn/kuma/issues/6759
http_referer = request.META.get("HTTP_REFERER")
if http_referer:
if urlparse(http_referer).netloc == request.get_host():
track_event(CATEGORY_SIGNUP_FLOW, ACTION_AUTH_STARTED, "google")
return super().dispatch(request)
oauth2_login = redirect_in_maintenance_mode(
KumaOAuth2LoginView.adapter_view(GoogleOAuth2Adapter)
)
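# Hypothetical wiring sketch (URL names assumed): this login view and the
# callback below are typically exposed through allauth's provider urlpatterns,
# along the lines of
#   url(r"^login/$", oauth2_login, name="google_login"),
#   url(r"^login/callback/$", oauth2_callback, name="google_callback"),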
oauth2_callback = redirect_in_maintenance_mode(
OAuth2CallbackView.adapter_view(GoogleOAuth2Adapter)
)<|fim▁end|> | from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter |
<|file_name|>orffasta.py<|end_file_name|><|fim▁begin|>"""
Creates fasta file from orfs
"""
import argparse<|fim▁hole|>import sys
import site
import re
import numpy as np
import numpy.random
base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
base_path="%s/src"%base_path
for directory_name in os.listdir(base_path):
site.addsitedir(os.path.join(base_path, directory_name))
import fasta
import faa
import gff
import cPickle
if __name__=="__main__":
parser = argparse.ArgumentParser(description=\
'Extracts orfs and creates fasta files')
parser.add_argument('--gff', type=str, required=False,
help='Input gff file')
parser.add_argument('--fasta', type=str, required=False,
help='Input fasta file')
parser.add_argument('--faidx', type=str, required=False,
help='Input fasta file index')
parser.add_argument('--faa', type=str, required=False,
help='Input fasta file for proteins')
parser.add_argument('--pickle', type=str, required=False,default=None,
help='Pickle file containing all clusters')
parser.add_argument('--out', type=str, required=False,
help='Output file for translated orfs')
args = parser.parse_args()
queries = []
print "Started"
if not os.path.exists("faa.pickle"):
print "Creating pickle"
faaindex = faa.FAA(args.faa)
faaindex.index()
print len(faaindex.indexer)
assert 'AAD07798.1' in faaindex.indexer,"len=%d"%len(faaindex.indexer)
faaindex.dump("faa.pickle")
else:
print "Loading pickle"
faaindex = faa.FAA(args.faa)
faaindex.load("faa.pickle")
    if args.pickle is None:
clusters = clique_filter.findContextGeneClusters(all_hits,faidx,backtrans=False,
functions=["toxin","transport"])
        cPickle.dump(clusters, open(args.pickle,'wb'))
else:
clusters,_ = cPickle.load(open(args.pickle,'rb'))
gff = gff.GFF(args.gff,fasta_file=args.fasta,fasta_index=args.faidx)
for cluster in clusters:
for node in cluster:
acc,clrname,full_evalue,hmm_st,hmm_end,env_st,env_end,description = node.split('|')
function = clrname.split('.')[0]
if function=='toxin':
queries.append((acc,clrname,full_evalue,hmm_st,hmm_end,env_st,env_end,description))
gff.translate_orfs(queries,faaindex,args.out)<|fim▁end|> | import os |
<|file_name|>transform.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Generic types for CSS values that are related to transformations.
use crate::values::computed::length::Length as ComputedLength;
use crate::values::computed::length::LengthPercentage as ComputedLengthPercentage;
use crate::values::specified::angle::Angle as SpecifiedAngle;
use crate::values::specified::length::Length as SpecifiedLength;
use crate::values::specified::length::LengthPercentage as SpecifiedLengthPercentage;
use crate::values::{computed, CSSFloat};
use crate::Zero;
use app_units::Au;
use euclid;
use euclid::default::{Rect, Transform3D};
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
/// A generic 2D transformation matrix.
#[allow(missing_docs)]
#[derive(
Clone,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[css(comma, function = "matrix")]
#[repr(C)]
pub struct GenericMatrix<T> {
pub a: T,
pub b: T,
pub c: T,
pub d: T,
pub e: T,
pub f: T,
}
pub use self::GenericMatrix as Matrix;
#[allow(missing_docs)]
#[cfg_attr(rustfmt, rustfmt_skip)]
#[css(comma, function = "matrix3d")]
#[derive(
Clone,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C)]
pub struct GenericMatrix3D<T> {
pub m11: T, pub m12: T, pub m13: T, pub m14: T,
pub m21: T, pub m22: T, pub m23: T, pub m24: T,
pub m31: T, pub m32: T, pub m33: T, pub m34: T,
pub m41: T, pub m42: T, pub m43: T, pub m44: T,
}
pub use self::GenericMatrix3D as Matrix3D;
#[cfg_attr(rustfmt, rustfmt_skip)]
impl<T: Into<f64>> From<Matrix<T>> for Transform3D<f64> {
#[inline]
fn from(m: Matrix<T>) -> Self {
Transform3D::row_major(
m.a.into(), m.b.into(), 0.0, 0.0,
m.c.into(), m.d.into(), 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
m.e.into(), m.f.into(), 0.0, 1.0,
)
}
}
#[cfg_attr(rustfmt, rustfmt_skip)]
impl<T: Into<f64>> From<Matrix3D<T>> for Transform3D<f64> {
#[inline]
fn from(m: Matrix3D<T>) -> Self {
Transform3D::row_major(
m.m11.into(), m.m12.into(), m.m13.into(), m.m14.into(),
m.m21.into(), m.m22.into(), m.m23.into(), m.m24.into(),
m.m31.into(), m.m32.into(), m.m33.into(), m.m34.into(),
m.m41.into(), m.m42.into(), m.m43.into(), m.m44.into(),
)
}
}
/// A generic transform origin.
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C)]
pub struct GenericTransformOrigin<H, V, Depth> {
/// The horizontal origin.
pub horizontal: H,
/// The vertical origin.
pub vertical: V,
/// The depth.
pub depth: Depth,
}
pub use self::GenericTransformOrigin as TransformOrigin;
impl<H, V, D> TransformOrigin<H, V, D> {
/// Returns a new transform origin.
pub fn new(horizontal: H, vertical: V, depth: D) -> Self {
Self {
horizontal,
vertical,
depth,
}
}
}
fn is_same<N: PartialEq>(x: &N, y: &N) -> bool {
x == y
}
#[derive(
Clone,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C, u8)]
/// A single operation in the list of a `transform` value
/// cbindgen:derive-tagged-enum-copy-constructor=true
pub enum GenericTransformOperation<Angle, Number, Length, Integer, LengthPercentage>
where
Angle: Zero,
LengthPercentage: Zero,
Number: PartialEq,
{
/// Represents a 2D 2x3 matrix.
Matrix(GenericMatrix<Number>),
/// Represents a 3D 4x4 matrix.
Matrix3D(GenericMatrix3D<Number>),
/// A 2D skew.
///
/// If the second angle is not provided it is assumed zero.
///
/// Syntax can be skew(angle) or skew(angle, angle)
#[css(comma, function)]
Skew(Angle, #[css(skip_if = "Zero::is_zero")] Angle),
/// skewX(angle)
#[css(function = "skewX")]
SkewX(Angle),
/// skewY(angle)
#[css(function = "skewY")]
SkewY(Angle),
/// translate(x, y) or translate(x)
#[css(comma, function)]
Translate(
LengthPercentage,
#[css(skip_if = "Zero::is_zero")] LengthPercentage,
),
/// translateX(x)
#[css(function = "translateX")]
TranslateX(LengthPercentage),
/// translateY(y)
#[css(function = "translateY")]
TranslateY(LengthPercentage),
/// translateZ(z)
#[css(function = "translateZ")]
TranslateZ(Length),
/// translate3d(x, y, z)
#[css(comma, function = "translate3d")]
Translate3D(LengthPercentage, LengthPercentage, Length),
/// A 2D scaling factor.
///
/// Syntax can be scale(factor) or scale(factor, factor)
#[css(comma, function)]
Scale(Number, #[css(contextual_skip_if = "is_same")] Number),
/// scaleX(factor)
#[css(function = "scaleX")]
ScaleX(Number),
/// scaleY(factor)
#[css(function = "scaleY")]
ScaleY(Number),
/// scaleZ(factor)
#[css(function = "scaleZ")]
ScaleZ(Number),
/// scale3D(factorX, factorY, factorZ)
#[css(comma, function = "scale3d")]
Scale3D(Number, Number, Number),
/// Describes a 2D Rotation.
///
/// In a 3D scene `rotate(angle)` is equivalent to `rotateZ(angle)`.
#[css(function)]
Rotate(Angle),
/// Rotation in 3D space around the x-axis.
#[css(function = "rotateX")]
RotateX(Angle),
/// Rotation in 3D space around the y-axis.
#[css(function = "rotateY")]
RotateY(Angle),
/// Rotation in 3D space around the z-axis.
#[css(function = "rotateZ")]
RotateZ(Angle),
/// Rotation in 3D space.
///
/// Generalization of rotateX, rotateY and rotateZ.
#[css(comma, function = "rotate3d")]
Rotate3D(Number, Number, Number, Angle),
/// Specifies a perspective projection matrix.
///
/// Part of CSS Transform Module Level 2 and defined at
/// [§ 13.1. 3D Transform Function](https://drafts.csswg.org/css-transforms-2/#funcdef-perspective).
///
/// The value must be greater than or equal to zero.
#[css(function)]
Perspective(Length),
/// A intermediate type for interpolation of mismatched transform lists.
#[allow(missing_docs)]
#[css(comma, function = "interpolatematrix")]
InterpolateMatrix {
from_list: GenericTransform<
GenericTransformOperation<Angle, Number, Length, Integer, LengthPercentage>,
>,
to_list: GenericTransform<
GenericTransformOperation<Angle, Number, Length, Integer, LengthPercentage>,
>,
progress: computed::Percentage,
},
/// A intermediate type for accumulation of mismatched transform lists.
#[allow(missing_docs)]
#[css(comma, function = "accumulatematrix")]
AccumulateMatrix {
from_list: GenericTransform<
GenericTransformOperation<Angle, Number, Length, Integer, LengthPercentage>,
>,
to_list: GenericTransform<
GenericTransformOperation<Angle, Number, Length, Integer, LengthPercentage>,
>,
count: Integer,
},
}
pub use self::GenericTransformOperation as TransformOperation;
#[derive(
Clone,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C)]
/// A value of the `transform` property
pub struct GenericTransform<T>(#[css(if_empty = "none", iterable)] pub crate::OwnedSlice<T>);
pub use self::GenericTransform as Transform;
impl<Angle, Number, Length, Integer, LengthPercentage>
TransformOperation<Angle, Number, Length, Integer, LengthPercentage>
where
Angle: Zero,
LengthPercentage: Zero,
Number: PartialEq,
{
/// Check if it is any rotate function.
pub fn is_rotate(&self) -> bool {
use self::TransformOperation::*;
matches!(
*self,
Rotate(..) | Rotate3D(..) | RotateX(..) | RotateY(..) | RotateZ(..)
)
}
/// Check if it is any translate function
pub fn is_translate(&self) -> bool {
use self::TransformOperation::*;
match *self {
Translate(..) | Translate3D(..) | TranslateX(..) | TranslateY(..) | TranslateZ(..) => {
true
},
_ => false,
}
}
/// Check if it is any scale function
pub fn is_scale(&self) -> bool {
use self::TransformOperation::*;
match *self {
Scale(..) | Scale3D(..) | ScaleX(..) | ScaleY(..) | ScaleZ(..) => true,
_ => false,
}
}
}
/// Convert a length type into the absolute lengths.
pub trait ToAbsoluteLength {
/// Returns the absolute length as pixel value.
fn to_pixel_length(&self, containing_len: Option<Au>) -> Result<CSSFloat, ()>;
}
impl ToAbsoluteLength for SpecifiedLength {
// This returns Err(()) if there is any relative length or percentage. We use this when
// parsing a transform list of DOMMatrix because we want to return a DOM Exception
    // if there is a relative length.
#[inline]
fn to_pixel_length(&self, _containing_len: Option<Au>) -> Result<CSSFloat, ()> {
match *self {
SpecifiedLength::NoCalc(len) => len.to_computed_pixel_length_without_context(),
SpecifiedLength::Calc(ref calc) => calc.to_computed_pixel_length_without_context(),
}
}
}
impl ToAbsoluteLength for SpecifiedLengthPercentage {
// This returns Err(()) if there is any relative length or percentage. We use this when
// parsing a transform list of DOMMatrix because we want to return a DOM Exception
    // if there is a relative length.
#[inline]
fn to_pixel_length(&self, _containing_len: Option<Au>) -> Result<CSSFloat, ()> {
use self::SpecifiedLengthPercentage::*;
match *self {
Length(len) => len.to_computed_pixel_length_without_context(),
Calc(ref calc) => calc.to_computed_pixel_length_without_context(),
_ => Err(()),
}
}
}
impl ToAbsoluteLength for ComputedLength {
#[inline]
fn to_pixel_length(&self, _containing_len: Option<Au>) -> Result<CSSFloat, ()> {
Ok(self.px())
}
}
impl ToAbsoluteLength for ComputedLengthPercentage {
#[inline]
fn to_pixel_length(&self, containing_len: Option<Au>) -> Result<CSSFloat, ()> {
match containing_len {
Some(relative_len) => Ok(self.to_pixel_length(relative_len).px()),
// If we don't have reference box, we cannot resolve the used value,
// so only retrieve the length part. This will be used for computing
// distance without any layout info.
//
// FIXME(emilio): This looks wrong.
None => Ok(self.length_component().px()),
}
}
}
/// Support the conversion to a 3d matrix.
pub trait ToMatrix {
/// Check if it is a 3d transform function.
fn is_3d(&self) -> bool;
/// Return the equivalent 3d matrix.
fn to_3d_matrix(&self, reference_box: Option<&Rect<Au>>) -> Result<Transform3D<f64>, ()>;
}
/// A little helper to deal with both specified and computed angles.
pub trait ToRadians {
/// Return the radians value as a 64-bit floating point value.
fn radians64(&self) -> f64;
}
impl ToRadians for computed::angle::Angle {
#[inline]
fn radians64(&self) -> f64 {
computed::angle::Angle::radians64(self)
}
}
impl ToRadians for SpecifiedAngle {
#[inline]
fn radians64(&self) -> f64 {
computed::angle::Angle::from_degrees(self.degrees()).radians64()
}
}
impl<Angle, Number, Length, Integer, LoP> ToMatrix
for TransformOperation<Angle, Number, Length, Integer, LoP>
where
Angle: Zero + ToRadians + Copy,
Number: PartialEq + Copy + Into<f32> + Into<f64>,
Length: ToAbsoluteLength,
LoP: Zero + ToAbsoluteLength,
{
#[inline]
fn is_3d(&self) -> bool {
use self::TransformOperation::*;
match *self {
Translate3D(..) | TranslateZ(..) | Rotate3D(..) | RotateX(..) | RotateY(..) |
RotateZ(..) | Scale3D(..) | ScaleZ(..) | Perspective(..) | Matrix3D(..) => true,
_ => false,
}
}
/// If |reference_box| is None, we will drop the percent part from translate because
/// we cannot resolve it without the layout info, for computed TransformOperation.
    /// However, for specified TransformOperation, we will return Err(()) if there are any relative
    /// lengths because the only caller, DOMMatrix, doesn't accept relative lengths.
#[inline]
fn to_3d_matrix(&self, reference_box: Option<&Rect<Au>>) -> Result<Transform3D<f64>, ()> {
use self::TransformOperation::*;
use std::f64;
const TWO_PI: f64 = 2.0f64 * f64::consts::PI;
let reference_width = reference_box.map(|v| v.size.width);
let reference_height = reference_box.map(|v| v.size.height);
let matrix = match *self {
Rotate3D(ax, ay, az, theta) => {
let theta = TWO_PI - theta.radians64();
let (ax, ay, az, theta) =
get_normalized_vector_and_angle(ax.into(), ay.into(), az.into(), theta);
Transform3D::create_rotation(
ax as f64,
ay as f64,
az as f64,
euclid::Angle::radians(theta),
)
},
RotateX(theta) => {
let theta = euclid::Angle::radians(TWO_PI - theta.radians64());
Transform3D::create_rotation(1., 0., 0., theta)
},
RotateY(theta) => {
let theta = euclid::Angle::radians(TWO_PI - theta.radians64());
Transform3D::create_rotation(0., 1., 0., theta)
},
RotateZ(theta) | Rotate(theta) => {
let theta = euclid::Angle::radians(TWO_PI - theta.radians64());
Transform3D::create_rotation(0., 0., 1., theta)
},
Perspective(ref d) => {
let m = create_perspective_matrix(d.to_pixel_length(None)?);
m.cast()
},
Scale3D(sx, sy, sz) => Transform3D::create_scale(sx.into(), sy.into(), sz.into()),
Scale(sx, sy) => Transform3D::create_scale(sx.into(), sy.into(), 1.),
ScaleX(s) => Transform3D::create_scale(s.into(), 1., 1.),
ScaleY(s) => Transform3D::create_scale(1., s.into(), 1.),
ScaleZ(s) => Transform3D::create_scale(1., 1., s.into()),
Translate3D(ref tx, ref ty, ref tz) => {
let tx = tx.to_pixel_length(reference_width)? as f64;
let ty = ty.to_pixel_length(reference_height)? as f64;
Transform3D::create_translation(tx, ty, tz.to_pixel_length(None)? as f64)
},
Translate(ref tx, ref ty) => {
let tx = tx.to_pixel_length(reference_width)? as f64;
let ty = ty.to_pixel_length(reference_height)? as f64;
Transform3D::create_translation(tx, ty, 0.)
},
TranslateX(ref t) => {
let t = t.to_pixel_length(reference_width)? as f64;
Transform3D::create_translation(t, 0., 0.)
},
TranslateY(ref t) => {
let t = t.to_pixel_length(reference_height)? as f64;
Transform3D::create_translation(0., t, 0.)
},
TranslateZ(ref z) => {
Transform3D::create_translation(0., 0., z.to_pixel_length(None)? as f64)
},
Skew(theta_x, theta_y) => Transform3D::create_skew(
euclid::Angle::radians(theta_x.radians64()),
euclid::Angle::radians(theta_y.radians64()),
),
SkewX(theta) => Transform3D::create_skew(
euclid::Angle::radians(theta.radians64()),
euclid::Angle::radians(0.),
),
SkewY(theta) => Transform3D::create_skew(
euclid::Angle::radians(0.),
euclid::Angle::radians(theta.radians64()),
),
Matrix3D(m) => m.into(),
Matrix(m) => m.into(),
InterpolateMatrix { .. } | AccumulateMatrix { .. } => {
// TODO: Convert InterpolateMatrix/AccumulateMatrix into a valid Transform3D by
// the reference box and do interpolation on these two Transform3D matrices.
// Both Gecko and Servo don't support this for computing distance, and Servo
// doesn't support animations on InterpolateMatrix/AccumulateMatrix, so
// return an identity matrix.
// Note: DOMMatrix doesn't go into this arm.
Transform3D::identity()
},
};
Ok(matrix)
}
}
impl<T> Transform<T> {
/// `none`
pub fn none() -> Self {
Transform(Default::default())
}
}
impl<T: ToMatrix> Transform<T> {
/// Return the equivalent 3d matrix of this transform list.
/// We return a pair: the first one is the transform matrix, and the second one
/// indicates if there is any 3d transform function in this transform list.
#[cfg_attr(rustfmt, rustfmt_skip)]
pub fn to_transform_3d_matrix(
&self,
reference_box: Option<&Rect<Au>>
) -> Result<(Transform3D<CSSFloat>, bool), ()> {
let cast_3d_transform = |m: Transform3D<f64>| -> Transform3D<CSSFloat> {
use std::{f32, f64};
let cast = |v: f64| { v.min(f32::MAX as f64).max(f32::MIN as f64) as f32 };
Transform3D::row_major(
cast(m.m11), cast(m.m12), cast(m.m13), cast(m.m14),
cast(m.m21), cast(m.m22), cast(m.m23), cast(m.m24),
cast(m.m31), cast(m.m32), cast(m.m33), cast(m.m34),
cast(m.m41), cast(m.m42), cast(m.m43), cast(m.m44),
)
};
let (m, is_3d) = self.to_transform_3d_matrix_f64(reference_box)?;
Ok((cast_3d_transform(m), is_3d))
}
/// Same as Transform::to_transform_3d_matrix but a f64 version.
pub fn to_transform_3d_matrix_f64(
&self,
reference_box: Option<&Rect<Au>>,
) -> Result<(Transform3D<f64>, bool), ()> {
// We intentionally use Transform3D<f64> during computation to avoid error propagation
// because using f32 to compute triangle functions (e.g. in create_rotation()) is not
// accurate enough. In Gecko, we also use "double" to compute the triangle functions.
// Therefore, let's use Transform3D<f64> during matrix computation and cast it into f32
// in the end.
let mut transform = Transform3D::<f64>::identity();
let mut contain_3d = false;
for operation in &*self.0 {
let matrix = operation.to_3d_matrix(reference_box)?;
contain_3d |= operation.is_3d();
transform = transform.pre_transform(&matrix);
}
Ok((transform, contain_3d))
}
}
/// Return the transform matrix from a perspective length.
#[inline]
pub fn create_perspective_matrix(d: CSSFloat) -> Transform3D<CSSFloat> {
// TODO(gw): The transforms spec says that perspective length must
// be positive. However, there is some confusion between the spec
// and browser implementations as to handling the case of 0 for the
// perspective value. Until the spec bug is resolved, at least ensure
// that a provided perspective value of <= 0.0 doesn't cause panics
// and behaves as it does in other browsers.
// See https://lists.w3.org/Archives/Public/www-style/2016Jan/0020.html for more details.
if d <= 0.0 {
Transform3D::identity()
} else {
Transform3D::create_perspective(d)
}
}
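// Example (sketch): `create_perspective_matrix(100.)` yields the usual
// perspective matrix (with euclid this places -1/100 in the m34 slot), while
// any d <= 0.0, e.g. `perspective(0)`, degrades to the identity as noted above.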
/// Return the normalized direction vector and its angle for Rotate3D.
pub fn get_normalized_vector_and_angle<T: Zero>(
x: CSSFloat,
y: CSSFloat,
z: CSSFloat,
angle: T,
) -> (CSSFloat, CSSFloat, CSSFloat, T) {
use crate::values::computed::transform::DirectionVector;
use euclid::approxeq::ApproxEq;
let vector = DirectionVector::new(x, y, z);
if vector.square_length().approx_eq(&f32::zero()) {
// https://www.w3.org/TR/css-transforms-1/#funcdef-rotate3d
// A direction vector that cannot be normalized, such as [0, 0, 0], will cause the
// rotation to not be applied, so we use identity matrix (i.e. rotate3d(0, 0, 1, 0)).
(0., 0., 1., T::zero())
} else {
let vector = vector.robust_normalize();
(vector.x, vector.y, vector.z, angle)
}
}
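// Example (sketch): `rotate3d(2, 0, 0, 90deg)` has its axis normalized to the
// unit x-vector, so this returns (1., 0., 0., angle); a degenerate axis such
// as `rotate3d(0, 0, 0, 90deg)` maps to (0., 0., 1., zero angle), i.e. an
// identity rotation.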
#[derive(
Clone,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToResolvedValue,
ToShmem,
)]
#[repr(C, u8)]
/// A value of the `Rotate` property
///
/// <https://drafts.csswg.org/css-transforms-2/#individual-transforms>
pub enum GenericRotate<Number, Angle> {
/// 'none'
None,
/// '<angle>'
Rotate(Angle),
/// '<number>{3} <angle>'
Rotate3D(Number, Number, Number, Angle),
}
pub use self::GenericRotate as Rotate;
/// A trait to check if the current 3D vector is parallel to the DirectionVector.
/// This is especially for serialization on Rotate.
pub trait IsParallelTo {
/// Returns true if this is parallel to the vector.
fn is_parallel_to(&self, vector: &computed::transform::DirectionVector) -> bool;
}
impl<Number, Angle> ToCss for Rotate<Number, Angle>
where
Number: Copy + ToCss,
Angle: ToCss,
(Number, Number, Number): IsParallelTo,
{
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: fmt::Write,
{
use crate::values::computed::transform::DirectionVector;
match *self {
Rotate::None => dest.write_str("none"),
Rotate::Rotate(ref angle) => angle.to_css(dest),
Rotate::Rotate3D(x, y, z, ref angle) => {
// If a 3d rotation is specified, the property must serialize with an axis
// specified. If the axis is parallel with the x, y, or z axises, it must
// serialize as the appropriate keyword.
// https://drafts.csswg.org/css-transforms-2/#individual-transform-serialization
let v = (x, y, z);
if v.is_parallel_to(&DirectionVector::new(1., 0., 0.)) {
dest.write_char('x')?;
} else if v.is_parallel_to(&DirectionVector::new(0., 1., 0.)) {
dest.write_char('y')?;<|fim▁hole|> dest.write_char(' ')?;
y.to_css(dest)?;
dest.write_char(' ')?;
z.to_css(dest)?;
}
dest.write_char(' ')?;
angle.to_css(dest)
},
}
}
}
#[derive(
Clone,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToResolvedValue,
ToShmem,
)]
#[repr(C, u8)]
/// A value of the `Scale` property
///
/// <https://drafts.csswg.org/css-transforms-2/#individual-transforms>
pub enum GenericScale<Number> {
/// 'none'
None,
/// '<number>{1,2}'
Scale(Number, Number),
/// '<number>{3}'
Scale3D(Number, Number, Number),
}
pub use self::GenericScale as Scale;
impl<Number: ToCss + PartialEq> ToCss for Scale<Number> {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: fmt::Write,
{
match *self {
Scale::None => dest.write_str("none"),
Scale::Scale(ref x, ref y) => {
x.to_css(dest)?;
if x != y {
dest.write_char(' ')?;
y.to_css(dest)?;
}
Ok(())
},
Scale::Scale3D(ref x, ref y, ref z) => {
x.to_css(dest)?;
dest.write_char(' ')?;
y.to_css(dest)?;
dest.write_char(' ')?;
z.to_css(dest)
},
}
}
}
#[derive(
Clone,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C, u8)]
/// A value of the `translate` property
///
/// https://drafts.csswg.org/css-transforms-2/#individual-transform-serialization:
///
/// If a 2d translation is specified, the property must serialize with only one
/// or two values (per usual, if the second value is 0px, the default, it must
/// be omitted when serializing).
///
/// If a 3d translation is specified, all three values must be serialized.
///
/// We don't omit the 3rd component even if it is 0px for now, and the
/// related spec issue is https://github.com/w3c/csswg-drafts/issues/3305
///
/// <https://drafts.csswg.org/css-transforms-2/#individual-transforms>
pub enum GenericTranslate<LengthPercentage, Length>
where
LengthPercentage: Zero,
{
/// 'none'
None,
/// '<length-percentage>' or '<length-percentage> <length-percentage>'
Translate(
LengthPercentage,
#[css(skip_if = "Zero::is_zero")] LengthPercentage,
),
/// '<length-percentage> <length-percentage> <length>'
Translate3D(LengthPercentage, LengthPercentage, Length),
}
pub use self::GenericTranslate as Translate;
#[allow(missing_docs)]
#[derive(
Clone,
Copy,
Debug,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
pub enum TransformStyle {
#[cfg(feature = "servo")]
Auto,
Flat,
#[css(keyword = "preserve-3d")]
Preserve3d,
}<|fim▁end|> | } else if v.is_parallel_to(&DirectionVector::new(0., 0., 1.)) {
dest.write_char('z')?;
} else {
x.to_css(dest)?; |
<|file_name|>two.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#encoding=utf-8
from onefile import *
def two():
print "at two\n",
def second():
print "at second\n",<|fim▁hole|>if __name__ == '__main__':
two()
#one()
#first()<|fim▁end|> | |
<|file_name|>test_volcano_point_building.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **Impact function Test Cases.**
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '[email protected]'
__date__ = '11/12/2015'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import unittest
from safe.impact_functions.impact_function_manager import ImpactFunctionManager
from safe.impact_functions.volcanic.volcano_point_building.impact_function \
import VolcanoPointBuildingFunction
from safe.test.utilities import test_data_path, get_qgis_app
from safe.storage.core import read_layer
from safe.storage.safe_layer import SafeLayer
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app()
class TestVolcanoPointBuildingFunction(unittest.TestCase):
"""Test for Volcano Point on Building Impact Function."""
def setUp(self):
registry = ImpactFunctionManager().registry
registry.clear()
registry.register(VolcanoPointBuildingFunction)
def test_run(self):
"""TestVolcanoPointBuildingFunction: Test running the IF."""
volcano_path = test_data_path('hazard', 'volcano_point.shp')
building_path = test_data_path('exposure', 'buildings.shp')
hazard_layer = read_layer(volcano_path)
exposure_layer = read_layer(building_path)
impact_function = VolcanoPointBuildingFunction.instance()
impact_function.hazard = SafeLayer(hazard_layer)
impact_function.exposure = SafeLayer(exposure_layer)
impact_function.run()
impact_layer = impact_function.impact
# Check the question
expected_question = (
'In the event of volcano point how many buildings might be '
'affected')
message = 'The question should be %s, but it returns %s' % (
expected_question, impact_function.question)
self.assertEqual(expected_question, impact_function.question, message)
# The buildings should all be categorised into 3000 zone
zone_sum = sum(impact_layer.get_data(
attribute=impact_function.target_field))
expected_sum = 3 * 181
message = 'Expecting %s, but it returns %s' % (expected_sum, zone_sum)
self.assertEqual(zone_sum, expected_sum, message)
def test_filter(self):
"""TestVolcanoPointBuildingFunction: Test filtering IF"""
hazard_keywords = {
'volcano_name_field': 'NAME',
'hazard_category': 'multiple_event',
'keyword_version': 3.2,
'title': 'Volcano Point',
'hazard': 'volcano',
'source': 'smithsonian',
'layer_geometry': 'point',
'layer_purpose': 'hazard',
'layer_mode': 'classified',
}<|fim▁hole|> exposure_keywords = {
'license': 'Open Data Commons Open Database License (ODbL)',
'keyword_version': 3.2,
'structure_class_field': 'TYPE',
'title': 'Buildings',
'layer_geometry': 'polygon',
'source': 'OpenStreetMap - www.openstreetmap.org',
'date': '26-03-2015 14:03',
'layer_purpose': 'exposure',
'layer_mode': 'classified',
'exposure': 'structure'}
impact_functions = ImpactFunctionManager().filter_by_keywords(
hazard_keywords, exposure_keywords)
message = 'There should be 1 impact function, but there are: %s' % \
len(impact_functions)
self.assertEqual(1, len(impact_functions), message)
retrieved_if = impact_functions[0].metadata().as_dict()['id']
expected = ImpactFunctionManager().get_function_id(
VolcanoPointBuildingFunction)
message = 'Expecting %s, but getting %s instead' % (
expected, retrieved_if)
self.assertEqual(expected, retrieved_if, message)<|fim▁end|> | |
<|file_name|>CWE369_Divide_by_Zero__int_zero_modulo_82_goodB2G.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE369_Divide_by_Zero__int_zero_modulo_82_goodB2G.cpp
Label Definition File: CWE369_Divide_by_Zero__int.label.xml
Template File: sources-sinks-82_goodB2G.tmpl.cpp
<|fim▁hole|>/*
* @description
* CWE: 369 Divide by Zero
* BadSource: zero Fixed value of zero
* GoodSource: Non-zero
* Sinks: modulo
* GoodSink: Check for zero before modulo
* BadSink : Modulo a constant with data
* Flow Variant: 82 Data flow: data passed in a parameter to an virtual method called via a pointer
*
* */
#ifndef OMITGOOD
#include "std_testcase.h"
#include "CWE369_Divide_by_Zero__int_zero_modulo_82.h"
namespace CWE369_Divide_by_Zero__int_zero_modulo_82
{
void CWE369_Divide_by_Zero__int_zero_modulo_82_goodB2G::action(int data)
{
/* FIX: test for a zero denominator */
if( data != 0 )
{
printIntLine(100 % data);
}
else
{
printLine("This would result in a divide by zero");
}
}
}
#endif /* OMITGOOD */<|fim▁end|> | */
|
<|file_name|>stripe-card.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export { default } from 'ember-stripe-elements/components/stripe-card'; |
<|file_name|>login.page-objects.ts<|end_file_name|><|fim▁begin|>import { browser, element, by, ElementFinder, protractor, $ } from 'protractor';
import { PageObject } from "./page-object";
export class LoginPageObject extends PageObject {
    browseToPage(): void {
browser.get('');
let EC = protractor.ExpectedConditions;
browser.wait( EC.invisibilityOf( $('.loading-md') ), 15000 );
}
logOut() {
let menuButton = element(by.css('#main-menu'));
menuButton.click();
browser.driver.sleep(2000);
let disconnectionButton = element(by.css('#disconnection'));
disconnectionButton.click();
browser.driver.sleep(2000);
}
<|fim▁hole|> getLoginButton(): ElementFinder {
return element(by.css('#login'));
}
    getEmailValue() {
        // an <input>'s contents live in its value attribute; getText() would return ''
        return element(by.css('#login-name > input')).getAttribute('value');
    }

    getPasswordValue() {
        return element(by.css('#login-password > input')).getAttribute('value');
    }
setEmailValue(email) {
element(by.css('#login-name > input')).sendKeys(email);
}
setPasswordValue(password: string) {
element(by.css('#login-password > input')).sendKeys(password);
}
getTitlePage() {
return element(by.css('#title-page')).getText();
}
}<|fim▁end|> | getRegisterButton(): ElementFinder {
return element(by.css('.register-btn'));
}
|
<|file_name|>GeometryFactory.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2011 by The Authors.
* Published under the LGPL 2.1 license.
* See /license-notice.txt for the full text of the license notice.
* See /license.txt for the full text of the license.
*/
/**
* Supplies a set of utility methods for building Geometry objects from lists
* of Coordinates.
*
* Note that the factory constructor methods do <b>not</b> change the input
* coordinates in any way.
*
* In particular, they are not rounded to the supplied <tt>PrecisionModel</tt>.
* It is assumed that input Coordinates meet the given precision.
*/
/**
* @requires jsts/geom/PrecisionModel.js
*/
/**
* Constructs a GeometryFactory that generates Geometries having a floating
* PrecisionModel and a spatial-reference ID of 0.
*
* @constructor
*/
jsts.geom.GeometryFactory = function(precisionModel) {
this.precisionModel = precisionModel || new jsts.geom.PrecisionModel();
};
jsts.geom.GeometryFactory.prototype.precisionModel = null;
jsts.geom.GeometryFactory.prototype.getPrecisionModel = function() {
return this.precisionModel;
};
/**
* Creates a Point using the given Coordinate; a null Coordinate will create an
* empty Geometry.
*
* @param {Coordinate}
* coordinate Coordinate to base this Point on.
* @return {Point} A new Point.
*/
jsts.geom.GeometryFactory.prototype.createPoint = function(coordinate) {
var point = new jsts.geom.Point(coordinate, this);
return point;
};
/**
* Creates a LineString using the given Coordinates; a null or empty array will
* create an empty LineString. Consecutive points must not be equal.
*
* @param {Coordinate[]}
* coordinates an array without null elements, or an empty array, or
* null.
* @return {LineString} A new LineString.
*/
jsts.geom.GeometryFactory.prototype.createLineString = function(coordinates) {
var lineString = new jsts.geom.LineString(coordinates, this);
return lineString;
};
/**
* Creates a LinearRing using the given Coordinates; a null or empty array will
* create an empty LinearRing. The points must form a closed and simple
* linestring. Consecutive points must not be equal.
*
* @param {Coordinate[]}
* coordinates an array without null elements, or an empty array, or
* null.
* @return {LinearRing} A new LinearRing.
*/
jsts.geom.GeometryFactory.prototype.createLinearRing = function(coordinates) {
var linearRing = new jsts.geom.LinearRing(coordinates, this);
return linearRing;
};
/**
* Constructs a <code>Polygon</code> with the given exterior boundary and
* interior boundaries.
*
* @param {LinearRing}
* shell the outer boundary of the new <code>Polygon</code>, or
* <code>null</code> or an empty <code>LinearRing</code> if the
* empty geometry is to be created.
* @param {LinearRing[]}
* holes the inner boundaries of the new <code>Polygon</code>, or
* <code>null</code> or empty <code>LinearRing</code> s if the
* empty geometry is to be created.
* @return {Polygon} A new Polygon.
*/
jsts.geom.GeometryFactory.prototype.createPolygon = function(shell, holes) {
var polygon = new jsts.geom.Polygon(shell, holes, this);
return polygon;
};
jsts.geom.GeometryFactory.prototype.createMultiPoint = function(points) {
if (points && points[0] instanceof jsts.geom.Coordinate) {
var converted = [];
var i;
for (i = 0; i < points.length; i++) {
converted.push(this.createPoint(points[i]));
}
points = converted;
}<|fim▁hole|>jsts.geom.GeometryFactory.prototype.createMultiLineString = function(
lineStrings) {
return new jsts.geom.MultiLineString(lineStrings, this);
};
jsts.geom.GeometryFactory.prototype.createMultiPolygon = function(polygons) {
return new jsts.geom.MultiPolygon(polygons, this);
};
/**
* Build an appropriate <code>Geometry</code>, <code>MultiGeometry</code>,
* or <code>GeometryCollection</code> to contain the <code>Geometry</code>s
* in it. For example:<br>
*
* <ul>
* <li> If <code>geomList</code> contains a single <code>Polygon</code>,
* the <code>Polygon</code> is returned.
* <li> If <code>geomList</code> contains several <code>Polygon</code>s, a
* <code>MultiPolygon</code> is returned.
* <li> If <code>geomList</code> contains some <code>Polygon</code>s and
* some <code>LineString</code>s, a <code>GeometryCollection</code> is
* returned.
* <li> If <code>geomList</code> is empty, an empty
* <code>GeometryCollection</code> is returned
* </ul>
*
* Note that this method does not "flatten" Geometries in the input, and hence
* if any MultiGeometries are contained in the input a GeometryCollection
* containing them will be returned.
*
* @param geomList
* the <code>Geometry</code>s to combine.
* @return {Geometry} a <code>Geometry</code> of the "smallest", "most
* type-specific" class that can contain the elements of
* <code>geomList</code> .
*/
jsts.geom.GeometryFactory.prototype.buildGeometry = function(geomList) {
/**
* Determine some facts about the geometries in the list
*/
var geomClass = null;
var isHeterogeneous = false;
var hasGeometryCollection = false;
for (var i = geomList.iterator(); i.hasNext();) {
var geom = i.next();
var partClass = geom.CLASS_NAME;
if (geomClass === null) {
geomClass = partClass;
}
if (!(partClass === geomClass)) {
isHeterogeneous = true;
}
if (geom.isGeometryCollectionBase())
hasGeometryCollection = true;
}
/**
* Now construct an appropriate geometry to return
*/
// for the empty geometry, return an empty GeometryCollection
if (geomClass === null) {
return this.createGeometryCollection(null);
}
if (isHeterogeneous || hasGeometryCollection) {
return this.createGeometryCollection(geomList.toArray());
}
  // at this point we know the collection is homogeneous.
// Determine the type of the result from the first Geometry in the list
// this should always return a geometry, since otherwise an empty collection
// would have already been returned
var geom0 = geomList.get(0);
var isCollection = geomList.size() > 1;
if (isCollection) {
if (geom0 instanceof jsts.geom.Polygon) {
return this.createMultiPolygon(geomList.toArray());
} else if (geom0 instanceof jsts.geom.LineString) {
return this.createMultiLineString(geomList.toArray());
} else if (geom0 instanceof jsts.geom.Point) {
return this.createMultiPoint(geomList.toArray());
}
jsts.util.Assert.shouldNeverReachHere('Unhandled class: ' + geom0);
}
return geom0;
};
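// Usage sketch (hypothetical; assumes `geomList` is a javascript.util-style
// list exposing iterator()/get()/size()/toArray(), as the code above expects):
//
//   var factory = new jsts.geom.GeometryFactory();
//   var result = factory.buildGeometry(geomList);
//   // -> a MultiPolygon when every element is a Polygon,
//   //    a GeometryCollection when the elements are of mixed types.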
jsts.geom.GeometryFactory.prototype.createGeometryCollection = function(
geometries) {
return new jsts.geom.GeometryCollection(geometries, this);
};
/**
* Creates a {@link Geometry} with the same extent as the given envelope. The
* Geometry returned is guaranteed to be valid. To provide this behaviour, the
* following cases occur:
* <p>
* If the <code>Envelope</code> is:
* <ul>
* <li>null : returns an empty {@link Point}
* <li>a point : returns a non-empty {@link Point}
* <li>a line : returns a two-point {@link LineString}
* <li>a rectangle : returns a {@link Polygon}> whose points are (minx, miny),
* (minx, maxy), (maxx, maxy), (maxx, miny), (minx, miny).
* </ul>
*
* @param {jsts.geom.Envelope}
* envelope the <code>Envelope</code> to convert.
* @return {jsts.geom.Geometry} an empty <code>Point</code> (for null
* <code>Envelope</code>s), a <code>Point</code> (when min x = max
* x and min y = max y) or a <code>Polygon</code> (in all other cases).
*/
jsts.geom.GeometryFactory.prototype.toGeometry = function(envelope) {
// null envelope - return empty point geometry
if (envelope.isNull()) {
return this.createPoint(null);
}
// point?
if (envelope.getMinX() === envelope.getMaxX() &&
envelope.getMinY() === envelope.getMaxY()) {
return this.createPoint(new jsts.geom.Coordinate(envelope.getMinX(),
envelope.getMinY()));
}
// vertical or horizontal line?
if (envelope.getMinX() === envelope.getMaxX() ||
envelope.getMinY() === envelope.getMaxY()) {
return this.createLineString([
new jsts.geom.Coordinate(envelope.getMinX(), envelope.getMinY()),
new jsts.geom.Coordinate(envelope.getMaxX(), envelope.getMaxY())]);
}
// create a CW ring for the polygon
return this.createPolygon(this.createLinearRing([
new jsts.geom.Coordinate(envelope.getMinX(), envelope.getMinY()),
new jsts.geom.Coordinate(envelope.getMinX(), envelope.getMaxY()),
new jsts.geom.Coordinate(envelope.getMaxX(), envelope.getMaxY()),
new jsts.geom.Coordinate(envelope.getMaxX(), envelope.getMinY()),
new jsts.geom.Coordinate(envelope.getMinX(), envelope.getMinY())]), null);
};<|fim▁end|> |
return new jsts.geom.MultiPoint(points, this);
};
|
<|file_name|>list-patients.client.controller.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
angular
.module('patients')
.controller('PatientsListController', PatientsListController);
PatientsListController.$inject = ['PatientsService'];
function PatientsListController(PatientsService) {
var vm = this;
vm.patients = PatientsService.query();<|fim▁hole|>})();<|fim▁end|> | } |
<|file_name|>yaku.py<|end_file_name|><|fim▁begin|>import warnings
class Yaku:
yaku_id = None
tenhou_id = None
name = None
han_open = None
han_closed = None
is_yakuman = None
def __init__(self, yaku_id=None):
self.tenhou_id = None
self.yaku_id = yaku_id
self.set_attributes()
def __str__(self):
return self.name
def __repr__(self):
# for calls in array
return self.__str__()
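    # A concrete yaku fills in the two hooks below, e.g. (sketch only; the
    # class name and attribute values are illustrative):
    #
    #   class Tsumo(Yaku):
    #       def set_attributes(self):
    #           self.name = 'Menzen Tsumo'
    #           self.han_open, self.han_closed = None, 1
    #           self.is_yakuman = False
    #       def is_condition_met(self, hand, *args):
    #           ...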
def is_condition_met(self, hand, *args):
"""
        Does this yaku exist in the hand?
:param: hand
:param: args: some yaku requires additional attributes
:return: boolean
"""
raise NotImplementedError
def set_attributes(self):
"""
Set id, name, han related to the yaku<|fim▁hole|> raise NotImplementedError
@property
def english(self):
warnings.warn("Use .name attribute instead of .english attribute", DeprecationWarning)
return self.name
@property
def japanese(self):
warnings.warn("Use .name attribute instead of .japanese attribute", DeprecationWarning)
return self.name<|fim▁end|> | """ |
<|file_name|>extdeps.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Check for external package sources. Allow only vendorable packages.
use std::fs;
use std::path::Path;
/// List of whitelisted sources for packages
const WHITELISTED_SOURCES: &[&str] = &[
"\"registry+https://github.com/rust-lang/crates.io-index\"",
];
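// A matching entry in Cargo.lock looks like the following (illustrative
// sample, not taken from a real lockfile):
//
//     source = "registry+https://github.com/rust-lang/crates.io-index"
//
// Any other source kind -- e.g. `git+https://...` -- is rejected by the
// check below, since such packages cannot be vendored.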
/// check for external package sources
pub fn check(path: &Path, bad: &mut bool) {
// Cargo.lock of rust (tidy runs inside src/)
let path = path.join("../Cargo.lock");
// open and read the whole file
let cargo_lock = t!(fs::read_to_string(&path));
// process each line
for line in cargo_lock.lines() {
// consider only source entries
        if !line.starts_with("source = ") {
continue;
}
// extract source value
let source = line.splitn(2, '=').nth(1).unwrap().trim();
// ensure source is whitelisted
if !WHITELISTED_SOURCES.contains(&&*source) {
println!("invalid source: {}", source);
*bad = true;<|fim▁hole|><|fim▁end|> | }
}
} |
<|file_name|>UserAbilitiesParser.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2017, 2018, 2019, 2020 Stephen Powis https://github.com/Crim/pardot-java-client
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.darksci.pardot.api.parser.user;
import com.darksci.pardot.api.parser.JacksonFactory;
import com.darksci.pardot.api.parser.ResponseParser;
import com.darksci.pardot.api.response.user.UserAbilitiesResponse;
import java.io.IOException;
/**
* Handles parsing UserAbilities API responses into POJOs.
*/
public class UserAbilitiesParser implements ResponseParser<UserAbilitiesResponse.Result> {
<|fim▁hole|> }
}<|fim▁end|> | @Override
public UserAbilitiesResponse.Result parseResponse(final String responseStr) throws IOException {
return JacksonFactory.newInstance().readValue(responseStr, UserAbilitiesResponse.class).getResult(); |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// softlayer_network_storage_iscsi - SoftLayer's iscsi product extends upon the base functionality of
// SoftLayer offerings by providing snapshot and replication capabilities. An iscsi volume is mounted
// through SoftLayer's private network and allows for block level additional storage on a highly
// redundant disk array. SoftLayer's iscsi offering is capable of taking volume snapshots which can be<|fim▁hole|>// provide a solid disaster recovery solution.
package softlayer_network_storage_iscsi
// DO NOT EDIT. THIS FILE WAS AUTOMATICALLY GENERATED<|fim▁end|> | // mounted read-only or used for an immediate volume data restore. This high-end Storage offering is
// also capable of being configured for remote data replication to any of SoftLayer's datacenters to |
<|file_name|>inputstream.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import codecs
import re
import types
import sys
from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
from .constants import encodings, ReparseException
from . import utils
from io import StringIO
try:
from io import BytesIO
except ImportError:
BytesIO = StringIO
try:
from io import BufferedIOBase
except ImportError:
class BufferedIOBase(object):
pass
#Non-unicode versions of constants for use in the pre-parser
spaceCharactersBytes = frozenset([item.encode(u"ascii") for item in spaceCharacters])
asciiLettersBytes = frozenset([item.encode(u"ascii") for item in asciiLetters])
asciiUppercaseBytes = frozenset([item.encode(u"ascii") for item in asciiUppercase])
spacesAngleBrackets = spaceCharactersBytes | frozenset([">", "<"])
invalid_unicode_re = re.compile(u"[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uD800-\uDFFF\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]")
non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,
0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,
0x10FFFE, 0x10FFFF])
ascii_punctuation_re = re.compile(u"[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005B-\u0060\u007B-\u007E]")
# Cache for charsUntil()
charsUntilRegEx = {}
class BufferedStream(object):
u"""Buffering for streams that do not have buffering of their own
The buffer is implemented as a list of chunks on the assumption that
joining many strings will be slow since it is O(n**2)
"""
def __init__(self, stream):
self.stream = stream
self.buffer = []
self.position = [-1,0] #chunk number, offset
__init__.func_annotations = {}
def tell(self):
pos = 0
for chunk in self.buffer[:self.position[0]]:
pos += len(chunk)
pos += self.position[1]
return pos
tell.func_annotations = {}
def seek(self, pos):
assert pos < self._bufferedBytes()
offset = pos
i = 0
while len(self.buffer[i]) < offset:
            offset -= len(self.buffer[i])
i += 1
self.position = [i, offset]
seek.func_annotations = {}
def read(self, str):
if not self.buffer:
return self._readStream(str)
elif (self.position[0] == len(self.buffer) and
self.position[1] == len(self.buffer[-1])):
return self._readStream(str)
else:
return self._readFromBuffer(str)
read.func_annotations = {}
def _bufferedBytes(self):
return sum([len(item) for item in self.buffer])
_bufferedBytes.func_annotations = {}
def _readStream(self, str):
data = self.stream.read(str)
self.buffer.append(data)
self.position[0] += 1
self.position[1] = len(data)
return data
_readStream.func_annotations = {}
def _readFromBuffer(self, str):
remainingBytes = str
rv = []
bufferIndex = self.position[0]
bufferOffset = self.position[1]
while bufferIndex < len(self.buffer) and remainingBytes != 0:
assert remainingBytes > 0
bufferedData = self.buffer[bufferIndex]
if remainingBytes <= len(bufferedData) - bufferOffset:
bytesToRead = remainingBytes
self.position = [bufferIndex, bufferOffset + bytesToRead]
else:
bytesToRead = len(bufferedData) - bufferOffset
self.position = [bufferIndex, len(bufferedData)]
bufferIndex += 1
            rv.append(bufferedData[bufferOffset:
                                   bufferOffset + bytesToRead])
remainingBytes -= bytesToRead
bufferOffset = 0
if remainingBytes:
rv.append(self._readStream(remainingBytes))
return u"".join(rv)
_readFromBuffer.func_annotations = {}
def HTMLInputStream(source, encoding=None, parseMeta=True, chardet=True):
if hasattr(source, u"read"):
isUnicode = isinstance(source.read(0), unicode)
else:
isUnicode = isinstance(source, unicode)
if isUnicode:
if encoding is not None:
raise TypeError(u"Cannot explicitly set an encoding with a unicode string")
return HTMLUnicodeInputStream(source)
else:
return HTMLBinaryInputStream(source, encoding, parseMeta, chardet)
HTMLInputStream.func_annotations = {}
class HTMLUnicodeInputStream(object):
u"""Provides a unicode stream of characters to the HTMLTokenizer.
This class takes care of character encoding and removing or replacing
incorrect byte-sequences and also provides column and line tracking.
"""
_defaultChunkSize = 10240
def __init__(self, source):
u"""Initialises the HTMLInputStream.
HTMLInputStream(source, [encoding]) -> Normalized stream from source
for use by html5lib.
source can be either a file-object, local filename or a string.
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
parseMeta - Look for a <meta> element containing encoding information
"""
#Craziness
if len(u"\U0010FFFF") == 1:
self.reportCharacterErrors = self.characterErrorsUCS4
self.replaceCharactersRegexp = re.compile(u"[\uD800-\uDFFF]")
else:
self.reportCharacterErrors = self.characterErrorsUCS2
self.replaceCharactersRegexp = re.compile(u"([\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?<![\uD800-\uDBFF])[\uDC00-\uDFFF])")
# List of where new lines occur
self.newLines = [0]
self.charEncoding = (u"utf-8", u"certain")
self.dataStream = self.openStream(source)
self.reset()
__init__.func_annotations = {}
def reset(self):
self.chunk = u""
self.chunkSize = 0
self.chunkOffset = 0
self.errors = []
# number of (complete) lines in previous chunks
self.prevNumLines = 0
# number of columns in the last line of the previous chunk
self.prevNumCols = 0
#Deal with CR LF and surrogates split over chunk boundaries
self._bufferedCharacter = None
reset.func_annotations = {}
def openStream(self, source):
u"""Produces a file object from source.
source can be either a file object, local filename or a string.
"""
# Already a file object
if hasattr(source, u'read'):
stream = source
else:
stream = StringIO(source)
if (#not isinstance(stream, BufferedIOBase) and
not(hasattr(stream, u"tell") and
hasattr(stream, u"seek")) or
stream is sys.stdin):
stream = BufferedStream(stream)
return stream
openStream.func_annotations = {}
def _position(self, offset):
chunk = self.chunk
nLines = chunk.count(u'\n', 0, offset)
positionLine = self.prevNumLines + nLines
lastLinePos = chunk.rfind(u'\n', 0, offset)
if lastLinePos == -1:
positionColumn = self.prevNumCols + offset
else:
positionColumn = offset - (lastLinePos + 1)
return (positionLine, positionColumn)
_position.func_annotations = {}
def position(self):
u"""Returns (line, col) of the current position in the stream."""
line, col = self._position(self.chunkOffset)
return (line+1, col)
position.func_annotations = {}
def char(self):
u""" Read one character from the stream or queue if available. Return
EOF when EOF is reached.
"""
# Read a new chunk from the input stream if necessary
if self.chunkOffset >= self.chunkSize:
if not self.readChunk():
return EOF
chunkOffset = self.chunkOffset
char = self.chunk[chunkOffset]
self.chunkOffset = chunkOffset + 1
return char
char.func_annotations = {}
def readChunk(self, chunkSize=None):
if chunkSize is None:
chunkSize = self._defaultChunkSize
self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)
self.chunk = u""
self.chunkSize = 0
self.chunkOffset = 0
data = self.dataStream.read(chunkSize)
#Deal with CR LF and surrogates broken across chunks
if self._bufferedCharacter:
data = self._bufferedCharacter + data
self._bufferedCharacter = None
elif not data:
# We have no more data, bye-bye stream
return False
if len(data) > 1:
lastv = ord(data[-1])
if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:
self._bufferedCharacter = data[-1]
data = data[:-1]
self.reportCharacterErrors(data)
# Replace invalid characters
# Note U+0000 is dealt with in the tokenizer
data = self.replaceCharactersRegexp.sub(u"\ufffd", data)
data = data.replace(u"\r\n", u"\n")
data = data.replace(u"\r", u"\n")
self.chunk = data
self.chunkSize = len(data)
return True
readChunk.func_annotations = {}
def characterErrorsUCS4(self, data):
for i in xrange(len(invalid_unicode_re.findall(data))):
self.errors.append(u"invalid-codepoint")
characterErrorsUCS4.func_annotations = {}
def characterErrorsUCS2(self, data):
#Someone picked the wrong compile option
#You lose
skip = False
for match in invalid_unicode_re.finditer(data):
if skip:
continue
codepoint = ord(match.group())
pos = match.start()
#Pretty sure there should be endianness issues here
if utils.isSurrogatePair(data[pos:pos+2]):
#We have a surrogate pair!
char_val = utils.surrogatePairToCodepoint(data[pos:pos+2])
if char_val in non_bmp_invalid_codepoints:
self.errors.append(u"invalid-codepoint")
skip = True
elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and
pos == len(data) - 1):
self.errors.append(u"invalid-codepoint")
else:
skip = False
self.errors.append(u"invalid-codepoint")
characterErrorsUCS2.func_annotations = {}
def charsUntil(self, characters, opposite = False):
u""" Returns a string of characters from the stream up to but not
including any character in 'characters' or EOF. 'characters' must be
a container that supports the 'in' method and iteration over its
characters.
"""
# Use a cache of regexps to find the required characters
try:
chars = charsUntilRegEx[(characters, opposite)]
except KeyError:
if __debug__:
for c in characters:
assert(ord(c) < 128)
regex = u"".join([u"\\x%02x" % ord(c) for c in characters])
if not opposite:
regex = u"^%s" % regex
chars = charsUntilRegEx[(characters, opposite)] = re.compile(u"[%s]+" % regex)
rv = []
while True:
# Find the longest matching prefix
m = chars.match(self.chunk, self.chunkOffset)
if m is None:
# If nothing matched, and it wasn't because we ran out of chunk,
# then stop
if self.chunkOffset != self.chunkSize:
break
else:
end = m.end()
# If not the whole chunk matched, return everything
# up to the part that didn't match
if end != self.chunkSize:
rv.append(self.chunk[self.chunkOffset:end])
self.chunkOffset = end
break
# If the whole remainder of the chunk matched,
# use it all and read the next chunk
rv.append(self.chunk[self.chunkOffset:])
if not self.readChunk():
# Reached EOF
break
r = u"".join(rv)
return r
charsUntil.func_annotations = {}
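    # e.g. charsUntil(("<", "&")) returns everything up to (but excluding)
    # the next "<" or "&"; with opposite=True it instead consumes characters
    # *from* that set (illustrative usage; the members must be ASCII).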
def unget(self, char):
# Only one character is allowed to be ungotten at once - it must
# be consumed again before any further call to unget
if char is not None:
if self.chunkOffset == 0:
# unget is called quite rarely, so it's a good idea to do
# more work here if it saves a bit of work in the frequently
# called char and charsUntil.
# So, just prepend the ungotten character onto the current
# chunk:
self.chunk = char + self.chunk
self.chunkSize += 1
else:
self.chunkOffset -= 1
assert self.chunk[self.chunkOffset] == char
unget.func_annotations = {}
class HTMLBinaryInputStream(HTMLUnicodeInputStream):
u"""Provides a unicode stream of characters to the HTMLTokenizer.
<|fim▁hole|> This class takes care of character encoding and removing or replacing
incorrect byte-sequences and also provides column and line tracking.
"""
def __init__(self, source, encoding=None, parseMeta=True, chardet=True):
u"""Initialises the HTMLInputStream.
HTMLInputStream(source, [encoding]) -> Normalized stream from source
for use by html5lib.
source can be either a file-object, local filename or a string.
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
parseMeta - Look for a <meta> element containing encoding information
"""
# Raw Stream - for unicode objects this will encode to utf-8 and set
# self.charEncoding as appropriate
self.rawStream = self.openStream(source)
HTMLUnicodeInputStream.__init__(self, self.rawStream)
self.charEncoding = (codecName(encoding), u"certain")
# Encoding Information
#Number of bytes to use when looking for a meta element with
#encoding information
self.numBytesMeta = 512
#Number of bytes to use when using detecting encoding using chardet
self.numBytesChardet = 100
#Encoding to use if no other information can be found
self.defaultEncoding = u"windows-1252"
#Detect encoding iff no explicit "transport level" encoding is supplied
if (self.charEncoding[0] is None):
self.charEncoding = self.detectEncoding(parseMeta, chardet)
#Call superclass
self.reset()
__init__.func_annotations = {}
def reset(self):
self.dataStream = codecs.getreader(self.charEncoding[0])(self.rawStream,
u'replace')
HTMLUnicodeInputStream.reset(self)
reset.func_annotations = {}
def openStream(self, source):
u"""Produces a file object from source.
source can be either a file object, local filename or a string.
"""
# Already a file object
if hasattr(source, u'read'):
stream = source
else:
stream = BytesIO(source)
if (not(hasattr(stream, u"tell") and hasattr(stream, u"seek")) or
stream is sys.stdin):
stream = BufferedStream(stream)
return stream
openStream.func_annotations = {}
def detectEncoding(self, parseMeta=True, chardet=True):
#First look for a BOM
#This will also read past the BOM if present
encoding = self.detectBOM()
confidence = u"certain"
#If there is no BOM need to look for meta elements with encoding
#information
if encoding is None and parseMeta:
encoding = self.detectEncodingMeta()
confidence = u"tentative"
        #Guess with chardet, if available
if encoding is None and chardet:
confidence = u"tentative"
try:
from chardet.universaldetector import UniversalDetector
buffers = []
detector = UniversalDetector()
while not detector.done:
buffer = self.rawStream.read(self.numBytesChardet)
assert isinstance(buffer, str)
if not buffer:
break
buffers.append(buffer)
detector.feed(buffer)
detector.close()
encoding = detector.result[u'encoding']
self.rawStream.seek(0)
except ImportError:
pass
# If all else fails use the default encoding
if encoding is None:
confidence=u"tentative"
encoding = self.defaultEncoding
#Substitute for equivalent encodings:
encodingSub = {u"iso-8859-1":u"windows-1252"}
if encoding.lower() in encodingSub:
encoding = encodingSub[encoding.lower()]
return encoding, confidence
detectEncoding.func_annotations = {}
def changeEncoding(self, newEncoding):
assert self.charEncoding[1] != u"certain"
newEncoding = codecName(newEncoding)
if newEncoding in (u"utf-16", u"utf-16-be", u"utf-16-le"):
newEncoding = u"utf-8"
if newEncoding is None:
return
elif newEncoding == self.charEncoding[0]:
self.charEncoding = (self.charEncoding[0], u"certain")
else:
self.rawStream.seek(0)
self.reset()
self.charEncoding = (newEncoding, u"certain")
raise ReparseException(u"Encoding changed from %s to %s"%(self.charEncoding[0], newEncoding))
changeEncoding.func_annotations = {}
def detectBOM(self):
u"""Attempts to detect at BOM at the start of the stream. If
an encoding can be determined from the BOM return the name of the
encoding otherwise return None"""
bomDict = {
codecs.BOM_UTF8: u'utf-8',
codecs.BOM_UTF16_LE: u'utf-16-le', codecs.BOM_UTF16_BE: u'utf-16-be',
codecs.BOM_UTF32_LE: u'utf-32-le', codecs.BOM_UTF32_BE: u'utf-32-be'
}
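        # e.g. a stream beginning with "\xef\xbb\xbf<html>" is detected as
        # utf-8 and the read position is left just past the 3-byte BOM.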
# Go to beginning of file and read in 4 bytes
string = self.rawStream.read(4)
assert isinstance(string, str)
# Try detecting the BOM using bytes from the string
encoding = bomDict.get(string[:3]) # UTF-8
seek = 3
if not encoding:
# Need to detect UTF-32 before UTF-16
encoding = bomDict.get(string) # UTF-32
seek = 4
if not encoding:
encoding = bomDict.get(string[:2]) # UTF-16
seek = 2
# Set the read position past the BOM if one was found, otherwise
# set it to the start of the stream
self.rawStream.seek(encoding and seek or 0)
return encoding
detectBOM.func_annotations = {}
def detectEncodingMeta(self):
u"""Report the encoding declared by the meta element
"""
buffer = self.rawStream.read(self.numBytesMeta)
assert isinstance(buffer, str)
parser = EncodingParser(buffer)
self.rawStream.seek(0)
encoding = parser.getEncoding()
if encoding in (u"utf-16", u"utf-16-be", u"utf-16-le"):
encoding = u"utf-8"
return encoding
detectEncodingMeta.func_annotations = {}
class EncodingBytes(str):
u"""String-like object with an associated position and various extra methods
If the position is ever greater than the string length then an exception is
raised"""
def __new__(self, value):
assert isinstance(value, str)
return str.__new__(self, value.lower())
__new__.func_annotations = {}
def __init__(self, value):
self._position=-1
__init__.func_annotations = {}
def __iter__(self):
return self
__iter__.func_annotations = {}
def next(self):
p = self._position = self._position + 1
if p >= len(self):
raise StopIteration
elif p < 0:
raise TypeError
return self[p:p+1]
next.func_annotations = {}
def previous(self):
p = self._position
if p >= len(self):
raise StopIteration
elif p < 0:
raise TypeError
self._position = p = p - 1
return self[p:p+1]
previous.func_annotations = {}
def setPosition(self, position):
if self._position >= len(self):
raise StopIteration
self._position = position
setPosition.func_annotations = {}
def getPosition(self):
if self._position >= len(self):
raise StopIteration
if self._position >= 0:
return self._position
else:
return None
getPosition.func_annotations = {}
position = property(getPosition, setPosition)
def getCurrentByte(self):
return self[self.position:self.position+1]
getCurrentByte.func_annotations = {}
currentByte = property(getCurrentByte)
def skip(self, chars=spaceCharactersBytes):
u"""Skip past a list of characters"""
p = self.position # use property for the error-checking
while p < len(self):
c = self[p:p+1]
if c not in chars:
self._position = p
return c
p += 1
self._position = p
return None
skip.func_annotations = {}
def skipUntil(self, chars):
p = self.position
while p < len(self):
c = self[p:p+1]
if c in chars:
self._position = p
return c
p += 1
self._position = p
return None
skipUntil.func_annotations = {}
def matchBytes(self, str):
u"""Look for a sequence of bytes at the start of a string. If the bytes
are found return True and advance the position to the byte after the
match. Otherwise return False and leave the position alone"""
p = self.position
data = self[p:p+len(str)]
rv = data.startswith(str)
if rv:
self.position += len(str)
return rv
matchBytes.func_annotations = {}
def jumpTo(self, str):
u"""Look for the next sequence of bytes matching a given sequence. If
a match is found advance the position to the last byte of the match"""
newPosition = self[self.position:].find(str)
if newPosition > -1:
# XXX: This is ugly, but I can't see a nicer way to fix this.
if self._position == -1:
self._position = 0
self._position += (newPosition + len(str)-1)
return True
else:
raise StopIteration
jumpTo.func_annotations = {}
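# Illustrative walk-through of EncodingBytes (example only; the input is
# lowercased on construction and the position starts before the first byte):
#   data = EncodingBytes("<META charset=utf-8>")
#   data.next()              # returns "<" -- iteration drives the position
#   data.matchBytes("<meta") # True, position advances past the match
#   data.skip()              # skips the space, returns "c"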
class EncodingParser(object):
u"""Mini parser for detecting character encoding from meta elements"""
def __init__(self, data):
u"""string - the data to work on for encoding detection"""
self.data = EncodingBytes(data)
self.encoding = None
__init__.func_annotations = {}
def getEncoding(self):
methodDispatch = (
("<!--",self.handleComment),
("<meta",self.handleMeta),
("</",self.handlePossibleEndTag),
("<!",self.handleOther),
("<?",self.handleOther),
("<",self.handlePossibleStartTag))
for byte in self.data:
keepParsing = True
for key, method in methodDispatch:
if self.data.matchBytes(key):
try:
keepParsing = method()
break
except StopIteration:
keepParsing=False
break
if not keepParsing:
break
return self.encoding
getEncoding.func_annotations = {}
def handleComment(self):
u"""Skip over comments"""
return self.data.jumpTo("-->")
handleComment.func_annotations = {}
def handleMeta(self):
if self.data.currentByte not in spaceCharactersBytes:
            #if we have <meta not followed by a space, just keep going
return True
#We have a valid meta element we want to search for attributes
hasPragma = False
pendingEncoding = None
while True:
#Try to find the next attribute after the current position
attr = self.getAttribute()
if attr is None:
return True
else:
if attr[0] == "http-equiv":
hasPragma = attr[1] == "content-type"
if hasPragma and pendingEncoding is not None:
self.encoding = pendingEncoding
return False
elif attr[0] == "charset":
tentativeEncoding = attr[1]
codec = codecName(tentativeEncoding)
if codec is not None:
self.encoding = codec
return False
elif attr[0] == "content":
contentParser = ContentAttrParser(EncodingBytes(attr[1]))
tentativeEncoding = contentParser.parse()
if tentativeEncoding is not None:
codec = codecName(tentativeEncoding)
if codec is not None:
if hasPragma:
self.encoding = codec
return False
else:
pendingEncoding = codec
handleMeta.func_annotations = {}
def handlePossibleStartTag(self):
return self.handlePossibleTag(False)
handlePossibleStartTag.func_annotations = {}
def handlePossibleEndTag(self):
self.data.next()
return self.handlePossibleTag(True)
handlePossibleEndTag.func_annotations = {}
def handlePossibleTag(self, endTag):
data = self.data
if data.currentByte not in asciiLettersBytes:
#If the next byte is not an ascii letter either ignore this
#fragment (possible start tag case) or treat it according to
#handleOther
if endTag:
data.previous()
self.handleOther()
return True
c = data.skipUntil(spacesAngleBrackets)
if c == "<":
#return to the first step in the overall "two step" algorithm
#reprocessing the < byte
data.previous()
else:
#Read all attributes
attr = self.getAttribute()
while attr is not None:
attr = self.getAttribute()
return True
handlePossibleTag.func_annotations = {}
def handleOther(self):
return self.data.jumpTo(">")
handleOther.func_annotations = {}
def getAttribute(self):
u"""Return a name,value pair for the next attribute in the stream,
if one is found, or None"""
data = self.data
# Step 1 (skip chars)
c = data.skip(spaceCharactersBytes | frozenset(["/"]))
assert c is None or len(c) == 1
# Step 2
if c in (">", None):
return None
# Step 3
attrName = []
attrValue = []
#Step 4 attribute name
while True:
if c == "=" and attrName:
break
elif c in spaceCharactersBytes:
#Step 6!
c = data.skip()
break
elif c in ("/", ">"):
return "".join(attrName), ""
elif c in asciiUppercaseBytes:
attrName.append(c.lower())
elif c == None:
return None
else:
attrName.append(c)
#Step 5
c = data.next()
#Step 7
if c != "=":
data.previous()
return "".join(attrName), ""
#Step 8
data.next()
#Step 9
c = data.skip()
#Step 10
if c in ("'", '"'):
#10.1
quoteChar = c
while True:
#10.2
c = data.next()
#10.3
if c == quoteChar:
data.next()
return "".join(attrName), "".join(attrValue)
#10.4
elif c in asciiUppercaseBytes:
attrValue.append(c.lower())
#10.5
else:
attrValue.append(c)
elif c == ">":
return "".join(attrName), ""
elif c in asciiUppercaseBytes:
attrValue.append(c.lower())
elif c is None:
return None
else:
attrValue.append(c)
# Step 11
while True:
c = data.next()
if c in spacesAngleBrackets:
return "".join(attrName), "".join(attrValue)
elif c in asciiUppercaseBytes:
attrValue.append(c.lower())
elif c is None:
return None
else:
attrValue.append(c)
getAttribute.func_annotations = {}
class ContentAttrParser(object):
def __init__(self, data):
assert isinstance(data, str)
self.data = data
__init__.func_annotations = {}
def parse(self):
try:
#Check if the attr name is charset
#otherwise return
self.data.jumpTo("charset")
self.data.position += 1
self.data.skip()
if not self.data.currentByte == "=":
#If there is no = sign keep looking for attrs
return None
self.data.position += 1
self.data.skip()
#Look for an encoding between matching quote marks
if self.data.currentByte in ('"', "'"):
quoteMark = self.data.currentByte
self.data.position += 1
oldPosition = self.data.position
if self.data.jumpTo(quoteMark):
return self.data[oldPosition:self.data.position]
else:
return None
else:
#Unquoted value
oldPosition = self.data.position
try:
self.data.skipUntil(spaceCharactersBytes)
return self.data[oldPosition:self.data.position]
except StopIteration:
#Return the whole remaining value
return self.data[oldPosition:]
except StopIteration:
return None
parse.func_annotations = {}
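# ContentAttrParser extracts the charset token from attribute values such as
# 'text/html; charset=utf-8' or "charset='iso-8859-1'" (illustrative inputs).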
def codecName(encoding):
u"""Return the python codec name corresponding to an encoding or None if the
string doesn't correspond to a valid encoding."""
if isinstance(encoding, str):
try:
encoding = encoding.decode(u"ascii")
except UnicodeDecodeError:
return None
if encoding:
canonicalName = ascii_punctuation_re.sub(u"", encoding).lower()
return encodings.get(canonicalName, None)
else:
return None
codecName.func_annotations = {}<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from views import calendarpage, jsonsearch, fcdragmodify, EventManager, event_view
event_manager = EventManager.as_view()
urlpatterns = [
url(r'^$', calendarpage, name='events'),
url(r'^json/', jsonsearch, name='jsonsearch'),
url(r'^modify/', fcdragmodify, name='fcdragmodify'),
<|fim▁hole|> url(r'^(?P<eventid>\d+)/edit/', event_manager, name='editevent'),
url(r'^recurring/(?P<eventid>\d+)/edit/(?:orig-(?P<originaleventid>\d+)/)?', event_manager, {'editingregularevent': False}, name='editrecurringevent'),
# Must be last...
url(r'^(?P<eventid>\d+)/', event_view, name='viewevent'),
]<|fim▁end|> | url(r'^new/', event_manager, name='newevent'), |
<|file_name|>app.ts<|end_file_name|><|fim▁begin|>//import 'bootstrap';
//import 'bootstrap/css/bootstrap.css!';
export class App {<|fim▁hole|> router;
configureRouter(config, router){
config.title = 'Aurelia';
config.map([
{ route: ['','layout'], moduleId: 'views/layout', nav: true, title:'Compose' },
{ route: ['html','html-render'], moduleId: 'views/html-render', nav: true, title:'HTML Render' },
{ route: ['content-selectors'], moduleId: 'views/content-selectors', nav: true, title:'Content Selectors' },
{ route: ['modal'], moduleId: 'views/modal/index', nav: true, title:'Modal sample' },
{ route: ['sharedstate'], moduleId: 'views/sharing-state/index', nav: true, title:'Shared state' }
]);
this.router = router;
}
}<|fim▁end|> | |
<|file_name|>import3.py<|end_file_name|><|fim▁begin|>from....import a
from...import b
from..import c
from.import d
from : keyword.control.import.python, source.python
.... : punctuation.separator.period.python, source.python
import : keyword.control.import.python, source.python
: source.python
a : source.python<|fim▁hole|>b : source.python
from : keyword.control.import.python, source.python
.. : punctuation.separator.period.python, source.python
import : keyword.control.import.python, source.python
: source.python
c : source.python
from : keyword.control.import.python, source.python
. : punctuation.separator.period.python, source.python
import : keyword.control.import.python, source.python
: source.python
d : source.python<|fim▁end|> | from : keyword.control.import.python, source.python
... : punctuation.separator.period.python, source.python
import : keyword.control.import.python, source.python
: source.python |
<|file_name|>temperature_server.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
from flask import Flask, request, render_template
import os
import json
import time
import datetime
from smarthomemongo import SmartHomeDB
app = Flask(__name__)
smartDB = SmartHomeDB()
@app.route('/')
def index():
records = smartDB.getCurrentStats('raspberry')
if( 'timestamp' in records.keys() ):
ts = datetime.datetime.fromtimestamp(records['timestamp']).strftime('%Y-%m-%d %H:%M:%S')
records['timestamp_string'] = ts
return render_template('index.html',records=records)
@app.route('/add', methods=['POST'])
def add():
recordJson = request.get_json()
smartDB.insertTemperaturePoint(recordJson)
return 'Success', 200
@app.route('/update_stats', methods=['POST'])
def update_stats():
recordJson = request.get_json()
smartDB.updateCurrentStats(recordJson)
return 'Success', 200
@app.route('/get_current_stats',methods=['GET'])
def get_current_stats():
record = smartDB.getCurrentStats('raspberry')
return json.dumps(record)
<|fim▁hole|>
@app.route('/data.csv')
def get_data_csv():
records = smartDB.getTemperaturePoints()
return json.dumps(records)
@app.route('/upload_test1', methods=['POST'])
def upload_test():
recordJson = request.get_json()
smartDB.upload_collection(recordJson)
return 'Success', 200
if __name__ == '__main__':
app.run(host='0.0.0.0',debug=True)<|fim▁end|> | @app.route('/line_graph')
def get_line_graph():
return render_template('graph.html') |
<|file_name|>attachments.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input } from '@angular/core';
import { environment } from 'src/environments/environment';
import { ImageService } from 'src/app/maincontent/image';
@Component({
selector: 'app-attachments',
templateUrl: 'attachments.component.html',
styleUrls: ['attachments.component.css']
})
export class AttachmentsComponent {
@Input() attachments!: any[];
/**
* eg: media/news/3/thumbnails
*/
@Input() recorduri: string ='';
constructor(private imageService: ImageService) {
}
public getDefaultImage(picture: any): string {
return this.imageService.getDefaultThumbnail(environment.server, this.recorduri, picture);
}
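  // A srcset produced below typically looks like (illustrative values only):
  //   "media/news/3/thumbnails/img-320.jpg 320w, media/news/3/thumbnails/img-640.jpg 640w"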
/**<|fim▁hole|> return this.imageService.getThumbnailSrcSet(environment.server, this.recorduri, picture);
}
}<|fim▁end|> | * https://css-tricks.com/responsive-images-youre-just-changing-resolutions-use-srcset/
*/
public getThumbnailSrcSet(picture: any): string { |
<|file_name|>nodes.js<|end_file_name|><|fim▁begin|>function f() {
console.log(1);
console.log(2);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>issue-182.ts<|end_file_name|><|fim▁begin|>import "reflect-metadata";
import {createTestingConnections, closeTestingConnections, reloadTestingDatabases} from "../../utils/test-utils";
import {Connection} from "../../../src/connection/Connection";
import {Post} from "./entity/Post";
import {expect} from "chai";
import {PostStatus} from "./model/PostStatus";
describe("github issues > #182 enums are not saved properly", () => {
let connections: Connection[];
before(async () => connections = await createTestingConnections({
entities: [__dirname + "/entity/*{.js,.ts}"],
schemaCreate: true,
dropSchemaOnConnection: true,
enabledDrivers: ["mysql"] // we can properly test lazy-relations only on one platform
}));<|fim▁hole|> after(() => closeTestingConnections(connections));
it("should persist successfully with enum values", () => Promise.all(connections.map(async connection => {
const post1 = new Post();
post1.status = PostStatus.NEW;
post1.title = "Hello Post #1";
// persist
await connection.entityManager.persist(post1);
const loadedPosts1 = await connection.entityManager.findOne(Post, { title: "Hello Post #1" });
expect(loadedPosts1!).not.to.be.empty;
loadedPosts1!.should.be.eql({
id: 1,
title: "Hello Post #1",
status: PostStatus.NEW
});
// remove persisted
await connection.entityManager.remove(post1);
const post2 = new Post();
post2.status = PostStatus.ACTIVE;
post2.title = "Hello Post #1";
// persist
await connection.entityManager.persist(post2);
const loadedPosts2 = await connection.entityManager.findOne(Post, { title: "Hello Post #1" });
expect(loadedPosts2!).not.to.be.empty;
loadedPosts2!.should.be.eql({
id: 2,
title: "Hello Post #1",
status: PostStatus.ACTIVE
});
// remove persisted
await connection.entityManager.remove(post2);
const post3 = new Post();
post3.status = PostStatus.ACHIEVED;
post3.title = "Hello Post #1";
// persist
await connection.entityManager.persist(post3);
const loadedPosts3 = await connection.entityManager.findOne(Post, { title: "Hello Post #1" });
expect(loadedPosts3!).not.to.be.empty;
loadedPosts3!.should.be.eql({
id: 3,
title: "Hello Post #1",
status: PostStatus.ACHIEVED
});
// remove persisted
await connection.entityManager.remove(post3);
})));
});<|fim▁end|> | beforeEach(() => reloadTestingDatabases(connections)); |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Encoding of portable pixmap Images
pub use self::encoder::PPMEncoder as PPMEncoder;
pub use self::decoder::PPMDecoder as PPMDecoder;
mod encoder;
mod decoder;<|fim▁hole|>mod test {
use color::ColorType;
use image::{ImageDecoder, DecodingResult};
#[test]
fn test_roundtrip_ppm() {
// 3x3 image that tries all the 0/255 RGB combinations
let buf: [u8; 27] = [
0, 0, 0,
0, 0, 255,
0, 255, 0,
0, 255, 255,
255, 0, 0,
255, 0, 255,
255, 255, 0,
255, 255, 255,
255, 255, 255,
];
let mut stream = Vec::<u8>::new();
{
let mut encoder = super::PPMEncoder::new(&mut stream);
match encoder.encode(&buf, 3, 3, ColorType::RGB(8)) {
Ok(_) => {},
Err(_) => panic!("PPM encoder failed"),
};
}
let mut decoder = match super::PPMDecoder::new(&stream[..]) {
Ok(img) => img,
Err(e) => panic!("PPM decoder failed with {}", e),
};
match decoder.read_image() {
Ok(DecodingResult::U8(vec)) => {
assert_eq!(&buf[..], &vec[..]);
},
r => {
panic!("PPM: Got a strange image result {:?}", r);
}
}
}
#[test]
fn test_roundtrip_ppm_16bit() {
// 3x3 image that tries all the 0/65535 RGB combinations plus a few more values
// that check for big-endian conversion correctness
let buf: [u16; 27] = [
0, 0, 0,
0, 0, 65535,
0, 65535, 0,
0, 65535, 65535,
65535, 0, 0,
65535, 0, 65535,
65535, 65535, 0,
65535, 65535, 65535,
1000, 2000, 3000,
];
let mut bytebuf = [0 as u8; 54];
for (o, i) in bytebuf.chunks_mut(2).zip(buf.iter()) {
o[0] = (i >> 8) as u8;
o[1] = (i & 0xff) as u8;
}
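        // e.g. 1000 (0x03E8) is packed as the byte pair [0x03, 0xE8],
        // matching PPM's big-endian 16-bit sample layout.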
let mut stream = Vec::<u8>::new();
{
let mut encoder = super::PPMEncoder::new(&mut stream);
match encoder.encode(&bytebuf, 3, 3, ColorType::RGB(16)) {
Ok(_) => {},
Err(_) => panic!("PPM encoder failed"),
};
}
let mut decoder = match super::PPMDecoder::new(&stream[..]) {
Ok(img) => img,
Err(e) => panic!("PPM decoder failed with {}", e),
};
match decoder.read_image() {
Ok(DecodingResult::U16(vec)) => {
assert_eq!(&buf[..], &vec[..]);
},
r => {
panic!("PPM: Got a strange image result {:?}", r);
}
}
}
}<|fim▁end|> |
#[cfg(test)] |
<|file_name|>top.hh<|end_file_name|><|fim▁begin|>#ifndef _SDD_DD_TOP_HH_
#define _SDD_DD_TOP_HH_
#include <exception>
#include <memory> // make_shared, shared_ptr
#include <sstream>
#include <string>
#include <vector>
#include "sdd/dd/definition_fwd.hh"
namespace sdd {
/*------------------------------------------------------------------------------------------------*/
/// @internal
/// @brief A base class to wrap operations of different type.
struct operation_wrapper_base
{
virtual
~operation_wrapper_base()
{}
virtual std::string print() const noexcept = 0;
};
/// @internal
/// @brief A new type for each different operation, but which inherits from
/// operation_wrapper_base.
///
/// It it thus possible to have a list containing different operations by having a pointer
/// to the base class operation_wrapper_base.
template <typename Operation>
struct operation_wrapper
: public operation_wrapper_base
{
const Operation operation_;
/// @brief Constructor.
///
/// Operations are non-copyable, but movable.
operation_wrapper(Operation&& op)
: operation_(std::move(op))
{}
/// @brief Return a textual description of the contained operation.
std::string
print()
const noexcept
{
std::stringstream ss;
ss << operation_;
return ss.str();
}
};
/*------------------------------------------------------------------------------------------------*/
/// @exception top
/// @brief The top terminal.
///
/// The top terminal is represented with an exception thrown when encoutering incompatible SDD.
template <typename C>
class top final
: public std::exception
{
private:
/// @brief The left incompatible operand.
const SDD<C> lhs_;
/// @brief The right incompatible operand.
const SDD<C> rhs_;
/// @brief The sequence, in reverse order, of operations that led to the error.
std::vector<std::shared_ptr<operation_wrapper_base>> steps_;
/// @brief Textual description of the error.
mutable std::string description_;
public:
/// @internal
top(const SDD<C>& lhs, const SDD<C>& rhs)
: lhs_(lhs), rhs_(rhs), steps_(), description_()
{}
~top()
noexcept
{}
/// @brief Return the textual description of the error.
///
/// All operations that led to the error are printed.
const char*
what()
const noexcept
{
return description().c_str();
}
/// @brief Get the left incompatible operand.
///
/// Note that 'left' and 'right' are arbitrary.
SDD<C>
lhs()
const noexcept
{
return lhs_;
}
/// @brief Get the right incompatible operand.
///
/// Note that 'left' and 'right' are arbitrary.
SDD<C>
rhs()
const noexcept
{
return rhs_;
}
/// @internal
/// @brief Add an operation to the sequence of operations that lead to incompatible SDD.
///
/// Called by mem::cache.
template <typename Operation>
void
add_step(Operation&& op)
{
steps_.emplace_back(std::make_shared<operation_wrapper<Operation>>(std::move(op)));
}
/// @internal
/// @brief Return a textual description.
std::string&
description()
const noexcept
{
if (description_.empty())
{
std::stringstream ss;
ss << "Incompatible SDD: " << lhs_ << " and " << rhs_ << "."
<< std::endl
<< "The following operations led to this error (first to last): "
<< std::endl;
std::size_t i = 1;
for (auto rcit = steps_.crbegin(); rcit != steps_.crend(); ++rcit, ++i)
{
ss << i << " : " << (*rcit)->print() << std::endl;
}
description_ = ss.str();
}<|fim▁hole|> return description_;
}
};
/*------------------------------------------------------------------------------------------------*/
} // namespace sdd
#endif // _SDD_DD_TOP_HH_<|fim▁end|> | |
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common::Config;
#[cfg(target_os = "windows")]
use std::env;
/// Conversion table from triple OS name to Rust SYSNAME
static OS_TABLE: &'static [(&'static str, &'static str)] = &[
("mingw32", "windows"),
("win32", "windows"),
("windows", "windows"),
("darwin", "macos"),
("android", "android"),
("linux", "linux"),
("freebsd", "freebsd"),
("dragonfly", "dragonfly"),
("openbsd", "openbsd"),
];
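// For example, get_os("x86_64-unknown-linux-gnu") returns "linux" and
// get_os("i686-pc-windows-gnu") returns "windows"; a triple that matches no
// table entry panics below.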
pub fn get_os(triple: &str) -> &'static str {
for &(triple_os, os) in OS_TABLE {
if triple.contains(triple_os) {
return os
}
}<|fim▁hole|>}
#[cfg(target_os = "windows")]
pub fn make_new_path(path: &str) -> String {
// Windows just uses PATH as the library search path, so we have to
// maintain the current value while adding our own
match env::var(lib_path_env_var()) {
Ok(curr) => {
format!("{}{}{}", path, path_div(), curr)
}
Err(..) => path.to_string()
}
}
#[cfg(target_os = "windows")]
pub fn lib_path_env_var() -> &'static str { "PATH" }
#[cfg(target_os = "windows")]
pub fn path_div() -> &'static str { ";" }
pub fn logv(config: &Config, s: String) {
debug!("{}", s);
if config.verbose { println!("{}", s); }
}<|fim▁end|> | panic!("Cannot determine OS from triple"); |
<|file_name|>insert3_neg.cc<|end_file_name|><|fim▁begin|>// Copyright (C) 2010-2013 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
//
// { dg-require-debug-mode "" }
// { dg-do run { xfail *-*-* } }
#include <map><|fim▁hole|>void test01()
{
__gnu_test::check_insert3<std::map<int, int> >();
}
int main()
{
test01();
return 0;
}<|fim▁end|> | #include <debug/checks.h>
|
<|file_name|>Use0004.rs<|end_file_name|><|fim▁begin|>use a::b::{c, d, e};
use foo::bar::{self, moo, goo};
use foo::bar::{moo, self, goo};
use a::b::{;
use a::b::};
use a::b::{};
use a::{super};
use a::{*};
<|fim▁hole|><|fim▁end|> | use a::{b}::c; |
<|file_name|>BackHomeButton.test.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { mount } from 'enzyme';
import { Link, HashRouter as Router } from 'react-router-dom';
import { Button } from 'react-bootstrap';
import BackHomeButton from '../BackHomeButton';
describe('<BackHomeButton/>', () => {
it('<BackHomeButton /> should render <Router/>', () => {
const component = mount(<BackHomeButton />);
expect(component.find(Router)).toHaveLength(1);
});
it('<BackHomeButton /> should render <Link/> with correct props', () => {
const component = mount(<BackHomeButton />);
expect(component.find(Router)).toHaveLength(1);
expect(component.find(Link)).toHaveLength(1);
const props = component.find(Link).props();
expect(props).toEqual({ to: '/', children: expect.anything() });
expect(component.find(Link)).toHaveLength(1);
});
it('<BackHomeButton /> should render <Button/> with correct props', () => {
const component = mount(<BackHomeButton />);
expect(component.find(Router)).toHaveLength(1);
expect(component.find(Link)).toHaveLength(1);
expect(component.find(Button)).toHaveLength(1);
const ButtonProps = component.find(Button).props();
expect(ButtonProps).toEqual({<|fim▁hole|> variant: 'primary',
children: 'Back Home',
active: false,
disabled: false,
type: 'button',
});
});
});<|fim▁end|> | |
<|file_name|>lint-stability.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:lint_stability.rs
// aux-build:inherited_stability.rs
#![feature(globs)]
#![deny(unstable)]
#![deny(deprecated)]
#![deny(experimental)]
#![allow(dead_code)]
mod cross_crate {
extern crate lint_stability;
use self::lint_stability::*;
fn test() {
let foo = MethodTester;
deprecated(); //~ ERROR use of deprecated item
foo.method_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated(); //~ ERROR use of deprecated item
deprecated_text(); //~ ERROR use of deprecated item: text
foo.method_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
experimental(); //~ ERROR use of experimental item
foo.method_experimental(); //~ ERROR use of experimental item
foo.trait_experimental(); //~ ERROR use of experimental item
experimental_text(); //~ ERROR use of experimental item: text
foo.method_experimental_text(); //~ ERROR use of experimental item: text
foo.trait_experimental_text(); //~ ERROR use of experimental item: text
unstable(); //~ ERROR use of unstable item
foo.method_unstable(); //~ ERROR use of unstable item
foo.trait_unstable(); //~ ERROR use of unstable item
unstable_text(); //~ ERROR use of unstable item: text
foo.method_unstable_text(); //~ ERROR use of unstable item: text
foo.trait_unstable_text(); //~ ERROR use of unstable item: text
unmarked(); //~ ERROR use of unmarked item
foo.method_unmarked(); //~ ERROR use of unmarked item
foo.trait_unmarked(); //~ ERROR use of unmarked item
stable();
foo.method_stable();
foo.trait_stable();
stable_text();
foo.method_stable_text();
foo.trait_stable_text();
frozen();
foo.method_frozen();
foo.trait_frozen();
frozen_text();
foo.method_frozen_text();
foo.trait_frozen_text();
locked();
foo.method_locked();
foo.trait_locked();
locked_text();
foo.method_locked_text();
foo.trait_locked_text();
let _ = DeprecatedStruct { i: 0 }; //~ ERROR use of deprecated item
let _ = ExperimentalStruct { i: 0 }; //~ ERROR use of experimental item
let _ = UnstableStruct { i: 0 }; //~ ERROR use of unstable item
let _ = UnmarkedStruct { i: 0 }; //~ ERROR use of unmarked item
let _ = StableStruct { i: 0 };
let _ = FrozenStruct { i: 0 };
let _ = LockedStruct { i: 0 };
let _ = DeprecatedUnitStruct; //~ ERROR use of deprecated item
let _ = ExperimentalUnitStruct; //~ ERROR use of experimental item
let _ = UnstableUnitStruct; //~ ERROR use of unstable item
let _ = UnmarkedUnitStruct; //~ ERROR use of unmarked item
let _ = StableUnitStruct;
let _ = FrozenUnitStruct;
let _ = LockedUnitStruct;
let _ = DeprecatedVariant; //~ ERROR use of deprecated item
let _ = ExperimentalVariant; //~ ERROR use of experimental item
let _ = UnstableVariant; //~ ERROR use of unstable item
let _ = UnmarkedVariant; //~ ERROR use of unmarked item
let _ = StableVariant;
let _ = FrozenVariant;
let _ = LockedVariant;
let _ = DeprecatedTupleStruct (1); //~ ERROR use of deprecated item
let _ = ExperimentalTupleStruct (1); //~ ERROR use of experimental item
let _ = UnstableTupleStruct (1); //~ ERROR use of unstable item
let _ = UnmarkedTupleStruct (1); //~ ERROR use of unmarked item
let _ = StableTupleStruct (1);
let _ = FrozenTupleStruct (1);
let _ = LockedTupleStruct (1);
}
fn test_method_param<F: Trait>(foo: F) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_experimental(); //~ ERROR use of experimental item
foo.trait_experimental_text(); //~ ERROR use of experimental item: text
foo.trait_unstable(); //~ ERROR use of unstable item
foo.trait_unstable_text(); //~ ERROR use of unstable item: text
foo.trait_unmarked(); //~ ERROR use of unmarked item
foo.trait_stable();
}
fn test_method_object(foo: &Trait) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_experimental(); //~ ERROR use of experimental item
foo.trait_experimental_text(); //~ ERROR use of experimental item: text
foo.trait_unstable(); //~ ERROR use of unstable item
foo.trait_unstable_text(); //~ ERROR use of unstable item: text
foo.trait_unmarked(); //~ ERROR use of unmarked item
foo.trait_stable();
}
}
mod inheritance {
extern crate inherited_stability;
use self::inherited_stability::*;
fn test_inheritance() {
experimental(); //~ ERROR use of experimental item
stable();
stable_mod::experimental(); //~ ERROR use of experimental item
stable_mod::stable();
experimental_mod::experimental(); //~ ERROR use of experimental item
experimental_mod::stable();
let _ = ExperimentalVariant; //~ ERROR use of experimental item
let _ = StableVariant;
let x: uint = 0;
x.experimental(); //~ ERROR use of experimental item
x.stable();
}
}
mod this_crate {
#[deprecated]
pub fn deprecated() {}
#[deprecated="text"]
pub fn deprecated_text() {}
#[experimental]
pub fn experimental() {}
#[experimental="text"]
pub fn experimental_text() {}
#[unstable]
pub fn unstable() {}
#[unstable="text"]
pub fn unstable_text() {}
pub fn unmarked() {}
#[stable]
pub fn stable() {}
#[stable="text"]
pub fn stable_text() {}
#[locked]
pub fn locked() {}
#[locked="text"]
pub fn locked_text() {}
#[frozen]
pub fn frozen() {}
#[frozen="text"]
pub fn frozen_text() {}
#[stable]
pub struct MethodTester;
impl MethodTester {
#[deprecated]
pub fn method_deprecated(&self) {}
#[deprecated="text"]
pub fn method_deprecated_text(&self) {}
#[experimental]
pub fn method_experimental(&self) {}
#[experimental="text"]
pub fn method_experimental_text(&self) {}
#[unstable]
pub fn method_unstable(&self) {}
#[unstable="text"]
pub fn method_unstable_text(&self) {}
pub fn method_unmarked(&self) {}
#[stable]
pub fn method_stable(&self) {}
#[stable="text"]
pub fn method_stable_text(&self) {}
#[locked]
pub fn method_locked(&self) {}
#[locked="text"]
pub fn method_locked_text(&self) {}
#[frozen]
pub fn method_frozen(&self) {}
#[frozen="text"]
pub fn method_frozen_text(&self) {}
}
pub trait Trait {
#[deprecated]
fn trait_deprecated(&self) {}
#[deprecated="text"]
fn trait_deprecated_text(&self) {}
#[experimental]
fn trait_experimental(&self) {}
#[experimental="text"]
fn trait_experimental_text(&self) {}
#[unstable]
fn trait_unstable(&self) {}
#[unstable="text"]
fn trait_unstable_text(&self) {}
fn trait_unmarked(&self) {}
#[stable]
fn trait_stable(&self) {}
#[stable="text"]
fn trait_stable_text(&self) {}
#[locked]
fn trait_locked(&self) {}
#[locked="text"]
fn trait_locked_text(&self) {}
#[frozen]
fn trait_frozen(&self) {}
#[frozen="text"]
fn trait_frozen_text(&self) {}
}
impl Trait for MethodTester {}
#[deprecated]
pub struct DeprecatedStruct { i: int }
#[experimental]
pub struct ExperimentalStruct { i: int }
#[unstable]
pub struct UnstableStruct { i: int }
pub struct UnmarkedStruct { i: int }
#[stable]
pub struct StableStruct { i: int }
#[frozen]
pub struct FrozenStruct { i: int }
#[locked]
pub struct LockedStruct { i: int }
#[deprecated]
pub struct DeprecatedUnitStruct;
#[experimental]
pub struct ExperimentalUnitStruct;
#[unstable]
pub struct UnstableUnitStruct;
pub struct UnmarkedUnitStruct;
#[stable]
pub struct StableUnitStruct;
#[frozen]
pub struct FrozenUnitStruct;
#[locked]
pub struct LockedUnitStruct;
pub enum Enum {
#[deprecated]
DeprecatedVariant,
#[experimental]
ExperimentalVariant,
#[unstable]
UnstableVariant,
UnmarkedVariant,
#[stable]
StableVariant,
#[frozen]
FrozenVariant,
#[locked]
LockedVariant,
}
#[deprecated]
pub struct DeprecatedTupleStruct(int);
#[experimental]
pub struct ExperimentalTupleStruct(int);
#[unstable]
pub struct UnstableTupleStruct(int);
pub struct UnmarkedTupleStruct(int);
#[stable]
pub struct StableTupleStruct(int);
#[frozen]
pub struct FrozenTupleStruct(int);
#[locked]
pub struct LockedTupleStruct(int);
fn test() {
// None of the following should generate errors, because
// stability attributes now have meaning only *across* crates,
// not within a single crate.
let foo = MethodTester;
deprecated();
foo.method_deprecated();
foo.trait_deprecated();
deprecated_text();
foo.method_deprecated_text();
foo.trait_deprecated_text();
experimental();
foo.method_experimental();
foo.trait_experimental();
experimental_text();
foo.method_experimental_text();
foo.trait_experimental_text();
unstable();
foo.method_unstable();
foo.trait_unstable();
unstable_text();
foo.method_unstable_text();
foo.trait_unstable_text();
unmarked();
foo.method_unmarked();
foo.trait_unmarked();
stable();
foo.method_stable();
foo.trait_stable();
stable_text();
foo.method_stable_text();
foo.trait_stable_text();
frozen();
foo.method_frozen();
foo.trait_frozen();
frozen_text();
foo.method_frozen_text();
foo.trait_frozen_text();
locked();
foo.method_locked();
foo.trait_locked();
locked_text();
foo.method_locked_text();
foo.trait_locked_text();
let _ = DeprecatedStruct { i: 0 };
let _ = ExperimentalStruct { i: 0 };
let _ = UnstableStruct { i: 0 };
let _ = UnmarkedStruct { i: 0 };
let _ = StableStruct { i: 0 };
let _ = FrozenStruct { i: 0 };
let _ = LockedStruct { i: 0 };
let _ = DeprecatedUnitStruct;
let _ = ExperimentalUnitStruct;
let _ = UnstableUnitStruct;
let _ = UnmarkedUnitStruct;
let _ = StableUnitStruct;
let _ = FrozenUnitStruct;
let _ = LockedUnitStruct;
let _ = DeprecatedVariant;
let _ = ExperimentalVariant;
let _ = UnstableVariant;
let _ = UnmarkedVariant;
let _ = StableVariant;
let _ = FrozenVariant;
let _ = LockedVariant;
let _ = DeprecatedTupleStruct (1);
let _ = ExperimentalTupleStruct (1);
let _ = UnstableTupleStruct (1);
let _ = UnmarkedTupleStruct (1);
let _ = StableTupleStruct (1);
let _ = FrozenTupleStruct (1);
let _ = LockedTupleStruct (1);
}
fn test_method_param<F: Trait>(foo: F) {
foo.trait_deprecated();
foo.trait_deprecated_text();
foo.trait_experimental();<|fim▁hole|> foo.trait_unstable();
foo.trait_unstable_text();
foo.trait_unmarked();
foo.trait_stable();
}
fn test_method_object(foo: &Trait) {
foo.trait_deprecated();
foo.trait_deprecated_text();
foo.trait_experimental();
foo.trait_experimental_text();
foo.trait_unstable();
foo.trait_unstable_text();
foo.trait_unmarked();
foo.trait_stable();
}
}
fn main() {}<|fim▁end|> | foo.trait_experimental_text(); |
<|file_name|>number.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004-2011, The Dojo Foundation All Rights Reserved.
<|fim▁hole|>//>>built
define("dojo/cldr/nls/nl/number",{"group":".","percentSign":"%","exponential":"E","scientificFormat":"#E0","percentFormat":"#,##0%","list":";","infinity":"∞","minusSign":"-","decimal":",","nan":"NaN","perMille":"‰","decimalFormat":"#,##0.###","currencyFormat":"¤ #,##0.00;¤ #,##0.00-","plusSign":"+","decimalFormat-long":"000 biljoen","decimalFormat-short":"000 bln'.'"});<|fim▁end|> | Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
*/
|
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Serf(SConsPackage):
"""Apache Serf - a high performance C-based HTTP client library
built upon the Apache Portable Runtime (APR) library"""
homepage = 'https://serf.apache.org/'
url = 'https://archive.apache.org/dist/serf/serf-1.3.9.tar.bz2'
maintainers = ['cosmicexplorer']
version('1.3.9', sha256='549c2d21c577a8a9c0450facb5cca809f26591f048e466552240947bdf7a87cc')
version('1.3.8', sha256='e0500be065dbbce490449837bb2ab624e46d64fc0b090474d9acaa87c82b2590')
variant('debug', default=False,
description='Enable debugging info and strict compile warnings')
depends_on('apr')
depends_on('apr-util')
depends_on('openssl')
depends_on('python+pythoncmd', type='build')
depends_on('[email protected]:', type='build')
depends_on('uuid')
depends_on('zlib')
patch('py3syntax.patch')
patch('py3-hashbang.patch')
def build_args(self, spec, prefix):
args = {
'PREFIX': prefix,
'APR': spec['apr'].prefix,
'APU': spec['apr-util'].prefix,
'OPENSSL': spec['openssl'].prefix,
'ZLIB': spec['zlib'].prefix,
'DEBUG': 'yes' if '+debug' in spec else 'no',
}
# SCons doesn't pass Spack environment variables to the
# execution environment. Therefore, we can't use Spack's compiler
# wrappers. Use the actual compilers. SCons seems to RPATH things<|fim▁hole|>
# Old versions of serf ignore the ZLIB variable on non-Windows platforms.
# Also, there is no UUID variable to specify its installation location.
# Pass explicit link flags for both.
library_dirs = []
include_dirs = []
for dep in spec.dependencies(deptype='link'):
query = self.spec[dep.name]
library_dirs.extend(query.libs.directories)
include_dirs.extend(query.headers.directories)
rpath = self.compiler.cc_rpath_arg
args['LINKFLAGS'] = '-L' + ' -L'.join(library_dirs)
args['LINKFLAGS'] += ' ' + rpath + (' ' + rpath).join(library_dirs)
args['CPPFLAGS'] = '-I' + ' -I'.join(include_dirs)
return [key + '=' + value for key, value in args.items()]
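# Illustration only -- the paths below are hypothetical, not real Spack
# output. The key=value list returned above becomes the argument vector of
# the `scons` call that Spack's SConsPackage base class issues during the
# build phase, roughly:
#
#   scons PREFIX=/opt/spack/serf-1.3.9 APR=/opt/spack/apr \
#         APU=/opt/spack/apr-util OPENSSL=/opt/spack/openssl \
#         ZLIB=/opt/spack/zlib DEBUG=no CC=/usr/bin/gcc \
#         LINKFLAGS='-L/opt/spack/zlib/lib -Wl,-rpath,/opt/spack/zlib/lib ...' \
#         CPPFLAGS='-I/opt/spack/zlib/include ...'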
def build_test(self):
# FIXME: Several test failures:
#
# There were 14 failures:
# 1) test_ssl_trust_rootca
# 2) test_ssl_certificate_chain_with_anchor
# 3) test_ssl_certificate_chain_all_from_server
# 4) test_ssl_no_servercert_callback_allok
# 5) test_ssl_large_response
# 6) test_ssl_large_request
# 7) test_ssl_client_certificate
# 8) test_ssl_future_server_cert
# 9) test_setup_ssltunnel
# 10) test_ssltunnel_basic_auth
# 11) test_ssltunnel_basic_auth_server_has_keepalive_off
# 12) test_ssltunnel_basic_auth_proxy_has_keepalive_off
# 13) test_ssltunnel_basic_auth_proxy_close_conn_on_200resp
# 14) test_ssltunnel_digest_auth
#
# These seem to be related to:
# https://groups.google.com/forum/#!topic/serf-dev/YEFTTdF1Qwc
scons('check')<|fim▁end|> | # on its own anyway.
args['CC'] = self.compiler.cc |
<|file_name|>VisibleSpecifiedColumnElementForStg.py<|end_file_name|><|fim▁begin|>from Elements.STG.Base.IElementForStg import IElementForStg
class VisibleSpecifiedColumnElementForStg(IElementForStg):
def __init__(self, templates, settingsObject):
self.typeName = "Specified"<|fim▁hole|> super(VisibleSpecifiedColumnElementForStg, self).__init__(templates, settingsObject)
def getType(self):
return self.typeName
def getTemplateName(self):
return self.templateName<|fim▁end|> | self.templateName = "SpecifiedVisibledColumn"
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
The OpenMP package contains all the code templates required for OpenMP
code generation in ANNarchy.
BaseTemplates:
defines the basic definitions common to all sparse matrix formats, e.g. the projection header
[FORMAT]_OpenMP:
defines the format-specific definitions for the currently available formats:<|fim▁hole|> * CSR: compressed sparse row
* ELL: ELLPACK/ITPACK
* ELL-R: ELLPACK format with row-length array
* Dense: a full matrix representation
there are some special purpose implementations:
* CSR_T: compressed sparse row (transposed)
* LIL_P: a partitioned LIL representation
"""
from . import LIL as LIL_OpenMP
from . import LIL_P as LIL_Sliced_OpenMP
from . import COO as COO_OpenMP
from . import CSR as CSR_OpenMP
from . import CSR_T as CSR_T_OpenMP
from . import CSR_T_P as CSR_T_Sliced_OpenMP
from . import ELL as ELL_OpenMP
from . import ELLR as ELLR_OpenMP
from . import Dense as Dense_OpenMP
__all__ = ["BaseTemplates", "LIL_OpenMP", "LIL_Sliced_OpenMP", "COO_OpenMP", "CSR_OpenMP", "CSR_T_OpenMP", "CSR_T_Sliced_OpenMP", "ELL_OpenMP", "ELLR_OpenMP", "Dense_OpenMP"]<|fim▁end|> |
* LIL: list-in-list
* COO: coordinate |