prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>preprocess_data.py<|end_file_name|><|fim▁begin|># Adapted from: https://github.com/sussexwearlab/DeepConvLSTM
__author__ = 'fjordonez, gchevalier'
from signal_filtering import filter_opportunity_datasets_accelerometers
import os
import zipfile
import argparse
import numpy as np
import cPickle as cp
from io import BytesIO
from pandas import Series
# Hardcoded number of sensor channels employed in the OPPORTUNITY challenge
NB_SENSOR_CHANNELS = 113
NB_SENSOR_CHANNELS_WITH_FILTERING = 149 # =77 gyros +36*2 accelerometer channels
# Hardcoded names of the files defining the OPPORTUNITY challenge data. As named in the original data.
OPPORTUNITY_DATA_FILES_TRAIN = [
'OpportunityUCIDataset/dataset/S1-Drill.dat',
'OpportunityUCIDataset/dataset/S1-ADL1.dat',
'OpportunityUCIDataset/dataset/S1-ADL2.dat',
'OpportunityUCIDataset/dataset/S1-ADL3.dat',
'OpportunityUCIDataset/dataset/S1-ADL4.dat',
'OpportunityUCIDataset/dataset/S1-ADL5.dat',
'OpportunityUCIDataset/dataset/S2-Drill.dat',
'OpportunityUCIDataset/dataset/S2-ADL1.dat',
'OpportunityUCIDataset/dataset/S2-ADL2.dat',
'OpportunityUCIDataset/dataset/S2-ADL3.dat',
'OpportunityUCIDataset/dataset/S3-Drill.dat',
'OpportunityUCIDataset/dataset/S3-ADL1.dat',
'OpportunityUCIDataset/dataset/S3-ADL2.dat',
'OpportunityUCIDataset/dataset/S3-ADL3.dat'
]
OPPORTUNITY_DATA_FILES_TEST = [
'OpportunityUCIDataset/dataset/S2-ADL4.dat',
'OpportunityUCIDataset/dataset/S2-ADL5.dat',
'OpportunityUCIDataset/dataset/S3-ADL4.dat',
'OpportunityUCIDataset/dataset/S3-ADL5.dat'
]
def select_columns_opp(data):
"""Selection of the 113 columns employed in the OPPORTUNITY challenge
:param data: numpy integer matrix
Sensor data (all features)
:return: tuple((numpy integer 2D matrix, numpy integer 1D matrix))
(Selection of features (N, f), feature_is_accelerometer (f,) one-hot)
"""
# In terms of column_names.txt's ranges: excluded-included (here 0-indexed)
features_delete = np.arange(46, 50)
features_delete = np.concatenate([features_delete, np.arange(59, 63)])
features_delete = np.concatenate([features_delete, np.arange(72, 76)])
features_delete = np.concatenate([features_delete, np.arange(85, 89)])
features_delete = np.concatenate([features_delete, np.arange(98, 102)])
features_delete = np.concatenate([features_delete, np.arange(134, 243)])
features_delete = np.concatenate([features_delete, np.arange(244, 249)])
# In terms of column_names.txt's ranges: excluded-included
features_acc = np.arange(1, 37)
features_acc = np.concatenate([features_acc, np.arange(134, 194)])
features_acc = np.concatenate([features_acc, np.arange(207, 231)])
# One-hot for everything that is an accelerometer
is_accelerometer = np.zeros([243])
is_accelerometer[features_acc] = 1
# Deleting some signals to keep only the 113 of the challenge
data = np.delete(data, features_delete, 1)
is_accelerometer = np.delete(is_accelerometer, features_delete, 0)
# Shape `(N, f), (f, )`
# where N is number of timesteps and f is 113 features, one-hot
return data, is_accelerometer
def normalize(x):
"""Normalizes all sensor channels by mean substraction,
dividing by the standard deviation and by 2.
:param x: numpy integer matrix
Sensor data
:return:
Normalized sensor data
"""
x = np.array(x, dtype=np.float32)
m = np.mean(x, axis=0)
x -= m
std = np.std(x, axis=0)
std += 0.000001
x /= (std * 2) # 2 is for having smaller values
return x
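# --- Illustrative sketch (not part of the original script): what `normalize`
# does to a tiny toy matrix. The helper name `_normalize_example` is hypothetical.
def _normalize_example():
    toy = np.array([[1.0, 10.0], [2.0, 20.0], [3.0, 30.0]])
    out = normalize(toy)
    # Each column now has ~zero mean, and its standard deviation is ~0.5
    # because the values are divided by twice the per-channel std.
    return out.mean(axis=0), out.std(axis=0)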
def split_data_into_time_gyros_accelerometers(data, is_accelerometer):
# Assuming index 0 of features is reserved for time.
# Splitting data into gyros, accelerometers and time:
is_accelerometer = np.array(is_accelerometer*2-1, dtype=np.int32)
# is_accelerometer's zeros have been replaced by -1. 1's are untouched.
plane = np.arange(len(is_accelerometer)) * is_accelerometer
delete_gyros = [-e for e in plane if e <= 0]
delete_accms = [ e for e in plane if e >= 0]
time = data[:,0]
gyros = np.delete(data, delete_accms, 1)
accms = np.delete(data, delete_gyros, 1)
return time, gyros, accms
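# --- Worked example of the sign trick above (illustrative comment only):
# with is_accelerometer = [0, 1, 0, 1] (columns 1 and 3 are accelerometers),
# `is_accelerometer*2-1` gives [-1, 1, -1, 1] and plane = arange(4)*[-1, 1, -1, 1]
# = [0, 1, -2, 3]. delete_gyros then collects the gyro column indices [0, 2] and
# delete_accms the accelerometer indices [0, 1, 3]; both contain index 0, so the
# time column is removed from both splits and only returned separately.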
def divide_x_y(data, label, filter_accelerometers):
"""Segments each sample into (time+features) and (label)
:param data: numpy integer matrix
Sensor data
:param label: string, ['gestures' (default), 'locomotion']
Type of activities to be recognized
:return: numpy integer matrix, numpy integer array
Features encapsulated into a matrix and labels as an array
"""
if filter_accelerometers:
data_x = data[:, :114]
else:
data_x = data[:,1:114]
# Choose labels type for y
if label not in ['locomotion', 'gestures']:
raise RuntimeError("Invalid label: '%s'" % label)
if label == 'locomotion':
data_y = data[:, 114] # Locomotion label
elif label == 'gestures':
data_y = data[:, 115] # Gestures label
return data_x, data_y
def adjust_idx_labels(data_y, label):
"""Transforms original labels into the range [0, nb_labels-1]
:param data_y: numpy integer array
Sensor labels
:param label: string, ['gestures' (default), 'locomotion']
Type of activities to be recognized
:return: numpy integer array
Modified sensor labels
"""
if label == 'locomotion': # Labels for locomotion are adjusted
data_y[data_y == 4] = 3
data_y[data_y == 5] = 4
elif label == 'gestures': # Labels for gestures are adjusted
data_y[data_y == 406516] = 1
data_y[data_y == 406517] = 2
data_y[data_y == 404516] = 3
data_y[data_y == 404517] = 4
data_y[data_y == 406520] = 5
data_y[data_y == 404520] = 6
data_y[data_y == 406505] = 7
data_y[data_y == 404505] = 8
data_y[data_y == 406519] = 9
data_y[data_y == 404519] = 10
data_y[data_y == 406511] = 11
data_y[data_y == 404511] = 12
data_y[data_y == 406508] = 13
data_y[data_y == 404508] = 14
data_y[data_y == 408512] = 15
data_y[data_y == 407521] = 16
data_y[data_y == 405506] = 17
return data_y
def check_data(data_set):
"""Try to access to the file and checks if dataset is in the data directory
In case the file is not found try to download it from original location
:param data_set:
Path with original OPPORTUNITY zip file
:return:
"""
print 'Checking dataset {0}'.format(data_set)
data_dir, data_file = os.path.split(data_set)
# When a directory is not provided, check if dataset is in the data directory
if data_dir == "" and not os.path.isfile(data_set):
new_path = os.path.join(os.path.split(__file__)[0], "data", data_set)
if os.path.isfile(new_path) or data_file == 'OpportunityUCIDataset.zip':
data_set = new_path
# When dataset not found, try to download it from UCI repository
if (not os.path.isfile(data_set)) and data_file == 'OpportunityUCIDataset.zip':
print '... dataset path {0} not found'.format(data_set)
import urllib
origin = (
'https://archive.ics.uci.edu/ml/machine-learning-databases/00226/OpportunityUCIDataset.zip'
)
if not os.path.exists(data_dir):
print '... creating directory {0}'.format(data_dir)
os.makedirs(data_dir)
print '... downloading data from {0}'.format(origin)
urllib.urlretrieve(origin, data_set)
return data_dir
def process_dataset_file(data, label, filter_accelerometers):
"""Function defined as a pipeline to process individual OPPORTUNITY files
:param data: numpy integer matrix
Matrix containing data samples (rows) for every sensor channel (column)
:param label: string, ['gestures' (default), 'locomotion']
Type of activities to be recognized
:return: numpy integer matrix, numpy integer array
Processed sensor data, segmented into features (x) and labels (y)
"""
# Select correct columns
data, is_accelerometer = select_columns_opp(data)
# Columns are segmented into features and labels
data_x, data_y = divide_x_y(data, label, filter_accelerometers)
data_y = adjust_idx_labels(data_y, label)
data_y = data_y.astype(int)
# Perform linear interpolation (a.k.a. filling in NaN)
data_x = np.array([Series(i).interpolate() for i in data_x.T]).T
# Remaining missing data are converted to zero
data_x[np.isnan(data_x)] = 0
# All sensor channels are normalized
data_x = normalize(data_x)
if filter_accelerometers:
# The time column is discarded; the accelerometer channels are low-pass
# filtered to separate the gravity component and remove noise.
_, x_gyros, x_accms = split_data_into_time_gyros_accelerometers(
data_x, is_accelerometer
)
print "gyros' shape: {}".format(x_gyros.shape)
print "old accelerometers' shape: {}".format(x_accms.shape)
x_accms = normalize(filter_opportunity_datasets_accelerometers(x_accms))
print "new accelerometers' shape: {}".format(x_accms.shape)
# Put the features back together (column-wise concatenation)
data_x = np.hstack([x_gyros, x_accms])
print "new total shape: {}".format(data_x.shape)
return data_x, data_y
def load_data_files(zipped_dataset, label, data_files, filter_accelerometers=False):
"""Loads specified data files' features (x) and labels (y)
:param zipped_dataset: ZipFile
OPPORTUNITY zip file to read from
:param label: string, ['gestures' (default), 'locomotion']
Type of activities to be recognized. The OPPORTUNITY dataset includes several annotations to perform
recognition modes of locomotion/postures and recognition of sporadic gestures.
:param data_files: list of strings
Data files to load.
:return: numpy integer matrix, numpy integer array
Loaded sensor data, segmented into features (x) and labels (y)
"""
nb_sensors = NB_SENSOR_CHANNELS_WITH_FILTERING if filter_accelerometers else NB_SENSOR_CHANNELS
data_x = np.empty((0, nb_sensors))
data_y = np.empty((0))
for filename in data_files:
try:
data = np.loadtxt(BytesIO(zipped_dataset.read(filename)))
print '... file {0}'.format(filename)
x, y = process_dataset_file(data, label, filter_accelerometers)
data_x = np.vstack((data_x, x))
data_y = np.concatenate([data_y, y])
print "Data's shape yet: "
print data_x.shape
except KeyError:
print 'ERROR: Did not find {0} in zip file'.format(filename)
return data_x, data_y
def generate_data(dataset, target_filename, label):
"""Function to read the OPPORTUNITY challenge raw data and process all sensor channels
:param dataset: string
Path with original OPPORTUNITY zip file
:param target_filename: string
Processed file
:param label: string, ['gestures' (default), 'locomotion']
Type of activities to be recognized. The OPPORTUNITY dataset includes several annotations to perform
recognition modes of locomotion/postures and recognition of sporadic gestures.
"""
data_dir = check_data(dataset)
zf = zipfile.ZipFile(dataset)
print '\nProcessing train dataset files...\n'
X_train, y_train = load_data_files(zf, label, OPPORTUNITY_DATA_FILES_TRAIN)
print '\nProcessing test dataset files...\n'
X_test, y_test = load_data_files(zf, label, OPPORTUNITY_DATA_FILES_TEST)
print "Final datasets with size: | train {0} | test {1} | ".format(X_train.shape, X_test.shape)
obj = [(X_train, y_train), (X_test, y_test)]
f = file(os.path.join(data_dir, target_filename), 'wb')
cp.dump(obj, f, protocol=cp.HIGHEST_PROTOCOL)
f.close()
def get_args():
'''This function parses and returns the arguments passed in'''
parser = argparse.ArgumentParser(
description='Preprocess OPPORTUNITY dataset')
# Add arguments
parser.add_argument(
'-i', '--input', type=str, help='OPPORTUNITY zip file', required=True)
parser.add_argument(
'-o', '--output', type=str, help='Processed data file', required=True)
parser.add_argument(
'-t', '--task', type=str.lower, help='Type of activities to be recognized', default="gestures", choices = ["gestures", "locomotion"], required=False)
# Array for all arguments passed to script<|fim▁hole|> dataset = args.input
target_filename = args.output
label = args.task
# Return all variable values
return dataset, target_filename, label
if __name__ == '__main__':
OpportunityUCIDataset_zip, output, l = get_args();
generate_data(OpportunityUCIDataset_zip, output, l)<|fim▁end|>
|
args = parser.parse_args()
# Assign args to variables
|
<|file_name|>draft-converter.js<|end_file_name|><|fim▁begin|>// Modified from https://github.com/dburrows/draft-js-basic-html-editor/blob/master/src/utils/draftRawToHtml.js
'use strict';
import { List } from 'immutable';
import * as InlineStylesProcessor from './inline-styles-processor';
import ApiDataInstance from './api-data-instance';
import AtomicBlockProcessor from './atomic-block-processor';
import ENTITY from './entities';
import merge from 'lodash/merge';
const _ = {
merge,
}
const annotationIndicatorPrefix = '__ANNOTATION__=';
let defaultBlockTagMap = {
'atomic': `<div>%content%</div>`,
'blockquote': `<blockquote>%content%</blockquote>`,
'code-block': `<code>%content%</code>`,
'default': `<p>%content%</p>`,<|fim▁hole|> 'header-four': `<h4>%content%</h4>`,
'header-five': `<h5>%content%</h5>`,
'header-six': `<h6>%content%</h6>`,
'ordered-list-item': `<li>%content%</li>`,
'paragraph': `<p>%content%</p>`,
'unordered-list-item': `<li>%content%</li>`,
'unstyled': `<p>%content%</p>`,
};
let inlineTagMap = {
BOLD: ['<strong>', '</strong>'],
CODE: ['<code>', '</code>'],
default: ['<span>', '</span>'],
ITALIC: ['<em>', '</em>'],
UNDERLINE: ['<u>', '</u>'],
};
let defaultEntityTagMap = {
[ENTITY.ANNOTATION.type]: ['<abbr title="<%= data.pureAnnotationText %>"><%= data.text %>', '</abbr>'],
[ENTITY.AUDIO.type]: ['<div class="audio-container"><div class="audio-title"><%= data.title %></div><div class="audio-desc"><%= data.description %></div><audio src="<%= data.url %>" />', '</div>'],
[ENTITY.BLOCKQUOTE.type]: ['<blockquote><div><%= data.quote %></div><div><%= data.quoteBy %></div>', '<blockquote>'],
[ENTITY.EMBEDDEDCODE.type]: ['<div><%= data.embeddedCode%>', '</div>'],
[ENTITY.INFOBOX.type]: ['<div class="info-box-container"><div class="info-box-title"><%= data.title %></div><div class="info-box-body"><%= data.body %></div>', '</div>'],
[ENTITY.LINK.type]: ['<a target="_blank" href="<%= data.url %>">', '</a>'],
[ENTITY.IMAGE.type]: ['<img alt="<%= data.description %>" src="<%= data.url %>">', '</img>'],
[ENTITY.IMAGELINK.type]: ['<img alt="<%= data.description %>" src="<%= data.url %>">', '</img>'],
[ENTITY.SLIDESHOW.type]: ['<!-- slideshow component start --> <ol class="slideshow-container"> <% if(!data) { data = []; } data.forEach(function(image) { %><li class="slideshow-slide"><img src="<%- image.url %>" /></li><% }); %>', '</ol><!-- slideshow component end -->'],
[ENTITY.IMAGEDIFF.type]: ['<!-- imageDiff component start --> <ol class="image-diff-container"> <% if(!data) { data = []; } data.forEach(function(image, index) { if (index > 1) { return; } %><li class="image-diff-item"><img src="<%- image.url %>" /></li><% }); %>', '</ol><!-- imageDiff component end-->'],
[ENTITY.YOUTUBE.type]: ['<iframe width="560" height="315" src="https://www.youtube.com/embed/<%= data.youtubeId %>" frameborder="0" allowfullscreen>', '</iframe>'],
};
let nestedTagMap = {
'ordered-list-item': ['<ol>', '</ol>'],
'unordered-list-item': ['<ul>', '</ul>'],
};
function _convertInlineStyle (block, entityMap, blockTagMap, entityTagMap) {
return blockTagMap[block.type] ? blockTagMap[block.type].replace(
'%content%',
InlineStylesProcessor.convertToHtml(inlineTagMap, entityTagMap, entityMap, block)
) : blockTagMap.default.replace(
'%content%',
InlineStylesProcessor.convertToHtml(inlineTagMap, block)
);
}
function _convertBlocksToHtml (blocks, entityMap, blockTagMap, entityTagMap) {
let html = '';
let nestLevel = []; // store the list type of the previous item: null/ol/ul
blocks.forEach((block) => {
// create tag for <ol> or <ul>: deal with ordered/unordered list item
// if the block is a list-item && the previous block is not a list-item
if (nestedTagMap[block.type] && nestLevel[0] !== block.type) {
html += nestedTagMap[block.type][0]; // start with <ol> or <ul>
nestLevel.unshift(block.type);
}
// end tag with </ol> or </ul>: deal with ordered/unordered list item
if (nestLevel.length > 0 && nestLevel[0] !== block.type) {
html += nestedTagMap[nestLevel.shift()][1]; // close with </ol> or </ul>
}
html += _convertInlineStyle(block, entityMap, blockTagMap, entityTagMap);
});
// end tag with </ol> or </ul>: or if it is the last block
if (blocks.length > 0 && nestedTagMap[blocks[blocks.length - 1].type]) {
html += nestedTagMap[nestLevel.shift()][1]; // close with </ol> or </ul>
}
return html;
}
function convertBlocksToApiData (blocks, entityMap, entityTagMap) {
let apiDataArr = List();
let content = [];
let nestLevel = [];
blocks.forEach((block) => {
// block is not a list-item
if (!nestedTagMap[block.type]) {
// if previous block is a list-item
if (content.length > 0 && nestLevel.length > 0) {
apiDataArr = apiDataArr.push(new ApiDataInstance({ type: nestLevel[0], content: content }));
content = [];
nestLevel.shift();
}
if (block.type.startsWith('atomic') || block.type.startsWith('media')) {
apiDataArr = apiDataArr.push(AtomicBlockProcessor.convertBlock(entityMap, block));
} else {
let converted = InlineStylesProcessor.convertToHtml(inlineTagMap, entityTagMap, entityMap, block);
let type = block.type;
// special case for block containing annotation entity
// set this block type as annotation
if (converted.indexOf(annotationIndicatorPrefix) > -1) {
type = ENTITY.ANNOTATION.type.toLowerCase();
}
apiDataArr = apiDataArr.push(new ApiDataInstance({ id: block.key, type: type, content: [converted] }));
}
} else {
let converted = InlineStylesProcessor.convertToHtml(inlineTagMap, entityTagMap, entityMap, block);
// previous block is not an item-list block
if (nestLevel.length === 0) {
nestLevel.unshift(block.type);
content.push(converted);
} else if (nestLevel[0] === block.type) {
// previous block is an item-list and current block is the same item-list
content.push(converted);
} else if (nestLevel[0] !== block.type) {
// previous block is a different item-list.
apiDataArr = apiDataArr.push(new ApiDataInstance({ id: block.key, type: nestLevel[0], content: content }));
content = [converted];
nestLevel[0] = block.type;
}
}
});
// last block is an item-list
if (blocks.length > 0 && nestLevel.length > 0) {
let block = blocks[blocks.length - 1];
apiDataArr = apiDataArr.push(new ApiDataInstance({ id: block.key, type: block.type, content: content }));
}
return apiDataArr;
}
function convertRawToHtml (raw, blockTagMap, entityTagMap) {
blockTagMap = _.merge({}, defaultBlockTagMap, blockTagMap);
entityTagMap = entityTagMap || defaultEntityTagMap;
let html = '';
raw = raw || {};
const blocks = Array.isArray(raw.blocks) ? raw.blocks : [];
const entityMap = typeof raw.entityMap === 'object' ? raw.entityMap : {};
html = _convertBlocksToHtml(blocks, entityMap, blockTagMap, entityTagMap);
return html;
}
function convertRawToApiData (raw) {
let apiData;
raw = raw || {};
const blocks = Array.isArray(raw.blocks) ? raw.blocks : [];
const entityMap = typeof raw.entityMap === 'object' ? raw.entityMap : {};
let entityTagMap = _.merge({}, defaultEntityTagMap, {
// special handling for annotation entity
// annotation entity data will be included in the special comment.
[ENTITY.ANNOTATION.type]: [`<!--${annotationIndicatorPrefix}<%= JSON.stringify(data) %>--><!--`, '-->'],
});
apiData = convertBlocksToApiData(blocks, entityMap, entityTagMap);
return apiData;
}
export default {
convertToHtml: convertRawToHtml,
convertToApiData: convertRawToApiData,
};<|fim▁end|>
|
'header-one': `<h1>%content%</h1>`,
'header-two': `<h2>%content%</h2>`,
'header-three': `<h3>%content%</h3>`,
|
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# https://github.com/swistakm/django-rest-framework/blob/master/rest_framework/runtests/runtests.py
import os
import sys
# fix sys path so we don't need to setup PYTHONPATH
sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
os.environ['DJANGO_SETTINGS_MODULE'] = 'oauth_provider.runtests.settings'
from django.conf import settings
from django.test.utils import get_runner
from south.management.commands import patch_for_test_db_setup
def usage():<|fim▁hole|>
You can pass the Class name of the `UnitTestClass` you want to test.
Append a method name if you only want to test a specific method of that class.
"""
def main():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=2)
if len(sys.argv) == 2:
test_case = '.' + sys.argv[1]
elif len(sys.argv) == 1:
test_case = ''
else:
print(usage())
sys.exit(1)
patch_for_test_db_setup()
failures = test_runner.run_tests(['tests' + test_case])
sys.exit(failures)
if __name__ == '__main__':
main()<|fim▁end|>
|
return """
Usage: python runtests.py [UnitTestClass].[method]
|
<|file_name|>Utils.py<|end_file_name|><|fim▁begin|>import os
<|fim▁hole|> exec(compile(open(filename).read(), filename, 'exec'))<|fim▁end|>
|
def run(name='test1.py'):
filename = os.getcwd() + name
|
<|file_name|>const-block-item.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
mod foo {
pub trait Value {
fn value(&self) -> usize;
}
}
static BLOCK_USE: usize = {
use foo::Value;
100
};
static BLOCK_PUB_USE: usize = {
pub use foo::Value;
200
};
static BLOCK_STRUCT_DEF: usize = {
struct Foo {
a: usize
}
Foo{ a: 300 }.a<|fim▁hole|> fn foo(a: usize) -> usize {
a + 10
}
foo
};
static BLOCK_MACRO_RULES: usize = {
macro_rules! baz {
() => (412)
}
baz!()
};
pub fn main() {
assert_eq!(BLOCK_USE, 100);
assert_eq!(BLOCK_PUB_USE, 200);
assert_eq!(BLOCK_STRUCT_DEF, 300);
assert_eq!(BLOCK_FN_DEF(390), 400);
assert_eq!(BLOCK_MACRO_RULES, 412);
}<|fim▁end|>
|
};
static BLOCK_FN_DEF: fn(usize) -> usize = {
|
<|file_name|>graphql-query.go<|end_file_name|><|fim▁begin|>/*
* EliasDB
*
* Copyright 2016 Matthias Ladkau. All rights reserved.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package v1
import (
"encoding/json"
"net/http"
"github.com/krotik/common/stringutil"
"github.com/krotik/eliasdb/api"
"github.com/krotik/eliasdb/graphql"
)
/*
EndpointGraphQLQuery is a query-only GraphQL endpoint URL (rooted). Handles
everything under graphql-query/...
*/
const EndpointGraphQLQuery = api.APIRoot + APIv1 + "/graphql-query/"
/*
GraphQLQueryEndpointInst creates a new endpoint handler.
*/
func GraphQLQueryEndpointInst() api.RestEndpointHandler {
return &graphQLQueryEndpoint{}
}
/*
Handler object for GraphQL operations.
*/
type graphQLQueryEndpoint struct {
*api.DefaultEndpointHandler
}
/*
HandleGET handles GraphQL queries.
*/
func (e *graphQLQueryEndpoint) HandleGET(w http.ResponseWriter, r *http.Request, resources []string) {
gqlquery := map[string]interface{}{
"variables": nil,
"operationName": nil,
}
partition := r.URL.Query().Get("partition")
if partition == "" && len(resources) > 0 {
partition = resources[0]
}
if partition == "" {
http.Error(w, "Need a partition", http.StatusBadRequest)
return
}
query := r.URL.Query().Get("query")
if query == "" {
http.Error(w, "Need a query parameter", http.StatusBadRequest)
return
}
gqlquery["query"] = query
if operationName := r.URL.Query().Get("operationName"); operationName != "" {<|fim▁hole|> if variables := r.URL.Query().Get("variables"); variables != "" {
varData := make(map[string]interface{})
if err := json.Unmarshal([]byte(variables), &varData); err != nil {
http.Error(w, "Could not decode variables: "+err.Error(), http.StatusBadRequest)
return
}
gqlquery["variables"] = varData
}
res, err := graphql.RunQuery(stringutil.CreateDisplayString(partition)+" query",
partition, gqlquery, api.GM, nil, true)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
w.Header().Set("content-type", "application/json; charset=utf-8")
json.NewEncoder(w).Encode(res)
}
/*
SwaggerDefs is used to describe the endpoint in swagger.
*/
func (e *graphQLQueryEndpoint) SwaggerDefs(s map[string]interface{}) {
s["paths"].(map[string]interface{})["/v1/graphql-query/{partition}"] = map[string]interface{}{
"get": map[string]interface{}{
"summary": "GraphQL interface which only executes non-modifying queries.",
"description": "The GraphQL interface can be used to query data.",
"consumes": []string{
"application/json",
},
"produces": []string{
"text/plain",
"application/json",
},
"parameters": []map[string]interface{}{
{
"name": "partition",
"in": "path",
"description": "Partition to query.",
"required": true,
"type": "string",
},
{
"name": "operationName",
"in": "query",
"description": "GraphQL query operation name.",
"required": false,
"type": "string",
},
{
"name": "query",
"in": "query",
"description": "GraphQL query.",
"required": true,
"type": "string",
},
{
"name": "variables",
"in": "query",
"description": "GraphQL query variable values.",
"required": false,
"type": "string",
},
},
"responses": map[string]interface{}{
"200": map[string]interface{}{
"description": "The operation was successful.",
},
"default": map[string]interface{}{
"description": "Error response",
"schema": map[string]interface{}{
"$ref": "#/definitions/Error",
},
},
},
},
}
}<|fim▁end|>
|
gqlquery["operationName"] = operationName
}
|
<|file_name|>parser.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010, 2011 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from webkit import model
def combine_condition(conditions):
if conditions:
if len(conditions) == 1:
return conditions[0]
else:
return bracket_if_needed(' && '.join(map(bracket_if_needed, conditions)))
else:
return None
def bracket_if_needed(condition):
if re.match(r'.*(&&|\|\|).*', condition):
return '(%s)' % condition
else:
return condition
def parse(file):
receiver_attributes = None
destination = None
messages = []
conditions = []
master_condition = None
superclass = []
for line in file:
match = re.search(r'messages -> (?P<destination>[A-Za-z_0-9]+) \s*(?::\s*(?P<superclass>.*?) \s*)?(?:(?P<attributes>.*?)\s+)?{', line)
if match:
receiver_attributes = parse_attributes_string(match.group('attributes'))
if match.group('superclass'):
superclass = match.group('superclass')
if conditions:
master_condition = conditions
conditions = []
destination = match.group('destination')
continue
if line.startswith('#'):
trimmed = line.rstrip()
if line.startswith('#if '):<|fim▁hole|> elif line.startswith('#else') or line.startswith('#elif'):
raise Exception("ERROR: '%s' is not supported in the *.in files" % trimmed)
continue
match = re.search(r'([A-Za-z_0-9]+)\((.*?)\)(?:(?:\s+->\s+)\((.*?)\))?(?:\s+(.*))?', line)
if match:
name, parameters_string, reply_parameters_string, attributes_string = match.groups()
if parameters_string:
parameters = parse_parameters_string(parameters_string)
for parameter in parameters:
parameter.condition = combine_condition(conditions)
else:
parameters = []
attributes = parse_attributes_string(attributes_string)
if reply_parameters_string:
reply_parameters = parse_parameters_string(reply_parameters_string)
for reply_parameter in reply_parameters:
reply_parameter.condition = combine_condition(conditions)
elif reply_parameters_string == '':
reply_parameters = []
else:
reply_parameters = None
messages.append(model.Message(name, parameters, reply_parameters, attributes, combine_condition(conditions)))
return model.MessageReceiver(destination, superclass, receiver_attributes, messages, combine_condition(master_condition))
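# --- Illustrative (hypothetical) snippet of the messages.in format that parse()
# above is written against; the receiver and message names below are made up:
#
#   messages -> WebPage LegacyReceiver {
#       LoadURL(String url)
#       RunJavaScript(String script) -> (String result) Delayed
#   }
#
# The first regex captures the destination ('WebPage'), an optional superclass
# and attributes; the per-line regex captures each message name, its parameters,
# optional reply parameters and attributes.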
def parse_attributes_string(attributes_string):
if not attributes_string:
return None
return attributes_string.split()
def split_parameters_string(parameters_string):
parameters = []
current_parameter_string = ''
nest_level = 0
for character in parameters_string:
if character == ',' and nest_level == 0:
parameters.append(current_parameter_string)
current_parameter_string = ''
continue
if character == '<':
nest_level += 1
elif character == '>':
nest_level -= 1
current_parameter_string += character
parameters.append(current_parameter_string)
return parameters
def parse_parameters_string(parameters_string):
parameters = []
for parameter_string in split_parameters_string(parameters_string):
match = re.search(r'\s*(?:\[(?P<attributes>.*?)\]\s+)?(?P<type_and_name>.*)', parameter_string)
attributes_string, type_and_name_string = match.group('attributes', 'type_and_name')
split = type_and_name_string.rsplit(' ', 1)
parameter_kind = 'class'
if split[0].startswith('struct '):
parameter_kind = 'struct'
split[0] = split[0][7:]
parameter_type = split[0]
parameter_name = split[1]
parameters.append(model.Parameter(kind=parameter_kind, type=parameter_type, name=parameter_name, attributes=parse_attributes_string(attributes_string)))
return parameters<|fim▁end|>
|
conditions.append(trimmed[4:])
elif line.startswith('#endif') and conditions:
conditions.pop()
|
<|file_name|>CanvasOGL.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2013-2014 by Kristina Simpson <[email protected]>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgement in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
*/
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include "CanvasOGL.hpp"
#include "ShadersOpenGL.hpp"
#include "TextureOpenGL.hpp"
namespace KRE
{
namespace
{
CanvasPtr& get_instance()
{
static CanvasPtr res = CanvasPtr(new CanvasOGL());
return res;
}
}
CanvasOGL::CanvasOGL()
{
handleDimensionsChanged();
}
CanvasOGL::~CanvasOGL()
{
}
void CanvasOGL::handleDimensionsChanged()
{
mvp_ = glm::ortho(0.0f, float(width()), float(height()), 0.0f);
}
void CanvasOGL::blitTexture(const TexturePtr& tex, const rect& src, float rotation, const rect& dst, const Color& color) const
{
auto texture = std::dynamic_pointer_cast<OpenGLTexture>(tex);
ASSERT_LOG(texture != NULL, "Texture passed in was not of expected type.");
const float tx1 = float(src.x()) / texture->width();
const float ty1 = float(src.y()) / texture->height();
const float tx2 = src.w() == 0 ? 1.0f : float(src.x2()) / texture->width();
const float ty2 = src.h() == 0 ? 1.0f : float(src.y2()) / texture->height();
const float uv_coords[] = {
tx1, ty1,
tx2, ty1,
tx1, ty2,
tx2, ty2,
};
const float vx1 = float(dst.x());
const float vy1 = float(dst.y());
const float vx2 = float(dst.x2());
const float vy2 = float(dst.y2());
const float vtx_coords[] = {
vx1, vy1,
vx2, vy1,
vx1, vy2,
vx2, vy2,
};
glm::mat4 model = glm::translate(glm::mat4(1.0f), glm::vec3((vx1+vx2)/2.0f,(vy1+vy2)/2.0f,0.0f)) * glm::rotate(glm::mat4(1.0f), rotation, glm::vec3(0.0f,0.0f,1.0f)) * glm::translate(glm::mat4(1.0f), glm::vec3(-(vx1+vx2)/2.0f,-(vy1+vy2)/2.0f,0.0f));
glm::mat4 mvp = mvp_ * model * getModelMatrix();
auto shader = OpenGL::ShaderProgram::defaultSystemShader();
shader->makeActive();
texture->bind();
shader->setUniformValue(shader->getMvpUniform(), glm::value_ptr(mvp));
if(color != KRE::Color::colorWhite()) {
shader->setUniformValue(shader->getColorUniform(), (color*getColor()).asFloatVector());
} else {
shader->setUniformValue(shader->getColorUniform(), getColor().asFloatVector());
}
shader->setUniformValue(shader->getTexMapUniform(), 0);
// XXX the following line is only temporary, obviously.
//shader->SetUniformValue(shader->GetUniformIterator("discard"), 0);
glEnableVertexAttribArray(shader->getVertexAttribute()->second.location);
glVertexAttribPointer(shader->getVertexAttribute()->second.location, 2, GL_FLOAT, GL_FALSE, 0, vtx_coords);
glEnableVertexAttribArray(shader->getTexcoordAttribute()->second.location);
glVertexAttribPointer(shader->getTexcoordAttribute()->second.location, 2, GL_FLOAT, GL_FALSE, 0, uv_coords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableVertexAttribArray(shader->getTexcoordAttribute()->second.location);
glDisableVertexAttribArray(shader->getVertexAttribute()->second.location);
}
void CanvasOGL::blitTexture(const TexturePtr& tex, const std::vector<vertex_texcoord>& vtc, float rotation, const Color& color)
{
ASSERT_LOG(false, "XXX CanvasOGL::blitTexture()");
}
void CanvasOGL::blitTexture(const MaterialPtr& mat, float rotation, const rect& dst, const Color& color) const
{
ASSERT_LOG(mat != NULL, "Material was null");
const float vx1 = float(dst.x());
const float vy1 = float(dst.y());
const float vx2 = float(dst.x2());
const float vy2 = float(dst.y2());
const float vtx_coords[] = {
vx1, vy1,
vx2, vy1,
vx1, vy2,
vx2, vy2,
};
glm::mat4 model = glm::translate(glm::mat4(1.0f), glm::vec3((vx1+vx2)/2.0f,(vy1+vy2)/2.0f,0.0f)) * glm::rotate(glm::mat4(1.0f), rotation, glm::vec3(0.0f,0.0f,1.0f)) * glm::translate(glm::mat4(1.0f), glm::vec3(-(vx1+vx2)/2.0f,-(vy1+vy2)/2.0f,0.0f));
glm::mat4 mvp = mvp_ * model * getModelMatrix();
auto shader = OpenGL::ShaderProgram::defaultSystemShader();
shader->makeActive();
shader->setUniformValue(shader->getMvpUniform(), glm::value_ptr(mvp));
//if(color != KRE::Color::colorWhite()) {
shader->setUniformValue(shader->getColorUniform(), color.asFloatVector());
//}
shader->setUniformValue(shader->getTexMapUniform(), 0);
mat->apply();
for(auto it = mat->getTexture().begin(); it != mat->getTexture().end(); ++it) {
auto texture = std::dynamic_pointer_cast<OpenGLTexture>(*it);
ASSERT_LOG(texture != NULL, "Texture passed in was not of expected type.");
auto uv_coords = mat->getNormalisedTextureCoords(it);
texture->bind();
// XXX the following line is only temporary, obviously.
//shader->SetUniformValue(shader->GetUniformIterator("discard"), 0);
glEnableVertexAttribArray(shader->getVertexAttribute()->second.location);
glVertexAttribPointer(shader->getVertexAttribute()->second.location, 2, GL_FLOAT, GL_FALSE, 0, vtx_coords);
glEnableVertexAttribArray(shader->getTexcoordAttribute()->second.location);
glVertexAttribPointer(shader->getTexcoordAttribute()->second.location, 2, GL_FLOAT, GL_FALSE, 0, &uv_coords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableVertexAttribArray(shader->getTexcoordAttribute()->second.location);
glDisableVertexAttribArray(shader->getVertexAttribute()->second.location);
}
mat->unapply();
}
void CanvasOGL::blitTexture(const MaterialPtr& mat, const rect& src, float rotation, const rect& dst, const Color& color) const
{
ASSERT_LOG(mat != NULL, "Material was null");
const float vx1 = float(dst.x());
const float vy1 = float(dst.y());
const float vx2 = float(dst.x2());
const float vy2 = float(dst.y2());
const float vtx_coords[] = {
vx1, vy1,
vx2, vy1,
vx1, vy2,
vx2, vy2,
};
glm::mat4 model = glm::translate(glm::mat4(1.0f), glm::vec3((vx1+vx2)/2.0f,(vy1+vy2)/2.0f,0.0f)) * glm::rotate(glm::mat4(1.0f), rotation, glm::vec3(0.0f,0.0f,1.0f)) * glm::translate(glm::mat4(1.0f), glm::vec3(-(vx1+vx2)/2.0f,-(vy1+vy2)/2.0f,0.0f));
glm::mat4 mvp = mvp_ * model * getModelMatrix();
auto shader = OpenGL::ShaderProgram::defaultSystemShader();
shader->makeActive();
shader->setUniformValue(shader->getMvpUniform(), glm::value_ptr(mvp));
//if(color) {
shader->setUniformValue(shader->getColorUniform(), color.asFloatVector());
//}
shader->setUniformValue(shader->getTexMapUniform(), 0);
mat->apply();
for(auto it = mat->getTexture().begin(); it != mat->getTexture().end(); ++it) {
auto texture = std::dynamic_pointer_cast<OpenGLTexture>(*it);
ASSERT_LOG(texture != NULL, "Texture passed in was not of expected type.");
const float tx1 = float(src.x()) / texture->width();
const float ty1 = float(src.y()) / texture->height();
const float tx2 = src.w() == 0 ? 1.0f : float(src.x2()) / texture->width();
const float ty2 = src.h() == 0 ? 1.0f : float(src.y2()) / texture->height();
const float uv_coords[] = {
tx1, ty1,
tx2, ty1,
tx1, ty2,
tx2, ty2,
};
texture->bind();
// XXX the following line is only temporary, obviously.
//shader->SetUniformValue(shader->GetUniformIterator("discard"), 0);
glEnableVertexAttribArray(shader->getVertexAttribute()->second.location);
glVertexAttribPointer(shader->getVertexAttribute()->second.location, 2, GL_FLOAT, GL_FALSE, 0, vtx_coords);
glEnableVertexAttribArray(shader->getTexcoordAttribute()->second.location);
glVertexAttribPointer(shader->getTexcoordAttribute()->second.location, 2, GL_FLOAT, GL_FALSE, 0, &uv_coords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableVertexAttribArray(shader->getTexcoordAttribute()->second.location);
glDisableVertexAttribArray(shader->getVertexAttribute()->second.location);
}
mat->unapply();
}
void CanvasOGL::drawSolidRect(const rect& r, const Color& fill_color, const Color& stroke_color, float rotation) const
{
rectf vtx = r.as_type<float>();
const float vtx_coords[] = {
vtx.x1(), vtx.y1(),
vtx.x2(), vtx.y1(),
vtx.x1(), vtx.y2(),
vtx.x2(), vtx.y2(),
};
glm::mat4 model = glm::translate(glm::mat4(1.0f), glm::vec3(vtx.mid_x(),vtx.mid_y(),0.0f)) * glm::rotate(glm::mat4(1.0f), rotation, glm::vec3(0.0f,0.0f,1.0f)) * glm::translate(glm::mat4(1.0f), glm::vec3(-vtx.mid_x(),-vtx.mid_y(),0.0f));
glm::mat4 mvp = mvp_ * model * getModelMatrix();
static OpenGL::ShaderProgramPtr shader = OpenGL::ShaderProgram::factory("simple");
shader->makeActive();
shader->setUniformValue(shader->getMvpUniform(), glm::value_ptr(mvp));
// Draw a filled rect
shader->setUniformValue(shader->getColorUniform(), fill_color.asFloatVector());
glEnableVertexAttribArray(shader->getVertexAttribute()->second.location);
glVertexAttribPointer(shader->getVertexAttribute()->second.location, 2, GL_FLOAT, GL_FALSE, 0, vtx_coords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
// Draw stroke if stroke_color is specified.
// XXX I think there is an easier way of doing this, with modern GL
const float vtx_coords_line[] = {
vtx.x1(), vtx.y1(),
vtx.x2(), vtx.y1(),
vtx.x2(), vtx.y2(),
vtx.x1(), vtx.y2(),
vtx.x1(), vtx.y1(),
};
shader->setUniformValue(shader->getColorUniform(), stroke_color.asFloatVector());
glEnableVertexAttribArray(shader->getVertexAttribute()->second.location);
glVertexAttribPointer(shader->getVertexAttribute()->second.location, 2, GL_FLOAT, GL_FALSE, 0, vtx_coords_line);
// XXX this may not be right.
glDrawArrays(GL_LINE_STRIP, 0, 5);
}
void CanvasOGL::drawSolidRect(const rect& r, const Color& fill_color, float rotate) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawSolidRect()");
}
void CanvasOGL::drawHollowRect(const rect& r, const Color& stroke_color, float rotate) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawHollowRect()");
}
void CanvasOGL::drawLine(const point& p1, const point& p2, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawLine()");
}
void CanvasOGL::drawLines(const std::vector<glm::vec2>& varray, float line_width, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawLines()");
}
void CanvasOGL::drawLines(const std::vector<glm::vec2>& varray, float line_width, const std::vector<glm::u8vec4>& carray) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawLines()");
}
void CanvasOGL::drawLineStrip(const std::vector<glm::vec2>& points, float line_width, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawLineStrip()");
}
void CanvasOGL::drawLineLoop(const std::vector<glm::vec2>& varray, float line_width, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawLineLoop()");
}
void CanvasOGL::drawLine(const pointf& p1, const pointf& p2, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawLine()");
}
void CanvasOGL::drawPolygon(const std::vector<glm::vec2>& points, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawPolygon()");
}
void CanvasOGL::drawSolidCircle(const point& centre, double radius, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawSolidCircle()");
}<|fim▁hole|>
void CanvasOGL::drawSolidCircle(const point& centre, double radius, const std::vector<uint8_t>& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawSolidCircle()");
}
void CanvasOGL::drawHollowCircle(const point& centre, double radius, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawHollowCircle()");
}
void CanvasOGL::drawSolidCircle(const pointf& centre, double radius, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawSolidCircle()");
}
void CanvasOGL::drawSolidCircle(const pointf& centre, double radius, const std::vector<uint8_t>& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawSolidCircle()");
}
void CanvasOGL::drawHollowCircle(const pointf& centre, double radius, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawHollowCircle()");
}
void CanvasOGL::drawPoints(const std::vector<glm::vec2>& points, float radius, const Color& color) const
{
ASSERT_LOG(false, "XXX write function CanvasOGL::drawPoints()");
}
CanvasPtr CanvasOGL::getInstance()
{
return get_instance();
}
}<|fim▁end|>
| |
<|file_name|>EXTEND_Creep.js<|end_file_name|><|fim▁begin|>Creep.prototype.report = function(){
this.say(this.test_carryPercent().toFixed(1) + '%')
}
Creep.prototype.test_carryPercent = function(){
return (100*(_.sum(this.carry)/this.carryCapacity))
}
Creep.prototype.test_energyPercent = function(){
return (100*(this.carry.energy/this.carryCapacity))
}
/** Creep moves toward <structure>
* When in range, transfers <amount> of <resource> to <structure>
* @param {Structure} structure - structure to deliver to
* @param {resource} resource - resource to deliver. Defaults to RESOURCE_ENERGY
* @param {int} amount - amount to deliver. Defaults to all.
*/
Creep.prototype.deliver = function(structure, resource, amount){
<|fim▁hole|>}
Creep.prototype.findNearestPath = function(){
}
Creep.prototype.idle = function(){
}<|fim▁end|>
| |
<|file_name|>CacheManager.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at<|fim▁hole|> *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.gateway.services.sync.cache;
import com.hazelcast.core.HazelcastInstance;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Map;
/**
* @author David BRASSELY (david.brassely at graviteesource.com)
* @author GraviteeSource Team
*/
public final class CacheManager {
@Autowired
private HazelcastInstance hzInstance;
public <K, V> Map<K, V> getCache(String name) {
return hzInstance.getMap(name);
}
}<|fim▁end|>
|
*
* http://www.apache.org/licenses/LICENSE-2.0
|
<|file_name|>exer08.py<|end_file_name|><|fim▁begin|># Write a program that asks how much you earn per hour
# and the number of hours worked in the month. Calculate and show
# the total salary for that month.
salarioXhoras = float(input('How much do you earn per hour? '))
horas = float(input('How many hours did you work? '))
salario = horas * salarioXhoras<|fim▁hole|><|fim▁end|>
|
print('You will earn %.2f in salary this month' % (salario))
|
<|file_name|>file_metadata.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from client.constants import FieldKeyword
from metadata import Metadata
class FileUrlMetadata(Metadata):
def get_title(self, soup):
image_url = self.prop_map[FieldKeyword.URL]
return image_url.split('/')[-1]
def get_files_list(self, response):
file_list = collections.OrderedDict()
file_list[FieldKeyword.COUNT] = 1
file_list[FieldKeyword.DATA] = [{
FieldKeyword.URL: response.request_url,
FieldKeyword.TYPE: None
}]
return file_list
def fetch_site_data(self, sanitized_url, status_code):
return self.generic_fetch_content(sanitized_url, status_code)
def parse_content(self, response):
self.generic_parse_content(response)
self.prop_map[FieldKeyword.FILES] = self.get_files_list(response)<|fim▁end|>
|
import collections
|
<|file_name|>sam_to_fastq.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
import sys
for line in open(sys.argv[1]):
cut=line.split('\t')
if len(cut)<11: continue
print ">"+cut[0]
print cut[9]<|fim▁hole|><|fim▁end|>
|
print "+"
print cut[10]
|
<|file_name|>davsturns.py<|end_file_name|><|fim▁begin|># da vs turns module
import numpy as np
from scipy import optimize
import matplotlib.pyplot as pl
import glob, sys, os, time
from deskdb import SixDeskDB,tune_dir,mk_dir
import matplotlib
# ------------- basic functions -----------
def get_divisors(n):
"""finds the divisors of an integer number"""
large_divisors = []
for i in xrange(1, int(np.sqrt(n) + 1)):
if n % i == 0:
yield i
if i != n / i:
large_divisors.insert(0, n / i)
for divisor in large_divisors:
yield divisor
def linear_fit(datx,daty,daterr):
'''Linear model fit with f(x)=p0+p1*x
(datx,daty): data, daterr: measurement error
return values (res,p0,p0err,p1,p1err):
- res: sum of residuals^2 normalized with the measurement error
- p0,p1: fit parameters
- p0err, p1err: error of fit parameters'''
fitfunc = lambda p,x: p[0]+p[1]*x#p[0]=Dinf, p[1]=b0
errfunc = lambda p,x,y,err: (y-fitfunc(p,x))/err
pinit = [0.1, 0.1]
#minimize
outfit=optimize.leastsq(errfunc, pinit,args=(datx,daty,daterr),full_output=1)
(p0,p1)=outfit[0]#(p[0],p[1])
var =outfit[1]#variance matrix
p0err =np.sqrt(var[0,0])#err p[0]
p1err =np.sqrt(var[1,1])#err p[1]
# res=sum((daty-fitfunc((p0,p1),datx))**2)/len(datx-2) #not weighted with error
res=sum((errfunc((p0,p1),datx,daty,daterr))**2)/len(datx)#weighted with error
return (res,p0,p0err,p1,p1err)
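# --- Minimal usage sketch (hypothetical helper, not called anywhere in this
# module): fit samples of y = 1 + 2*x with unit measurement errors; p0 and p1
# should come out close to 1.0 and 2.0.
def _linear_fit_example():
    datx = np.arange(10, dtype=float)
    daty = 1.0 + 2.0*datx
    daterr = np.ones(len(datx))
    res, p0, p0err, p1, p1err = linear_fit(datx, daty, daterr)
    return p0, p1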
# ----------- functions necessary for the analysis -----------
#@profile
def get_min_turn_ang(s,t,a,it):
"""returns array with (angle,minimum sigma,sturn) of particles with lost turn number < it.
check if there is a particle with angle ang with lost turn number <it
if true: lost turn number and amplitude of the last stable particle is saved = particle "before" the particle with the smallest amplitude with nturns<it
if false: the smallest lost turn number and the largest amplitude is saved
"""
# s,t,a are ordered by angle,amplitude
angles,sigmas=t.shape# angles = number of angles, sigmas = number of amplitudes
ftype=[('angle',float),('sigma',float),('sturn',float)]
mta=np.zeros(angles,dtype=ftype)
# enumerate(a[:,0]) returns (0, a[0]), (1, a[1]), (2, a[2]), ... = iang, ang where iang = index of the array (0,1,2,...) for ang = angle (e.g. [1.5, ... , 1.5] , [3.0, ... ,3.0])
for iang,ang in enumerate(a[:,0]):
tang = t[iang]
sang = s[iang]
iturn = tang<it # select lost turn number < it
if(any(tang[iturn])):
sangit=sang[iturn].min()
argminit=sang.searchsorted(sangit) # get index of smallest amplitude with sturn<it - amplitudes are ordered ascending
mta[iang]=(ang,sang[argminit-1],tang[argminit-1])#last stable amplitude -> index argminit-1
else:
mta[iang]=(ang,sang.max(),tang.min())
return mta
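# --- Worked example for get_min_turn_ang (illustrative comment only): for one
# angle with amplitudes sang = [1, 2, 3, 4] (ascending) and lost-turn numbers
# tang = [1e6, 5e5, 1e3, 2e3], a cut at it = 1e4 selects the particles lost
# before 1e4 turns (amplitudes 3 and 4). The smallest such amplitude is 3, so
# the last stable particle is the one just below it and (angle, 2, 5e5) is stored.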
def select_ang_surv(data,seed,nang):
"""returns data reduced to ((angmax+1)/nang)-1 angles -> nang being the divisor of angmax"""
angmax=len(data['angle'][:,0])#number of angles
print nang
if((nang not in list(get_divisors(angmax+1))) or ((angmax+1)/nang-1<3)):
print('%s is not a divisor of %s or too large (((angmax+1)/nang)-1<3)')%(nang,angmax+1)
sys.exit(0)
#define variables for only selection of angles
s,a,t=data['sigma'][nang::nang+1],data['angle'][nang::nang+1],data['sturn'][nang::nang+1]
ftype=[('angle',float),('sigma',float),('sturn',float)]
dataang=np.ndarray(np.shape(a),dtype=ftype)
dataang['sigma'],dataang['angle'],dataang['sturn']=s,a,t
return dataang
#@profile
def mk_da_vst(data,seed,tune,turnsl,turnstep):
"""returns 'seed','tunex','tuney','dawtrap','dastrap','dawsimp','dassimp',
'dawtraperr','dastraperr','dastraperrep','dastraperrepang',
'dastraperrepamp','dawsimperr','dassimperr','nturn','tlossmin',
'mtime'
the da is in steps of turnstep
das: integral over radius
das = 2/pi*int_0^(2pi)[r(theta)]dtheta=<r(theta)>
= 2/pi*dtheta*sum(a_i*r(theta_i))
daw: integral over phase space
daw = (int_0^(2pi)[(r(theta))^4*sin(2*theta)]dtheta)^1/4
= (dtheta*sum(a_i*r(theta_i)^4*sin(2*theta_i)))^1/4
trapezoidal rule (trap): a_i=(3/2,1, ... ,1,3/2)
simpson rule (simp): a_i=(55/24.,-1/6.,11/8.,1, ... 1,11/8.,-1/6.,55/24.)
numerical recipes open formulas 4.1.15 and 4.1.18
"""
mtime=time.time()
(tunex,tuney)=tune
s,a,t=data['sigma'],data['angle'],data['sturn']
tmax=np.max(t[s>0])#maximum number of turns
#set the 0 in t to tmax*100 in order to check if turnnumber<it (any(tang[tang<it])<it in get_min_turn_ang)
t[s==0]=tmax*100
angmax=len(a[:,0])#number of angles
angstep=np.pi/(2*(angmax+1))#step in angle in rad
ampstep=np.abs((s[s>0][1])-(s[s>0][0]))
ftype=[('seed',int),('tunex',float),('tuney',float),('turn_max',int),('dawtrap',float),('dastrap',float),('dawsimp',float),('dassimp',float),('dawtraperr',float),('dastraperr',float),('dastraperrep',float),('dastraperrepang',float),('dastraperrepamp',float),('dawsimperr',float),('dassimperr',float),('nturn',float),('tlossmin',float),('mtime',float)]
l_turnstep=len(np.arange(turnstep,tmax,turnstep))
daout=np.ndarray(l_turnstep,dtype=ftype)
for nm in daout.dtype.names:
daout[nm]=np.zeros(l_turnstep)
dacount=0
currentdawtrap=0
currenttlossmin=0
#define integration coefficients at beginning and end which are unequal to 1
ajtrap_s=np.array([3/2.])#Simpson rule
ajtrap_e=np.array([3/2.])
ajsimp_s=np.array([55/24.,-1/6.,11/8.])#Simpson rule
ajsimp_e=np.array([11/8.,-1/6.,55/24.])
warnsimp=True
for it in np.arange(turnstep,tmax,turnstep):
mta=get_min_turn_ang(s,t,a,it)
mta_angle=mta['angle']*np.pi/180#convert to rad
l_mta_angle=len(mta_angle)
mta_sigma=mta['sigma']
if(l_mta_angle>2):
# define coefficients for simpson rule (simp)
# ajtrap = [3/2.,1,....1,3/2.]
ajtrap=np.concatenate((ajtrap_s,np.ones(l_mta_angle-2),ajtrap_e))
else:
print('WARNING! mk_da_vst - You need at least 3 angles to calculate the da vs turns! Aborting!!!')
sys.exit(0)
if(l_mta_angle>6):
# define coefficients for simpson rule (simp)
# ajsimp = [55/24.,-1/6.,11/8.,1,....1,11/8.,-1/6.,55/24. ]
ajsimp=np.concatenate((ajsimp_s,np.ones(l_mta_angle-6),ajsimp_e))
calcsimp=True
else:
if(warnsimp):
print('WARNING! mk_da_vst - You need at least 7 angles to calculate the da vs turns with the simpson rule! da*simp* will be set to 0.')
warnsimp=False
calcsimp=False
# ---- trapezoidal rule (trap)
# integral
dawtrapint = ((ajtrap*(mta_sigma**4*np.sin(2*mta_angle))).sum())*angstep
dawtrap = (dawtrapint)**(1/4.)
dastrap = (2./np.pi)*(ajtrap*(mta_sigma)).sum()*angstep
# error
dawtraperrint = np.abs(((ajtrap*(2*(mta_sigma**3)*np.sin(2*mta_angle))).sum())*angstep*ampstep)
dawtraperr = np.abs(1/4.*dawtrapint**(-3/4.))*dawtraperrint
dastraperr = ampstep/2
dastraperrepang = ((np.abs(np.diff(mta_sigma))).sum())/(2*(angmax+1))
dastraperrepamp = ampstep/2
dastraperrep = np.sqrt(dastraperrepang**2+dastraperrepamp**2)
# ---- simpson rule (simp)
if(calcsimp):
# int
dawsimpint = (ajsimp*((mta_sigma**4)*np.sin(2*mta_angle))).sum()*angstep
dawsimp = (dawsimpint)**(1/4.)
dassimpint = (ajsimp*mta_sigma).sum()*angstep
dassimp = (2./np.pi)*dassimpint
# error
dawsimperrint = (ajsimp*(2*(mta_sigma**3)*np.sin(2*mta_angle))).sum()*angstep*ampstep
dawsimperr = np.abs(1/4.*dawsimpint**(-3/4.))*dawsimperrint
dassimperr = ampstep/2#simplified
else:
(dawsimp,dassimp,dawsimperr,dassimperr)=np.zeros(4)
tlossmin=np.min(mta['sturn'])
if(dawtrap!=currentdawtrap and it-turnstep >= 0 and tlossmin!=currenttlossmin):
daout[dacount]=(seed,tunex,tuney,turnsl,dawtrap,dastrap,dawsimp,dassimp,dawtraperr,dastraperr,dastraperrep,dastraperrepang,dastraperrepamp,dawsimperr,dassimperr,it-turnstep,tlossmin,mtime)
dacount=dacount+1
currentdawtrap =dawtrap
currenttlossmin=tlossmin
return daout[daout['dawtrap']>0]#delete 0 from errors
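# --- Toy numerical check of the integration rules described in the docstring
# above (hypothetical helper, not used by the pipeline): for a handful of
# angles the trapezoidal DA estimates reduce to weighted sums over the last
# stable radii.
def _da_trap_example():
    ang = np.arange(1, 6)*np.pi/12                # 5 survey angles in rad
    sig = np.array([6.0, 5.8, 5.5, 5.9, 6.1])     # last stable amplitude per angle
    aj = np.array([3/2., 1., 1., 1., 3/2.])       # trapezoidal coefficients a_i
    angstep = np.pi/(2*(len(ang)+1))
    dastrap = (2./np.pi)*(aj*sig).sum()*angstep   # average radius <r(theta)>
    dawtrap = ((aj*(sig**4)*np.sin(2*ang)).sum()*angstep)**(1/4.)
    return dastrap, dawtrap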
# ----------- functions to calculate the fit -----------
def get_fit_data(data,fitdat,fitdaterr,fitndrop,fitkap,b1):
'''linearize data for da vs turns fit according to model:
D(N) = Dinf+b0/(log(N^(exp(-b1))))^kappa'''
datx=1/(np.log(data['tlossmin'][fitndrop::]**np.exp(-b1))**fitkap)
# print (fitdat,fitdaterr)
daty=data[fitdat][fitndrop::]
if fitdaterr=='none':#case of no errors
daterr=np.ones(len(datx))
else:
daterr=data[fitdaterr][fitndrop::]
return datx,daty,daterr
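# With datx defined as 1/(log(N^exp(-b1)))^kappa, the model
# D(N) = Dinf+b0/(log(N^(exp(-b1))))^kappa used in get_fit_data above becomes
# the straight line daty = Dinf + b0*datx, so linear_fit's p0 plays the role of
# Dinf and p1 the role of b0 (see get_b1mean and mk_da_vst_fit below).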
def get_b1mean(db,tune,fitdat,fitdaterr,fitndrop,fitskap,fitekap,fitdkap):
'''returns (mean(b1),errmean(b1),std(b1)) over the seeds
with b1 being the fit parameter in:
D(N) = Dinf+b0/(log(N^(exp(-b1))))^kappa
and a linear relation is assumed between:
log(|b|)=log(|b0|)+b1*kappa <=> b=b0*exp(b1*kappa)
with b being the fit parameter in:
D(N) = Dinf+b/(log(N))^kappa
fitndrop=do not include first fitndrop data points
fitkap=kappa'''
if(not db.check_seeds()):
print('!!! Seeds are missing in database !!!')
ftype=[('seed',int),('res',float),('logb0',float),('logb0err',float),('b1',float),('b1err',float)]
lklog=np.zeros(len(db.get_db_seeds()),dtype=ftype)
ftype=[('kappa',float),('res',float),('dinf',float),('dinferr',float),('b',float),('berr',float)]
lkap=np.zeros(len(np.arange(fitskap,fitekap+fitdkap,fitdkap))-1,dtype=ftype)
ccs=0
for seed in db.get_db_seeds():
data=db.get_da_vst(seed,tune)
#start: scan over kappa
cck=0
for kap in np.arange(fitskap,fitekap+fitdkap,fitdkap):
if(abs(kap)>1.e-6):#for kappa=0: D(N)=Dinf+b/(log(N)^kappa)=D(N)=Dinf+b -> fit does not make sense
datx,daty,daterr=get_fit_data(data,fitdat,fitdaterr,fitndrop,kap,0)#fit D(N)=Dinf+b/(log(N)^kappa
lkap[cck]=(kap,)+linear_fit(datx,daty,daterr)
cck+=1
lklog[ccs]=(seed,)+linear_fit(lkap['kappa'],np.log(np.abs(lkap['b'])),1)#linear fit log(|b|)=log(|b0|)+b1*kappa for each seed
ccs+=1
return (np.mean(lklog['b1']),np.sqrt(np.mean(lklog['b1err']**2)),np.std(lklog['b1']))#error of mean value = sqrt(sum_i((1/n)*sigma_i**2))
def mk_da_vst_fit(db,tune,fitdat,fitdaterr,fitndrop,fitskap,fitekap,fitdkap):
'''1) a) fit D(N)=Dinf+b/(log(N))^kappa for all seeds and
scan range (skap,ekap,dkap)
b) assume linear dependence of b on kappa:
log(|b|)=log(|b0|)+b1*kappa
-> b1 for all seeds
c) calculate avg(b1) over all seeds
2) a) fit D(N)=Dinf+b0/(log(N)^(exp(-b1)))^kappa
for fixed b1=b1mean (obtained in 1))
and scan range (skap,ekap,dkap)
b) use (b0,kappa) with minimum residual'''
turnsl=db.env_var['turnsl']
mtime=time.time()
(tunex,tuney)=tune
print('calculating b1mean ...')
(b1mean,b1meanerr,b1std)=get_b1mean(db,tune,fitdat,fitdaterr,fitndrop,fitskap,fitekap,fitdkap)
print('average over %s seeds: b1mean=%s, b1meanerr=%s, b1std=%s'%(round(len(db.get_db_seeds())),round(b1mean,3),round(b1meanerr,3),round(b1std,3)))
print('start scan over kappa for fixed b1=%s to find kappa with minimum residual ...'%b1mean)
ftype=[('kappa',float),('dkappa',float),('res',float),('dinf',float),('dinferr',float),('b0',float),('b0err',float)]
lkap=np.zeros(len(np.arange(fitskap,fitekap+fitdkap,fitdkap))-1,dtype=ftype)#-1 as kappa=0 is not used
ftype=[('seed',float),('tunex',float),('tuney',float),('turn_max',int),('fitdat',np.str_, 30),('fitdaterr',np.str_, 30),('fitndrop',float),('kappa',float),('dkappa',float),('res',float),('dinf',float),('dinferr',float),('b0',float),('b0err',float),('b1mean',float),('b1meanerr',float),('b1std',float),('mtime',float)]
minkap=np.zeros(len(db.get_db_seeds()),dtype=ftype)
ccs=0
for seed in db.get_db_seeds():
data=db.get_da_vst(seed,tune)
#start: scan over kappa
cck=0
for kap in np.arange(fitskap,fitekap+fitdkap,fitdkap):
if(abs(kap)>1.e-6):#for kappa=0: D(N)=Dinf+b/(log(N)^kappa)=Dinf+b -> fit does not make sense
datx,daty,daterr=get_fit_data(data,fitdat,fitdaterr,fitndrop,kap,b1mean)
lkap[cck]=(kap,fitdkap,)+linear_fit(datx,daty,daterr)
cck+=1
iminkap=np.argmin(lkap['res'])
minkap[ccs]=(seed,tunex,tuney,turnsl,fitdat,fitdaterr,fitndrop,)+tuple(lkap[iminkap])+(b1mean,b1meanerr,b1std,mtime,)
ccs+=1
print('... scan over kappa is finished!')
return minkap
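# The record array returned by mk_da_vst_fit is stored via db.st_da_vst_fit
# (see RunDaVsTurns below) and later used for plotting, where the fitted curve
# is reconstructed as D(N) = dinf + b0/(log(N^exp(-b1mean)))^kappa
# (cf. plot_comp_da_vst above); only the (b0,kappa) pair with the minimum fit
# residual is kept per seed.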
# ----------- functions to reload and create DA.out files for previous scripts -----------
def save_daout_old(data,filename):
daoutold=data[['dawtrap','dastrap','dastraperrep','dastraperrepang','dastraperrepamp','nturn','tlossmin']]
np.savetxt(filename,daoutold,fmt='%.6f %.6f %.6f %.6f %.6f %d %d')
def reload_daout_old(filename):
ftype=[('dawtrap',float),('dastrap',float),('dastraperrep',float),('dastraperrepang',float),('dastraperrepamp',float),('nturn',float),('tlossmin',float)]
return np.loadtxt(filename,dtype=ftype,delimiter=' ')
def save_daout(data,filename):
daout=data[['seed','tunex','tuney','turn_max','dawtrap','dastrap','dawsimp','dassimp','dawtraperr','dastraperr','dastraperrep','dastraperrepang','dastraperrepamp','dawsimperr','dassimperr','nturn','tlossmin']]
np.savetxt(filename,daout,fmt='%d %.6f %.6f %d %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %.6f %d %d')
def save_davst_fit(data,filename):
fitdata=data[['seed','tunex','tuney','turn_max','fitdat','fitdaterr','fitndrop','kappa','dkappa','res','dinf','dinferr','b0','b0err','b1mean','b1meanerr','b1std']]
np.savetxt(filename,fitdata,fmt='%d %.5f %.5f %d %s %s %d %.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f')
def reload_daout(filename):
ftype=[('seed',int),('tunex',float),('tuney',float),('turn_max',int),('dawtrap',float),('dastrap',float),('dawsimp',float),('dassimp',float),('dawtraperr',float),('dastraperr',float),('dastraperrep',float),('dastraperrepang',float),('dastraperrepamp',float),('dawsimperr',float),('dassimperr',float),('nturn',float),('tlossmin',float),('mtime',float)]
return np.loadtxt(filename,dtype=ftype,delimiter=' ')
def save_dasurv(data,filename):
np.savetxt(filename,np.reshape(data,-1),fmt='%.8f %.8f %d')
def reload_dasurv(path):
ftype=[('angle', '<f8'), ('sigma', '<f8'), ('sturn', '<f8')]
data=np.loadtxt(glob.glob(path+'/dasurv.out*')[0],dtype=ftype,delimiter=' ')
angles=len(set(data['angle']))
return data.reshape(angles,-1)
def plot_surv_2d_stab(db,lbl,mksize,cl,seed,tune,ampmax):
'''survival plot: stable area of a single study (used by the comparison plots)'''
data=db.get_surv(seed,tune)
s,a,t=data['sigma'],data['angle'],data['sturn']
s,a,t=s[s>0],a[s>0],t[s>0]#delete 0 values
tmax=np.max(t)
sxstab=s[t==tmax]*np.cos(a[t==tmax]*np.pi/180)
systab=s[t==tmax]*np.sin(a[t==tmax]*np.pi/180)
pl.scatter(sxstab,systab,mksize,marker='o',color=cl,edgecolor='none',label=lbl)
pl.title('seed '+str(seed),fontsize=12)
pl.xlim([0,ampmax])
pl.ylim([0,ampmax])
pl.xlabel(r'Horizontal amplitude [$\sigma$]',labelpad=10,fontsize=12)
pl.ylabel(r'Vertical amplitude [$\sigma$]',labelpad=10,fontsize=12)
def plot_surv_2d_comp(db,dbcomp,lbl,complbl,seed,tune,ampmax):
'''survival plot: stable area of two studies'''
data=db.get_surv(seed,tune)
datacomp=dbcomp.get_surv(seed,tune)
pl.close('all')
pl.figure(figsize=(6,6))
plot_surv_2d_stab(db,lbl,10,'b',seed,tune,ampmax)
plot_surv_2d_stab(dbcomp,complbl,2,'r',seed,tune,ampmax)
pl.legend(loc='best')
def plot_comp_da_vst(db,dbcomp,ldat,ldaterr,lblname,complblname,seed,tune,ampmin,ampmax,tmax,slog,sfit,fitndrop):
"""plot dynamic aperture vs number of turns,
blue/green=simple average, red/orange=weighted average"""
pl.close('all')
pl.figure(figsize=(6,6))
for dbbb in [db,dbcomp]:
data=dbbb.get_da_vst(seed,tune)
if(dbbb.LHCDescrip==db.LHCDescrip):
lbl = lblname
fmtpl = 'bo'
fmtfit= 'b-'
if(dbbb.LHCDescrip==dbcomp.LHCDescrip):
lbl = complblname
fmtpl = 'ro'
fmtfit = 'r-'
# pl.errorbar(data[ldat[0]],data['tlossmin'],xerr=data[ldaterr[0]],fmt=fmtpl,markersize=2,label='%s %s'%(ldat[0],lbl))
pl.errorbar(data[ldat[0]],data['tlossmin'],xerr=data[ldaterr[0]],fmt=fmtpl,markersize=2,label='%s'%(lbl))
if(sfit):
fitdata=dbbb.get_da_vst_fit(seed,tune)
fitdata=fitdata[fitdata['fitdat']==ldat[0]]
fitdata=fitdata[fitdata['fitdaterr']==ldaterr[0]]
fitdata=fitdata[np.abs(fitdata['fitndrop']-float(fitndrop))<1.e-6]
if(len(fitdata)==1):
pl.plot(fitdata['dinf']+fitdata['b0']/(np.log(data['tlossmin']**np.exp(-fitdata['b1mean']))**fitdata['kappa']),data['tlossmin'],fmtfit)
else:
print('Warning: no fit data available or data ambiguous!')
pl.title('seed '+str(seed),fontsize=16)
pl.xlim([ampmin,ampmax])
pl.xlabel(r'Dynamic aperture [$\sigma$]',labelpad=10,fontsize=16)
pl.ylabel(r'Number of turns',labelpad=15,fontsize=16)
plleg=pl.gca().legend(loc='best',fontsize=16)
for label in plleg.get_texts():
label.set_fontsize(12)
if(slog):
pl.ylim([5.e3,tmax])
pl.yscale('log')
else:
pl.ylim([0,tmax])
pl.gca().ticklabel_format(style='sci',axis='y',scilimits=(0,0))
def clean_dir_da_vst(db,files):
'''create the directory structure and delete the old files (given in *files*) of the da vs turns analysis'''
for seed in db.get_seeds():
for tune in db.get_db_tunes():
pp=db.mk_analysis_dir(seed,tune)# create directory
if(len(files)>0):#delete old plots and files
for filename in files:
ppf=os.path.join(pp,filename)
if(os.path.exists(ppf)): os.remove(ppf)
if(len(files)>0):
print('remove old {0} ... files in '+db.LHCDescrip).format(files)
# for error analysis - data is not saved in database but output files are generated
def RunDaVsTurnsAng(db,seed,tune,turnstep):
"""Da vs turns -- calculate da vs turns for divisors of angmax,
e.g. for angmax+1=30 the divisors [1, 2, 3, 5, 6] are used - the last 2 divisors [10, 15] are omitted as the resulting number of angles has to be larger than 3"""
# start analysis
try:
turnstep=int(float(turnstep))
except (ValueError,NameError,TypeError):<|fim▁hole|> if(seed not in db.get_db_seeds()):
print('WARNING: Seed %s is missing in database !!!'%seed)
sys.exit(0)
if(tune not in db.get_db_tunes()):
print('WARNING: tune %s is missing in database !!!'%tune)
sys.exit(0)
turnsl=db.env_var['turnsl']#get turnsl for outputfile names
seed=int(seed)
print('analyzing seed {0} and tune {1}...').format(str(seed),str(tune))
dirname=db.mk_analysis_dir(seed,tune)#directory struct already created in clean_dir_da_vst, only get dir name (string) here
print('... get survival data')
dasurvtot= db.get_surv(seed,tune)
a=dasurvtot['angle']
angmax=len(a[:,0])#number of angles
#use only divisors nang with (angmax+1)/nang-1>=3 = minimum number of angles for trapezoidal rule
divsall=np.array(list(get_divisors(angmax+1)))
divs=divsall[(angmax+1)/divsall-1>2]
print('... number of angles: %s, divisors: %s'%(angmax,str(divs)))
for nang in divs:
dirnameang='%s/%s'%(dirname,nang)
mk_dir(dirnameang)
dasurv=select_ang_surv(dasurvtot,seed,nang)
print('... calculate da vs turns')
daout=mk_da_vst(dasurv,seed,tune,turnsl,turnstep)
save_daout(daout,'%s/DA.out'%dirnameang)
print('... save da vs turns data in {0}/DA.out').format(dirnameang)
# in analysis - putting the pieces together
def RunDaVsTurns(db,force,outfile,outfileold,turnstep,davstfit,fitdat,fitdaterr,fitndrop,fitskap,fitekap,fitdkap,outfilefit):
'''Da vs turns -- calculate da vs turns for study dbname, if davstfit=True also fit the data'''
#---- calculate the da vs turns
try:
turnstep=int(float(turnstep))
except (ValueError,NameError,TypeError):
print('Error in RunDaVsTurns: turnstep must be an integer value!')
sys.exit(0)
if(not db.check_seeds()):
print('!!! Seeds are missing in database !!!')
turnsl=db.env_var['turnsl']#get turnsl for outputfile names
turnse=db.env_var['turnse']
for seed in db.get_db_seeds():
seed=int(seed)
print('analyzing seed {0} ...').format(str(seed))
for tune in db.get_db_tunes():
print('analyzing tune {0} ...').format(str(tune))
dirname=db.mk_analysis_dir(seed,tune)#directory struct already created in clean_dir_da_vst, only get dir name (string) here
print('... get survival data')
dasurv= db.get_surv(seed,tune)
if dasurv is None:
print("ERROR: survival data could not be retrieved due to "+
"and error in the database or tracking data. Skip "
"this seed %s"%(seed))
continue
print('... get da vs turns data')
daout = db.get_da_vst(seed,tune)
if(len(daout)>0):#reload data, if input data has changed redo the analysis
an_mtime=daout['mtime'].min()
res_mtime=db.execute('SELECT max(mtime) FROM six_results')[0][0]
if res_mtime>an_mtime or force is True:
files=('DA.%s.out DAsurv.%s.out DA.%s.png DAsurv.%s.png DAsurv_log.%s.png DAsurv_comp.%s.png DAsurv_comp_log.%s.png'%(turnse,turnse,turnse,turnse,turnse,turnse,turnse)).split()+['DA.out','DAsurv.out','DA.png','DAsurv.png','DAsurv_log.png','DAsurv_comp.png','DAsurv_comp_log.png']
clean_dir_da_vst(db,files)# create directory structure and delete old files
print('... input data has changed or force=True - recalculate da vs turns')
daout=mk_da_vst(dasurv,seed,tune,turnsl,turnstep)
print('.... save data in database')
#check if old table name da_vsturn exists, if yes delete it
if(db.check_table('da_vsturn')):
print('... delete old table da_vsturn - table will be substituted by new table da_vst')
db.execute("DROP TABLE da_vsturn")
db.st_da_vst(daout,recreate=True)
else:#create data
print('... calculate da vs turns')
daout=mk_da_vst(dasurv,seed,tune,turnsl,turnstep)
print('.... save data in database')
db.st_da_vst(daout,recreate=False)
if(outfile):# create dasurv.out and da.out files
fnsurv='%s/DAsurv.%s.out'%(dirname,turnse)
save_dasurv(dasurv,fnsurv)
print('... save survival data in {0}').format(fnsurv)
fndaout='%s/DA.%s.out'%(dirname,turnse)
save_daout(daout,fndaout)
print('... save da vs turns data in {0}').format(fndaout)
if(outfileold):
fndaoutold='%s/DAold.%s.out'%(dirname,turnse)
save_daout_old(daout,fndaoutold)
print('... save da vs turns (old data format) data in {0}').format(fndaoutold)
#---- fit the data
if(davstfit):
if(fitdat in ['dawtrap','dastrap','dawsimp','dassimp']):
if(fitdaterr in ['none','dawtraperr','dastraperr','dastraperrep','dastraperrepang','dastraperrepamp','dawsimperr','dassimperr']):
try:
fitndrop=int(float(fitndrop))
except (ValueError,NameError,TypeError):
print('Error in RunDaVsTurns: fitndrop must be an integer value! - Aborting!')
sys.exit(0)
try:
fitskap=float(fitskap)
fitekap=float(fitekap)
fitdkap=float(fitdkap)
except (ValueError,NameError,TypeError):
print('Error in RunDaVsTurns: fitskap, fitekap and fitdkap must be float values! - Aborting!')
sys.exit(0)
if((np.arange(fitskap,fitekap+fitdkap,fitdkap)).any()):
for tune in db.get_db_tunes():
print('fit da vs turns for tune {0} ...').format(str(tune))
fitdaout=mk_da_vst_fit(db,tune,fitdat,fitdaterr,fitndrop,fitskap,fitekap,fitdkap)
print('.... save fitdata in database')
db.st_da_vst_fit(fitdaout,recreate=False)
if(outfilefit):
(tunex,tuney)=tune
sixdesktunes="%g_%g"%(tunex,tuney)
fndot='%s/DAfit.%s.%s.%s.%s.%s.plot'%(db.mk_analysis_dir(),db.LHCDescrip,sixdesktunes,turnse,fitdat,fitdaterr)
save_davst_fit(fitdaout,fndot)
print('... save da vs turns fit data in {0}').format(fndot)
else:
print('Error in RunDaVsTurns: empty scan range for fitkap!')
else:
print("Error in -fitopt: <dataerr> has to be 'none','dawtraperr','dastraperr','dastraperrep','dastraperrepang','dastraperrepamp','dawsimperr' or 'dassimperr' - Aborting!")
sys.exit(0)
else:
print("Error in -fitopt: <data> has to be 'dawtrap','dastrap','dawsimp' or 'dassimp' - Aborting!")
sys.exit(0)
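# Example invocation (sketch; the argument values are only illustrative):
#   RunDaVsTurns(db,force=True,outfile=True,outfileold=False,turnstep=100,
#                davstfit=True,fitdat='dawsimp',fitdaterr='dawsimperr',
#                fitndrop=2,fitskap=-5,fitekap=5,fitdkap=0.1,outfilefit=True)
# fitdat must be one of 'dawtrap','dastrap','dawsimp','dassimp' and fitdaterr
# one of the corresponding error columns or 'none', as checked above.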
def PlotDaVsTurns(db,ldat,ldaterr,ampmaxsurv,ampmindavst,ampmaxdavst,tmax,plotlog,plotfit,fitndrop):
'''plot survival plots and da vs turns for list of data ldat and associated error ldaterr'''
turnsl=db.env_var['turnsl']
turnse=db.env_var['turnse']
print('Da vs turns -- create survival and da vs turns plots')
try:
ampmaxsurv =float(ampmaxsurv)
ampmindavst=float(ampmindavst)
ampmaxdavst=float(ampmaxdavst)
except (ValueError,NameError,TypeError):
print('Error in PlotDaVsTurns: ampmaxsurv, ampmindavst and ampmaxdavst must be float values!')
sys.exit(0)
#remove all files
if(plotlog):
files=('DA_log.png DAsurv.png DA_log.%s.png DAsurv.%s.png'%(turnse,turnse)).split()
else:
files=('DA.png DAsurv.png DA.%s.png DAsurv.%s.png'%(turnse,turnse)).split()
clean_dir_da_vst(db,files)# create directory structure and delete old files if force=true
if(not db.check_seeds()):
print('!!! Seeds are missing in database !!!')
for seed in db.get_db_seeds():
seed=int(seed)
for tune in db.get_db_tunes():
dirname=db.mk_analysis_dir(seed,tune)#directory struct already created in clean_dir_da_vst, only get dir name (string) here
pl.close('all')
pl.figure(figsize=(6,6))
db.plot_surv_2d(seed,tune,ampmaxsurv)#suvival plot
pl.savefig('%s/DAsurv.%s.png'%(dirname,turnse))
print('... saving plot %s/DAsurv.%s.png'%(dirname,turnse))
db.plot_da_vst(seed,tune,ldat,ldaterr,ampmindavst,ampmaxdavst,tmax,plotlog,plotfit,fitndrop)#da vs turns plot
if(plotlog==True):
pl.savefig('%s/DA_log.%s.png'%(dirname,turnse))
print('... saving plot %s/DA_log.%s.png'%(dirname,turnse))
else:
pl.savefig('%s/DA.%s.png'%(dirname,turnse))
print('... saving plot %s/DA.%s.png'%(dirname,turnse))
def PlotCompDaVsTurns(db,dbcomp,ldat,ldaterr,lblname,complblname,ampmaxsurv,ampmindavst,ampmaxdavst,tmax,plotlog,plotfit,fitndrop):
'''Comparison of two studies: survival plots (area of stable particles) and Da vs turns plots'''
matplotlib.rcParams.update({'font.size': 16})
turnsldb =db.env_var['turnsl']
turnsedb =db.env_var['turnse']
turnsldbcomp=dbcomp.env_var['turnsl']
turnsedbcomp=dbcomp.env_var['turnse']
if(not turnsldb==turnsldbcomp):
print('Warning! Maximum turn number turn_max of %s and %s differ!'%(db.LHCDescrip,dbcomp.LHCDescrip))
try:
ampmaxsurv=float(ampmaxsurv)
ampmindavst=float(ampmindavst)
ampmaxdavst=float(ampmaxdavst)
tmax=int(float(tmax))
except (ValueError,NameError):
print('Error in PlotCompDaVsTurns: ampmaxsurv, ampmindavst and ampmaxdavst must be float values and tmax an integer value!')
sys.exit(0)
#remove all files
if(plotlog):
files=('DA_comp_log.png DAsurv_comp.png DA_comp_log.%s.png DAsurv_comp.%s.png'%(turnsedb,turnsedb)).split()
else:
files=('DA_comp.png DAsurv_comp.png DA_comp.%s.png DAsurv_comp.%s.png'%(turnsedb,turnsedb)).split()
clean_dir_da_vst(db,files)# create directory structure and delete old files if force=true
# start analysis
if(not db.check_seeds()):
print('Seeds are missing in database!')
for seed in db.get_db_seeds():
seed=int(seed)
for tune in db.get_db_tunes():
if(seed in dbcomp.get_db_seeds() and tune in dbcomp.get_db_tunes()):
dirname=db.mk_analysis_dir(seed,tune)#directory struct already created in clean_dir_da_vst, only get dir name (string) here
pl.close('all')
plot_surv_2d_comp(db,dbcomp,lblname,complblname,seed,tune,ampmaxsurv)
pl.savefig('%s/DAsurv_comp.%s.png'%(dirname,turnsedb))
print('... saving plot %s/DAsurv_comp.%s.png'%(dirname,turnsedb))
plot_comp_da_vst(db,dbcomp,ldat,ldaterr,lblname,complblname,seed,tune,ampmindavst,ampmaxdavst,tmax,plotlog,plotfit,fitndrop)
if(plotlog==True):
pl.savefig('%s/DA_comp_log.%s.png'%(dirname,turnsedb),bbox_inches='tight')
print('... saving plot %s/DA_comp_log.%s.png'%(dirname,turnsedb))
else:
pl.savefig('%s/DA_comp.%s.png'%(dirname,turnsedb),bbox_inches='tight')
print('... saving plot %s/DA_comp.%s.png'%(dirname,turnsedb))<|fim▁end|>
|
print('Error in RunDaVsTurns: turnstep must be an integer value!')
sys.exit(0)
|
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>from django.test import SimpleTestCase
class MaintenanceModeTestCase(SimpleTestCase):
def test_maintenance_mode_enabled_home_page(self):
with self.settings(MAINTENANCE_MODE=True):
response = self.client.get("/", follow=True)
self.assertEqual(503, response.status_code)
self.assertIn("This service is down for maintenance", response.content)
self.assertEqual([("http://testserver/maintenance", 302)], response.redirect_chain)
def test_maintenance_mode_enabled_maintenance_page(self):
with self.settings(MAINTENANCE_MODE=True):
response = self.client.get("/maintenance", follow=False)
self.assertEqual(503, response.status_code)
self.assertIn("This service is down for maintenance", response.content)
def test_maintenance_mode_disabled_home_page(self):
with self.settings(MAINTENANCE_MODE=False):
response = self.client.get("/", follow=True)
self.assertEqual(200, response.status_code)
self.assertNotIn("This service is down for maintenance", response.content)
def test_maintenance_mode_disabled_maintenance_page(self):
with self.settings(MAINTENANCE_MODE=False):
response = self.client.get("/maintenance", follow=True)
self.assertEqual(200, response.status_code)<|fim▁hole|><|fim▁end|>
|
self.assertEqual(("http://testserver/", 302), response.redirect_chain[0])
self.assertNotIn("This service is down for maintenance", response.content)
|
<|file_name|>iri2uri.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Converts an IRI to a URI."""
__author__ = "Joe Gregorio ([email protected])"
__copyright__ = "Copyright 2006, Joe Gregorio"<|fim▁hole|>import urllib.parse
# Convert an IRI to a URI following the rules in RFC 3987
#
# The characters we need to encode and escape are defined in the spec:
#
# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
# / %xD0000-DFFFD / %xE1000-EFFFD
escape_range = [
(0xA0, 0xD7FF),
(0xE000, 0xF8FF),
(0xF900, 0xFDCF),
(0xFDF0, 0xFFEF),
(0x10000, 0x1FFFD),
(0x20000, 0x2FFFD),
(0x30000, 0x3FFFD),
(0x40000, 0x4FFFD),
(0x50000, 0x5FFFD),
(0x60000, 0x6FFFD),
(0x70000, 0x7FFFD),
(0x80000, 0x8FFFD),
(0x90000, 0x9FFFD),
(0xA0000, 0xAFFFD),
(0xB0000, 0xBFFFD),
(0xC0000, 0xCFFFD),
(0xD0000, 0xDFFFD),
(0xE1000, 0xEFFFD),
(0xF0000, 0xFFFFD),
(0x100000, 0x10FFFD),
]
def encode(c):
retval = c
i = ord(c)
for low, high in escape_range:
if i < low:
break
if i >= low and i <= high:
retval = "".join(["%%%2X" % o for o in c.encode("utf-8")])
break
return retval
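# Example (mirrors the unit tests at the bottom of this file): COMET (U+2604)
# falls in the 0xA0-0xD7FF range, so encode('\N{COMET}') percent-encodes each
# UTF-8 octet and returns '%E2%98%84'; plain ASCII characters are returned
# unchanged.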
def iri2uri(uri):
"""Convert an IRI to a URI. Note that IRIs must be
passed in as unicode strings. That is, do not utf-8 encode
the IRI before passing it into the function."""
if isinstance(uri, str):
(scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
authority = authority.encode("idna").decode("utf-8")
# For each character in 'ucschar' or 'iprivate'
# 1. encode as utf-8
# 2. then %-encode each octet of that utf-8
uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
uri = "".join([encode(c) for c in uri])
return uri
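# Usage example (taken from the unit tests below):
#   iri2uri("http://bitworking.org/?fred=\N{COMET}")
#   -> 'http://bitworking.org/?fred=%E2%98%84'
# The transformation is idempotent: applying iri2uri to an already converted
# URI returns it unchanged.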
if __name__ == "__main__":
import unittest
class Test(unittest.TestCase):
def test_uris(self):
"""Test that URIs are invariant under the transformation."""
invariant = [
"ftp://ftp.is.co.za/rfc/rfc1808.txt",
"http://www.ietf.org/rfc/rfc2396.txt",
"ldap://[2001:db8::7]/c=GB?objectClass?one",
"mailto:[email protected]",
"news:comp.infosystems.www.servers.unix",
"tel:+1-816-555-1212",
"telnet://192.0.2.16:80/",
"urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
]
for uri in invariant:
self.assertEqual(uri, iri2uri(uri))
def test_iri(self):
"""Test that the right type of escaping is done for each part of the URI."""
self.assertEqual(
"http://xn--o3h.com/%E2%98%84",
iri2uri("http://\N{COMET}.com/\N{COMET}"),
)
self.assertEqual(
"http://bitworking.org/?fred=%E2%98%84",
iri2uri("http://bitworking.org/?fred=\N{COMET}"),
)
self.assertEqual(
"http://bitworking.org/#%E2%98%84",
iri2uri("http://bitworking.org/#\N{COMET}"),
)
self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
self.assertEqual(
"/fred?bar=%E2%98%9A#%E2%98%84",
iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"),
)
self.assertEqual(
"/fred?bar=%E2%98%9A#%E2%98%84",
iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")),
)
self.assertNotEqual(
"/fred?bar=%E2%98%9A#%E2%98%84",
iri2uri(
"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8")
),
)
unittest.main()<|fim▁end|>
|
__contributors__ = []
__version__ = "1.0.0"
__license__ = "MIT"
|
<|file_name|>PrevioLex.java<|end_file_name|><|fim▁begin|>/* The following code was generated by JFlex 1.6.1 */
package com.jim_project.interprete.parser.previo;
import com.jim_project.interprete.parser.AnalizadorLexico;
/**
* This class is a scanner generated by
* <a href="http://www.jflex.de/">JFlex</a> 1.6.1
* from the specification file <tt>C:/Users/alber_000/Documents/NetBeansProjects/tfg-int-rpretes/jim/src/main/java/com/jim_project/interprete/parser/previo/lexico.l</tt>
*/
public class PrevioLex extends AnalizadorLexico {
/** This character denotes the end of file */
public static final int YYEOF = -1;
/** initial size of the lookahead buffer */
private static final int ZZ_BUFFERSIZE = 16384;
/** lexical states */
public static final int YYINITIAL = 0;
/**
* ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
* ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
* at the beginning of a line
* l is of the form l = 2*k, k a non negative integer
*/
private static final int ZZ_LEXSTATE[] = {
0, 0
};
/**
* Translates characters to character classes
*/
private static final String ZZ_CMAP_PACKED =
"\11\0\1\3\1\2\1\51\1\3\1\1\22\0\1\3\1\16\1\0"+
"\1\5\1\0\1\20\2\0\3\20\1\15\1\20\1\14\1\0\1\20"+
"\1\10\11\7\2\0\1\13\1\17\3\0\3\6\1\50\1\42\1\24"+
"\1\30\1\40\1\23\2\4\1\41\1\4\1\47\1\31\1\44\3\4"+
"\1\32\2\4\1\37\1\11\1\12\1\11\1\20\1\0\1\20\3\0"+
"\3\6\1\46\1\36\1\22\1\25\1\34\1\21\2\4\1\35\1\4"+
"\1\45\1\26\1\43\3\4\1\27\2\4\1\33\1\11\1\12\1\11"+
"\12\0\1\51\u1fa2\0\1\51\1\51\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\udfe6\0";
/**
* Translates characters to character classes
*/
private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);
/**
* Translates DFA states to action switch labels.
*/
private static final int [] ZZ_ACTION = zzUnpackAction();
private static final String ZZ_ACTION_PACKED_0 =
"\1\0\1\1\2\2\1\1\1\2\1\3\2\4\2\5"+
"\1\1\2\6\1\1\1\6\6\1\1\3\2\1\1\3"+
"\1\7\1\3\1\5\1\10\1\11\1\12\1\0\1\13"+
"\10\7\1\14\4\7\1\15\2\7\1\16\1\7\1\17"+
"\1\7\1\20";
private static int [] zzUnpackAction() {
int [] result = new int[55];
int offset = 0;
offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
return result;
}
private static int zzUnpackAction(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
do result[j++] = value; while (--count > 0);
}
return j;
}
/**
* Translates a state to a row index in the transition table
*/
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();
private static final String ZZ_ROWMAP_PACKED_0 =
"\0\0\0\52\0\124\0\52\0\176\0\250\0\322\0\374"+
"\0\52\0\u0126\0\176\0\u0150\0\u017a\0\u01a4\0\u01ce\0\52"+
"\0\u01f8\0\u0222\0\u024c\0\u0276\0\u02a0\0\u02ca\0\u02f4\0\u031e"+
"\0\u0348\0\u0372\0\176\0\u039c\0\u03c6\0\52\0\52\0\52"+
"\0\u03f0\0\176\0\u041a\0\u0444\0\u046e\0\u0498\0\u04c2\0\u04ec"+
"\0\u0516\0\u0540\0\52\0\u056a\0\u0594\0\u05be\0\u05e8\0\176"+
"\0\u0612\0\u063c\0\176\0\u0666\0\176\0\u0690\0\176";
private static int [] zzUnpackRowMap() {
int [] result = new int[55];
int offset = 0;
offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
return result;
}
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int high = packed.charAt(i++) << 16;
result[j++] = high | packed.charAt(i++);
}
return j;
}
/**
* The transition table of the DFA
*/
private static final int [] ZZ_TRANS = zzUnpackTrans();
private static final String ZZ_TRANS_PACKED_0 =
"\1\2\1\3\2\4\1\5\1\6\1\7\1\10\1\11"+
"\1\12\1\13\1\14\1\15\1\16\1\17\1\2\1\20"+
"\1\21\1\5\1\22\1\5\1\23\2\5\1\24\2\5"+
"\1\25\1\5\1\26\1\27\1\30\1\5\1\31\1\32"+
"\3\5\1\7\1\5\1\7\55\0\1\4\53\0\1\33"+
"\1\0\1\33\2\0\2\33\6\0\30\33\1\0\1\6"+
"\1\3\1\4\47\6\4\0\1\33\1\0\1\33\1\34"+
"\1\0\2\33\6\0\30\33\10\0\2\10\45\0\1\33"+
"\1\0\1\33\1\35\1\0\2\33\6\0\30\33\15\0"+
"\1\36\51\0\1\37\52\0\1\40\53\0\1\41\36\0"+
"\1\33\1\0\1\33\2\0\2\33\6\0\1\33\1\42"+
"\26\33\5\0\1\33\1\0\1\33\2\0\2\33\6\0"+
"\3\33\1\42\24\33\5\0\1\33\1\0\1\33\2\0"+
"\2\33\6\0\5\33\1\43\22\33\5\0\1\33\1\0"+
"\1\33\2\0\2\33\6\0\10\33\1\44\17\33\5\0"+
"\1\33\1\0\1\33\2\0\2\33\6\0\13\33\1\45"+
"\14\33\5\0\1\33\1\0\1\33\2\0\2\33\6\0"+
"\5\33\1\46\22\33\5\0\1\33\1\0\1\33\1\34"+
"\1\0\2\33\6\0\24\33\1\47\3\33\5\0\1\33"+
"\1\0\1\33\2\0\2\33\6\0\17\33\1\50\10\33"+
"\5\0\1\33\1\0\1\33\2\0\2\33\6\0\10\33"+
"\1\51\17\33\5\0\1\33\1\0\1\33\1\34\1\0"+
"\2\33\6\0\26\33\1\52\1\33\10\0\2\34\50\0"+
"\2\35\44\0\1\41\4\0\1\53\45\0\1\33\1\0"+
"\1\33\2\0\2\33\6\0\6\33\1\54\21\33\5\0"+
"\1\33\1\0\1\33\2\0\2\33\6\0\11\33\1\55"+
"\16\33\5\0\1\33\1\0\1\33\2\0\2\33\6\0"+
"\1\56\27\33\5\0\1\33\1\0\1\33\2\0\2\33"+
"\6\0\5\33\1\57\22\33\5\0\1\33\1\0\1\33"+
"\2\0\2\33\6\0\25\33\1\60\2\33\5\0\1\33"+
"\1\0\1\33\2\0\2\33\6\0\2\33\1\61\25\33"+
"\5\0\1\33\1\0\1\33\2\0\2\33\6\0\10\33"+
"\1\62\17\33\5\0\1\33\1\0\1\33\2\0\2\33"+
"\6\0\27\33\1\60\5\0\1\33\1\0\1\33\2\0"+
"\2\33\6\0\5\33\1\63\22\33\5\0\1\33\1\0"+
"\1\33\2\0\2\33\6\0\10\33\1\63\17\33\5\0"+
"\1\33\1\0\1\33\2\0\2\33\6\0\14\33\1\64"+
"\13\33\5\0\1\33\1\0\1\33\2\0\2\33\6\0"+
"\22\33\1\65\5\33\5\0\1\33\1\0\1\33\2\0"+
"\2\33\6\0\20\33\1\66\7\33\5\0\1\33\1\0"+
"\1\33\2\0\2\33\6\0\23\33\1\65\4\33\5\0"+
"\1\33\1\0\1\33\2\0\2\33\6\0\15\33\1\67"+
"\12\33\5\0\1\33\1\0\1\33\2\0\2\33\6\0"+
"\21\33\1\67\6\33\1\0";
private static int [] zzUnpackTrans() {
int [] result = new int[1722];
int offset = 0;
offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
return result;
}
private static int zzUnpackTrans(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
value--;
do result[j++] = value; while (--count > 0);
}
return j;
}
/* error codes */
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;
/* error messages for the codes above */
private static final String ZZ_ERROR_MSG[] = {
"Unknown internal scanner error",
"Error: could not match input",
"Error: pushback value was too large"
};
/**
* ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
*/
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();
private static final String ZZ_ATTRIBUTE_PACKED_0 =
"\1\0\1\11\1\1\1\11\4\1\1\11\6\1\1\11"+
"\15\1\3\11\1\0\11\1\1\11\14\1";
private static int [] zzUnpackAttribute() {
int [] result = new int[55];
int offset = 0;
offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
return result;
}
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
do result[j++] = value; while (--count > 0);
}
return j;
}
/** the input device */
private java.io.Reader zzReader;
/** the current state of the DFA */
private int zzState;
/** the current lexical state */
private int zzLexicalState = YYINITIAL;
/** this buffer contains the current text to be matched and is
the source of the yytext() string */
private char zzBuffer[] = new char[ZZ_BUFFERSIZE];
/** the textposition at the last accepting state */
private int zzMarkedPos;
/** the current text position in the buffer */
private int zzCurrentPos;
/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;
/** endRead marks the last character in the buffer, that has been read
from input */
private int zzEndRead;
/** number of newlines encountered up to the start of the matched text */
private int yyline;
/** the number of characters up to the start of the matched text */
private int yychar;
/**
* the number of characters from the last newline up to the start of the
* matched text
*/
private int yycolumn;
/**
* zzAtBOL == true <=> the scanner is currently at the beginning of a line
*/
private boolean zzAtBOL = true;
/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;
/** denotes if the user-EOF-code has already been executed */
private boolean zzEOFDone;
/**
* The number of occupied positions in zzBuffer beyond zzEndRead.
* When a lead/high surrogate has been read from the input stream
* into the final zzBuffer position, this will have a value of 1;
* otherwise, it will have a value of 0.
*/
private int zzFinalHighSurrogate = 0;
/* user code: */
private PrevioParser yyparser;
/**
* Class constructor.
* @param r Reference to the input reader.
* @param p Reference to the parser (syntactic analyzer).
*/
public PrevioLex(java.io.Reader r, PrevioParser p) {
this(r);
yyparser = p;
}
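// Usage sketch (assumed typical BYacc/J-style wiring, not part of the generated
// skeleton): the parser constructs the scanner with `new PrevioLex(reader, this)`
// and then calls yylex() repeatedly; the returned int is the token code
// (e.g. PrevioParser.ETIQUETA) and the token's semantic value is placed in
// yyparser.yylval before returning, as done in the actions at the end of this
// file.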
/**
* Creates a new scanner
*
* @param in the java.io.Reader to read input from.
*/
public PrevioLex(java.io.Reader in) {
this.zzReader = in;
}
/**
* Unpacks the compressed character translation table.
*
* @param packed the packed character translation table
* @return the unpacked character translation table
*/
private static char [] zzUnpackCMap(String packed) {
char [] map = new char[0x110000];
int i = 0; /* index in packed string */
int j = 0; /* index in unpacked array */
while (i < 184) {
int count = packed.charAt(i++);
char value = packed.charAt(i++);
do map[j++] = value; while (--count > 0);
}
return map;
}
/**
* Refills the input buffer.
*
* @return <code>false</code>, iff there was new input.
*
* @exception java.io.IOException if any I/O-Error occurs
*/
private boolean zzRefill() throws java.io.IOException {
/* first: make room (if you can) */
if (zzStartRead > 0) {
zzEndRead += zzFinalHighSurrogate;
zzFinalHighSurrogate = 0;
System.arraycopy(zzBuffer, zzStartRead,
zzBuffer, 0,
zzEndRead-zzStartRead);
/* translate stored positions */
zzEndRead-= zzStartRead;
zzCurrentPos-= zzStartRead;
zzMarkedPos-= zzStartRead;
zzStartRead = 0;
}
/* is the buffer big enough? */
if (zzCurrentPos >= zzBuffer.length - zzFinalHighSurrogate) {
/* if not: blow it up */
char newBuffer[] = new char[zzBuffer.length*2];
System.arraycopy(zzBuffer, 0, newBuffer, 0, zzBuffer.length);
zzBuffer = newBuffer;
zzEndRead += zzFinalHighSurrogate;
zzFinalHighSurrogate = 0;
}
/* fill the buffer with new input */
int requested = zzBuffer.length - zzEndRead;
int numRead = zzReader.read(zzBuffer, zzEndRead, requested);
/* not supposed to occur according to specification of java.io.Reader */
if (numRead == 0) {
throw new java.io.IOException("Reader returned 0 characters. See JFlex examples for workaround.");
}
if (numRead > 0) {
zzEndRead += numRead;
/* If numRead == requested, we might have requested too few chars to
encode a full Unicode character. We assume that a Reader would
otherwise never return half characters. */
if (numRead == requested) {
if (Character.isHighSurrogate(zzBuffer[zzEndRead - 1])) {
--zzEndRead;
zzFinalHighSurrogate = 1;
}
}
/* potentially more input available */
return false;
}<|fim▁hole|> }
/**
* Closes the input stream.
*/
public final void yyclose() throws java.io.IOException {
zzAtEOF = true; /* indicate end of file */
zzEndRead = zzStartRead; /* invalidate buffer */
if (zzReader != null)
zzReader.close();
}
/**
* Resets the scanner to read from a new input stream.
* Does not close the old reader.
*
* All internal variables are reset, the old input stream
* <b>cannot</b> be reused (internal buffer is discarded and lost).
* Lexical state is set to <tt>ZZ_INITIAL</tt>.
*
* Internal scan buffer is resized down to its initial length, if it has grown.
*
* @param reader the new input stream
*/
public final void yyreset(java.io.Reader reader) {
zzReader = reader;
zzAtBOL = true;
zzAtEOF = false;
zzEOFDone = false;
zzEndRead = zzStartRead = 0;
zzCurrentPos = zzMarkedPos = 0;
zzFinalHighSurrogate = 0;
yyline = yychar = yycolumn = 0;
zzLexicalState = YYINITIAL;
if (zzBuffer.length > ZZ_BUFFERSIZE)
zzBuffer = new char[ZZ_BUFFERSIZE];
}
/**
* Returns the current lexical state.
*/
public final int yystate() {
return zzLexicalState;
}
/**
* Enters a new lexical state
*
* @param newState the new lexical state
*/
public final void yybegin(int newState) {
zzLexicalState = newState;
}
/**
* Returns the text matched by the current regular expression.
*/
public final String yytext() {
return new String( zzBuffer, zzStartRead, zzMarkedPos-zzStartRead );
}
/**
* Returns the character at position <tt>pos</tt> from the
* matched text.
*
* It is equivalent to yytext().charAt(pos), but faster
*
* @param pos the position of the character to fetch.
* A value from 0 to yylength()-1.
*
* @return the character at position pos
*/
public final char yycharat(int pos) {
return zzBuffer[zzStartRead+pos];
}
/**
* Returns the length of the matched text region.
*/
public final int yylength() {
return zzMarkedPos-zzStartRead;
}
/**
* Reports an error that occurred while scanning.
*
* In a well-formed scanner (no or only correct usage of
* yypushback(int) and a match-all fallback rule) this method
* will only be called with things that "Can't Possibly Happen".
* If this method is called, something is seriously wrong
* (e.g. a JFlex bug producing a faulty scanner etc.).
*
* Usual syntax/scanner level error handling should be done
* in error fallback rules.
*
* @param errorCode the code of the errormessage to display
*/
private void zzScanError(int errorCode) {
String message;
try {
message = ZZ_ERROR_MSG[errorCode];
}
catch (ArrayIndexOutOfBoundsException e) {
message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
}
throw new Error(message);
}
/**
* Pushes the specified amount of characters back into the input stream.
*
* They will be read again by then next call of the scanning method
*
* @param number the number of characters to be read again.
* This number must not be greater than yylength()!
*/
public void yypushback(int number) {
if ( number > yylength() )
zzScanError(ZZ_PUSHBACK_2BIG);
zzMarkedPos -= number;
}
/**
* Contains user EOF-code, which will be executed exactly once,
* when the end of file is reached
*/
private void zzDoEOF() throws java.io.IOException {
if (!zzEOFDone) {
zzEOFDone = true;
yyclose();
}
}
/**
* Resumes scanning until the next regular expression is matched,
* the end of input is encountered or an I/O-Error occurs.
*
* @return the next token
* @exception java.io.IOException if any I/O-Error occurs
*/
public int yylex() throws java.io.IOException {
int zzInput;
int zzAction;
// cached fields:
int zzCurrentPosL;
int zzMarkedPosL;
int zzEndReadL = zzEndRead;
char [] zzBufferL = zzBuffer;
char [] zzCMapL = ZZ_CMAP;
int [] zzTransL = ZZ_TRANS;
int [] zzRowMapL = ZZ_ROWMAP;
int [] zzAttrL = ZZ_ATTRIBUTE;
while (true) {
zzMarkedPosL = zzMarkedPos;
zzAction = -1;
zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;
zzState = ZZ_LEXSTATE[zzLexicalState];
// set up zzAction for empty match case:
int zzAttributes = zzAttrL[zzState];
if ( (zzAttributes & 1) == 1 ) {
zzAction = zzState;
}
zzForAction: {
while (true) {
if (zzCurrentPosL < zzEndReadL) {
zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL, zzEndReadL);
zzCurrentPosL += Character.charCount(zzInput);
}
else if (zzAtEOF) {
zzInput = YYEOF;
break zzForAction;
}
else {
// store back cached positions
zzCurrentPos = zzCurrentPosL;
zzMarkedPos = zzMarkedPosL;
boolean eof = zzRefill();
// get translated positions and possibly new buffer
zzCurrentPosL = zzCurrentPos;
zzMarkedPosL = zzMarkedPos;
zzBufferL = zzBuffer;
zzEndReadL = zzEndRead;
if (eof) {
zzInput = YYEOF;
break zzForAction;
}
else {
zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL, zzEndReadL);
zzCurrentPosL += Character.charCount(zzInput);
}
}
int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
if (zzNext == -1) break zzForAction;
zzState = zzNext;
zzAttributes = zzAttrL[zzState];
if ( (zzAttributes & 1) == 1 ) {
zzAction = zzState;
zzMarkedPosL = zzCurrentPosL;
if ( (zzAttributes & 8) == 8 ) break zzForAction;
}
}
}
// store back cached position
zzMarkedPos = zzMarkedPosL;
if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
zzAtEOF = true;
zzDoEOF();
{ return 0; }
}
else {
switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
case 1:
{ yyparser.programa().error().deCaracterNoReconocido(yytext());
}
case 17: break;
case 2:
{
}
case 18: break;
case 3:
{ yyparser.yylval = new PrevioParserVal(yytext());
return PrevioParser.ETIQUETA;
}
case 19: break;
case 4:
{ yyparser.yylval = new PrevioParserVal(yytext());
return PrevioParser.NUMERO;
}
case 20: break;
case 5:
{ yyparser.yylval = new PrevioParserVal(yytext());
return PrevioParser.VARIABLE;
}
case 21: break;
case 6:
{ return yycharat(0);
}
case 22: break;
case 7:
{ yyparser.yylval = new PrevioParserVal(yytext());
return PrevioParser.IDMACRO;
}
case 23: break;
case 8:
{ return PrevioParser.FLECHA;
}
case 24: break;
case 9:
{ return PrevioParser.DECREMENTO;
}
case 25: break;
case 10:
{ return PrevioParser.INCREMENTO;
}
case 26: break;
case 11:
{ return PrevioParser.IF;
}
case 27: break;
case 12:
{ return PrevioParser.DISTINTO;
}
case 28: break;
case 13:
{ return PrevioParser.END;
}
case 29: break;
case 14:
{ return PrevioParser.GOTO;
}
case 30: break;
case 15:
{ return PrevioParser.LOOP;
}
case 31: break;
case 16:
{ return PrevioParser.WHILE;
}
case 32: break;
default:
zzScanError(ZZ_NO_MATCH);
}
}
}
}
}<|fim▁end|>
|
/* numRead < 0 ==> end of stream */
return true;
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import sys
import glob
import numpy
try:
from setuptools import setup
from setuptools import Extension
except ImportError:
from distutils.core import setup
from distutils.extension import Extension
#
# Force `setup_requires` stuff like Cython to be installed before proceeding
#
from setuptools.dist import Distribution
Distribution(dict(setup_requires='Cython'))
try:
from Cython.Distutils import build_ext
except ImportError:
print("Could not import Cython.Distutils. Install `cython` and rerun.")
sys.exit(1)
# from distutils.core import setup, Extension
# from Cython.Distutils import build_ext
# Build extensions
module1 = Extension( name = "openpiv.process",
sources = ["openpiv/src/process.pyx"],
include_dirs = [numpy.get_include()],
)
module2 = Extension( name = "openpiv.lib",
sources = ["openpiv/src/lib.pyx"],
include_dirs = [numpy.get_include()],
)
# a list of the extension modules that we want to distribute
ext_modules = [module1, module2]
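# The Cython extensions above are compiled by the build_ext command registered
# below via cmdclass; for a local development build one would typically run
# `python setup.py build_ext --inplace` (standard distutils/setuptools usage,
# not specific to this project).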
# Package data are those filed 'strictly' needed by the program
# to function correctly. Images, default configuration files, et cetera.
package_data = [ 'data/defaults-processing-parameters.cfg',
'data/ui_resources.qrc',
'data/images/*.png',
'data/icons/*.png',
]
# data files are other files which are not required by the program but
# which we want to distribute as well, for example documentation.
data_files = [ ('openpiv/examples/tutorial-part1', glob.glob('openpiv/examples/tutorial-part1/*') ),
('openpiv/examples/masking_tutorial', glob.glob('openpiv/examples/masking_tutorial/*') ),
('openpiv/docs/openpiv/examples/example1', glob.glob('openpiv/docs/examples/example1/*') ),
('openpiv/docs/openpiv/examples/gurney-flap', glob.glob('openpiv/docs/examples/gurney-flap/*') ),
('openpiv/docs/openpiv', ['README.md'] ),
('openpiv/data/ui', glob.glob('openpiv/data/ui/*.ui') ),
]
# packages that we want to distribute. THis is how
# we have divided the openpiv package.
packages = ['openpiv', 'openpiv.ui']
setup( name = "OpenPIV",
version = "0.20.1",
author = "OpenPIV contributors",
author_email = "[email protected]",
description = "An open source software for PIV data analysis",
license = "GPL v3",
url = "http://www.openpiv.net",
long_description = """OpenPIV is a set of open source algorithms and methods
for the state-of-the-art experimental tool
of Particle Image Velocimetry (PIV) which
are free, open, and easy to operate.""",
ext_modules = ext_modules,
packages = packages,<|fim▁hole|> package_data = {'': package_data},
data_files = data_files,
install_requires = ['numpy','cython']
)<|fim▁end|>
|
cmdclass = {'build_ext': build_ext},
|
<|file_name|>treemap.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An ordered map and set implemented as self-balancing binary search
//! trees. The only requirement for the types is that the key implements
//! `Ord`.
use std::cmp::Ordering;
use std::fmt::Show;
use std::fmt;
use std::iter::Peekable;
use std::iter;
use std::mem::{replace, swap};
use std::ptr;
// This is implemented as an AA tree, which is a simplified variation of
// a red-black tree where red (horizontal) nodes can only be added
// as a right child. The time complexity is the same, and re-balancing
// operations are more frequent but also cheaper.
// Future improvements:
// range search - O(log n) retrieval of an iterator from some key
// (possibly) implement the overloads Python does for sets:
// * intersection: &
// * difference: -
// * symmetric difference: ^
// * union: |
// These would be convenient since the methods work like `each`
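// For reference (sketch of the usual AA-tree invariants, which the re-balancing
// code relies on): every node carries a `level`; left children are one level
// below their parent, right children are at the same level (a "horizontal"/red
// link) or one below, and two consecutive horizontal right links are not
// allowed.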
#[allow(missing_doc)]
#[deriving(Clone)]
pub struct TreeMap<K, V> {
root: Option<Box<TreeNode<K, V>>>,
length: uint
}
impl<K: PartialEq + Ord, V: PartialEq> PartialEq for TreeMap<K, V> {
fn eq(&self, other: &TreeMap<K, V>) -> bool {
self.len() == other.len() &&
self.iter().zip(other.iter()).all(|(a, b)| a == b)
}
}
// Lexicographical comparison
fn lt<K: PartialOrd + Ord, V: PartialOrd>(a: &TreeMap<K, V>,
b: &TreeMap<K, V>) -> bool {
// the Zip iterator is as long as the shortest of a and b.
for ((key_a, value_a), (key_b, value_b)) in a.iter().zip(b.iter()) {
if *key_a < *key_b { return true; }
if *key_a > *key_b { return false; }
if *value_a < *value_b { return true; }
if *value_a > *value_b { return false; }
}
a.len() < b.len()
}
impl<K: PartialOrd + Ord, V: PartialOrd> PartialOrd for TreeMap<K, V> {
#[inline]
fn lt(&self, other: &TreeMap<K, V>) -> bool { lt(self, other) }
}
impl<K: Ord + Show, V: Show> Show for TreeMap<K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, r"\{"));
for (i, (k, v)) in self.iter().enumerate() {
if i != 0 { try!(write!(f, ", ")); }
try!(write!(f, "{}: {}", *k, *v));
}
write!(f, r"\}")
}
}
impl<K: Ord, V> Container for TreeMap<K, V> {
fn len(&self) -> uint { self.length }
}
impl<K: Ord, V> Mutable for TreeMap<K, V> {
fn clear(&mut self) {
self.root = None;
self.length = 0
}
}
impl<K: Ord, V> Map<K, V> for TreeMap<K, V> {
fn find<'a>(&'a self, key: &K) -> Option<&'a V> {
let mut current: &'a Option<Box<TreeNode<K, V>>> = &self.root;
loop {
match *current {
Some(ref r) => {
match key.cmp(&r.key) {
Less => current = &r.left,
Greater => current = &r.right,
Equal => return Some(&r.value)
}
}
None => return None
}
}
}
}
impl<K: Ord, V> MutableMap<K, V> for TreeMap<K, V> {
#[inline]
fn find_mut<'a>(&'a mut self, key: &K) -> Option<&'a mut V> {
find_mut(&mut self.root, key)
}
fn swap(&mut self, key: K, value: V) -> Option<V> {
let ret = insert(&mut self.root, key, value);
if ret.is_none() { self.length += 1 }
ret
}
fn pop(&mut self, key: &K) -> Option<V> {
let ret = remove(&mut self.root, key);
if ret.is_some() { self.length -= 1 }
ret
}
}
impl<K: Ord, V> TreeMap<K, V> {
/// Create an empty TreeMap
pub fn new() -> TreeMap<K, V> { TreeMap{root: None, length: 0} }
/// Get a lazy iterator over the key-value pairs in the map.
/// Requires that it be frozen (immutable).
pub fn iter<'a>(&'a self) -> Entries<'a, K, V> {
Entries {
stack: vec!(),
node: deref(&self.root),
remaining_min: self.length,
remaining_max: self.length
}
}
/// Get a lazy reverse iterator over the key-value pairs in the map.
/// Requires that it be frozen (immutable).
pub fn rev_iter<'a>(&'a self) -> RevEntries<'a, K, V> {
RevEntries{iter: self.iter()}
}
/// Get a lazy forward iterator over the key-value pairs in the
/// map, with the values being mutable.
pub fn mut_iter<'a>(&'a mut self) -> MutEntries<'a, K, V> {
MutEntries {
stack: vec!(),
node: mut_deref(&mut self.root),
remaining_min: self.length,
remaining_max: self.length
}
}
/// Get a lazy reverse iterator over the key-value pairs in the
/// map, with the values being mutable.
pub fn mut_rev_iter<'a>(&'a mut self) -> RevMutEntries<'a, K, V> {
RevMutEntries{iter: self.mut_iter()}
}
/// Get a lazy iterator that consumes the treemap.
pub fn move_iter(self) -> MoveEntries<K, V> {
let TreeMap { root: root, length: length } = self;
let stk = match root {
None => vec!(),
Some(box tn) => vec!(tn)
};
MoveEntries {
stack: stk,
remaining: length
}
}
}
// range iterators.
macro_rules! bound_setup {
// initialiser of the iterator to manipulate
($iter:expr,
// whether we are looking for the lower or upper bound.
$is_lower_bound:expr) => {
{
let mut iter = $iter;
loop {
if !iter.node.is_null() {
let node_k = unsafe {&(*iter.node).key};
match k.cmp(node_k) {
Less => iter.traverse_left(),
Greater => iter.traverse_right(),
Equal => {
if $is_lower_bound {
iter.traverse_complete();
return iter;
} else {
iter.traverse_right()
}
}
}
} else {
iter.traverse_complete();
return iter;
}
}
}
}
}
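// How bound_setup! walks the tree (illustrative): starting at the root it
// compares the search key `k` with each node key; for a lower bound an equal
// key terminates the search at that node, while for an upper bound the walk
// continues to the right so that only strictly greater keys remain. E.g. for
// keys {1, 3, 5}, lower_bound(&3) yields 3 first whereas upper_bound(&3)
// yields 5 first.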
impl<K: Ord, V> TreeMap<K, V> {
/// Get a lazy iterator that should be initialized using
/// `traverse_left`/`traverse_right`/`traverse_complete`.
fn iter_for_traversal<'a>(&'a self) -> Entries<'a, K, V> {
Entries {
stack: vec!(),
node: deref(&self.root),
remaining_min: 0,
remaining_max: self.length
}
}
/// Return a lazy iterator to the first key-value pair whose key is not less than `k`
/// If all keys in map are less than `k` an empty iterator is returned.
pub fn lower_bound<'a>(&'a self, k: &K) -> Entries<'a, K, V> {
bound_setup!(self.iter_for_traversal(), true)
}
/// Return a lazy iterator to the first key-value pair whose key is greater than `k`
/// If all keys in map are not greater than `k` an empty iterator is returned.
pub fn upper_bound<'a>(&'a self, k: &K) -> Entries<'a, K, V> {
bound_setup!(self.iter_for_traversal(), false)
}
/// Get a lazy iterator that should be initialized using
/// `traverse_left`/`traverse_right`/`traverse_complete`.
fn mut_iter_for_traversal<'a>(&'a mut self) -> MutEntries<'a, K, V> {
MutEntries {
stack: vec!(),
node: mut_deref(&mut self.root),
remaining_min: 0,
remaining_max: self.length
}
}
/// Return a lazy value iterator to the first key-value pair (with
/// the value being mutable) whose key is not less than `k`.
///
/// If all keys in map are less than `k` an empty iterator is
/// returned.
pub fn mut_lower_bound<'a>(&'a mut self, k: &K) -> MutEntries<'a, K, V> {
bound_setup!(self.mut_iter_for_traversal(), true)
}
/// Return a lazy iterator to the first key-value pair (with the
/// value being mutable) whose key is greater than `k`.
///
/// If all keys in map are not greater than `k` an empty iterator
/// is returned.
pub fn mut_upper_bound<'a>(&'a mut self, k: &K) -> MutEntries<'a, K, V> {
bound_setup!(self.mut_iter_for_traversal(), false)
}
}
/// Lazy forward iterator over a map
pub struct Entries<'a, K, V> {
stack: Vec<&'a TreeNode<K, V>>,
// See the comment on MutEntries; this is just to allow
// code-sharing (for this immutable-values iterator it *could* very
// well be Option<&'a TreeNode<K,V>>).
node: *TreeNode<K, V>,
remaining_min: uint,
remaining_max: uint
}
/// Lazy backward iterator over a map
pub struct RevEntries<'a, K, V> {
iter: Entries<'a, K, V>,
}
/// Lazy forward iterator over a map that allows for the mutation of
/// the values.
pub struct MutEntries<'a, K, V> {
stack: Vec<&'a mut TreeNode<K, V>>,
// Unfortunately, we require some unsafe-ness to get around the
// fact that we would be storing a reference *into* one of the
// nodes in the stack.
//
// As far as the compiler knows, this would let us invalidate the
// reference by assigning a new value to this node's position in
// its parent, which would cause this current one to be
// deallocated so this reference would be invalid. (i.e. the
// compiler's complaints are 100% correct.)
//
// However, as far as you humans reading this code know (or are
// about to know, if you haven't read far enough down yet), we are
// only reading from the TreeNode.{left,right} fields. The only
// thing that is ever mutated is the .value field (although any
// actual mutation that happens is done externally, by the
// iterator consumer). So, don't be so concerned, rustc, we've got
// it under control.
//
// (This field can legitimately be null.)
node: *mut TreeNode<K, V>,
remaining_min: uint,
remaining_max: uint
}
/// Lazy backward iterator over a map
pub struct RevMutEntries<'a, K, V> {
iter: MutEntries<'a, K, V>,
}
// FIXME #5846 we want to be able to choose between &x and &mut x
// (with many different `x`) below, so we need to optionally pass mut
// as a tt, but the only thing we can do with a `tt` is pass them to
// other macros, so this takes the `& <mutability> <operand>` token
// sequence and forces their evaluation as an expression.
macro_rules! addr { ($e:expr) => { $e }}
// putting an optional mut into type signatures
macro_rules! item { ($i:item) => { $i }}
macro_rules! define_iterator {
($name:ident,
$rev_name:ident,
// the function to go from &m Option<Box<TreeNode>> to *m TreeNode
deref = $deref:ident,
// see comment on `addr!`, this is just an optional `mut`, but
// there's no support for 0-or-1 repeats.
addr_mut = $($addr_mut:tt)*
) => {
// private methods on the forward iterator (item!() for the
// addr_mut in the next_ return value)
item!(impl<'a, K, V> $name<'a, K, V> {
#[inline(always)]
fn next_(&mut self, forward: bool) -> Option<(&'a K, &'a $($addr_mut)* V)> {
while !self.stack.is_empty() || !self.node.is_null() {
if !self.node.is_null() {
let node = unsafe {addr!(& $($addr_mut)* *self.node)};
{
let next_node = if forward {
addr!(& $($addr_mut)* node.left)
} else {
addr!(& $($addr_mut)* node.right)
};
self.node = $deref(next_node);
}
self.stack.push(node);
} else {
let node = self.stack.pop().unwrap();
let next_node = if forward {
addr!(& $($addr_mut)* node.right)
} else {
addr!(& $($addr_mut)* node.left)
};
self.node = $deref(next_node);
self.remaining_max -= 1;
if self.remaining_min > 0 {
self.remaining_min -= 1;
}
return Some((&node.key, addr!(& $($addr_mut)* node.value)));
}
}
None
}
/// traverse_left, traverse_right and traverse_complete are
/// used to initialize Entries/MutEntries
/// pointing to an element inside the tree structure.
///
/// They should be used in following manner:
/// - create iterator using TreeMap::[mut_]iter_for_traversal
/// - find required node using `traverse_left`/`traverse_right`
/// (current node is `Entries::node` field)
/// - complete initialization with `traverse_complete`
///
/// After this, iteration will start from `self.node`. If
/// `self.node` is null, iteration will start from the last
/// node from which we traversed left.
#[inline]
fn traverse_left(&mut self) {
let node = unsafe {addr!(& $($addr_mut)* *self.node)};
self.node = $deref(addr!(& $($addr_mut)* node.left));
self.stack.push(node);
}
#[inline]
fn traverse_right(&mut self) {
let node = unsafe {addr!(& $($addr_mut)* *self.node)};
self.node = $deref(addr!(& $($addr_mut)* node.right));
}
#[inline]
fn traverse_complete(&mut self) {
if !self.node.is_null() {
unsafe {
self.stack.push(addr!(& $($addr_mut)* *self.node));
}
self.node = ptr::RawPtr::null();
}
}
})
// the forward Iterator impl.
item!(impl<'a, K, V> Iterator<(&'a K, &'a $($addr_mut)* V)> for $name<'a, K, V> {
/// Advance the iterator to the next node (in order) and return a
/// tuple with a reference to the key and value. If there are no
/// more nodes, return `None`.
fn next(&mut self) -> Option<(&'a K, &'a $($addr_mut)* V)> {
self.next_(true)
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
(self.remaining_min, Some(self.remaining_max))
}
})
// the reverse Iterator impl.
item!(impl<'a, K, V> Iterator<(&'a K, &'a $($addr_mut)* V)> for $rev_name<'a, K, V> {
fn next(&mut self) -> Option<(&'a K, &'a $($addr_mut)* V)> {
self.iter.next_(false)
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
self.iter.size_hint()
}
})
}
} // end of define_iterator
define_iterator! {
Entries,
RevEntries,
deref = deref,
// immutable, so no mut
addr_mut =
}
define_iterator! {
MutEntries,
RevMutEntries,
deref = mut_deref,
addr_mut = mut
}
fn deref<'a, K, V>(node: &'a Option<Box<TreeNode<K, V>>>) -> *TreeNode<K, V> {
match *node {
Some(ref n) => {
let n: &TreeNode<K, V> = *n;
n as *TreeNode<K, V>
}
None => ptr::null()
}
}
fn mut_deref<K, V>(x: &mut Option<Box<TreeNode<K, V>>>)
-> *mut TreeNode<K, V> {
match *x {
Some(ref mut n) => {
let n: &mut TreeNode<K, V> = *n;
n as *mut TreeNode<K, V>
}
None => ptr::mut_null()
}
}
/// Lazy forward iterator over a map that consumes the map while iterating
pub struct MoveEntries<K, V> {
stack: Vec<TreeNode<K, V>>,
remaining: uint
}
impl<K, V> Iterator<(K, V)> for MoveEntries<K,V> {
#[inline]
fn next(&mut self) -> Option<(K, V)> {
while !self.stack.is_empty() {
let TreeNode {
key: key,
value: value,
left: left,
right: right,
level: level
} = self.stack.pop().unwrap();
match left {
Some(box left) => {
let n = TreeNode {
key: key,
value: value,
left: None,
right: right,
level: level
};
self.stack.push(n);
self.stack.push(left);
}
None => {
match right {
Some(box right) => self.stack.push(right),
None => ()
}
self.remaining -= 1;
return Some((key, value))
}
}
}
None
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
(self.remaining, Some(self.remaining))
}
}
impl<'a, T> Iterator<&'a T> for SetItems<'a, T> {
#[inline]
fn next(&mut self) -> Option<&'a T> {
self.iter.next().map(|(value, _)| value)
}
}
impl<'a, T> Iterator<&'a T> for RevSetItems<'a, T> {
#[inline]
fn next(&mut self) -> Option<&'a T> {
self.iter.next().map(|(value, _)| value)
}
}
/// An implementation of the `Set` trait on top of the `TreeMap` container. The
/// only requirement is that the type of the elements contained ascribes to the
/// `Ord` trait.
#[deriving(Clone)]
pub struct TreeSet<T> {
map: TreeMap<T, ()>
}
impl<T: PartialEq + Ord> PartialEq for TreeSet<T> {
#[inline]
fn eq(&self, other: &TreeSet<T>) -> bool { self.map == other.map }
}
impl<T: PartialOrd + Ord> PartialOrd for TreeSet<T> {
#[inline]
fn lt(&self, other: &TreeSet<T>) -> bool { self.map < other.map }
}
impl<T: Ord + Show> Show for TreeSet<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, r"\{"));
for (i, x) in self.iter().enumerate() {
if i != 0 { try!(write!(f, ", ")); }
try!(write!(f, "{}", *x));
}
write!(f, r"\}")
}
}
impl<T: Ord> Container for TreeSet<T> {
#[inline]
fn len(&self) -> uint { self.map.len() }
}
impl<T: Ord> Mutable for TreeSet<T> {
#[inline]
fn clear(&mut self) { self.map.clear() }
}
impl<T: Ord> Set<T> for TreeSet<T> {
#[inline]
fn contains(&self, value: &T) -> bool {
self.map.contains_key(value)
}
fn is_disjoint(&self, other: &TreeSet<T>) -> bool {
self.intersection(other).next().is_none()
}
fn is_subset(&self, other: &TreeSet<T>) -> bool {
let mut x = self.iter();
let mut y = other.iter();
let mut a = x.next();
let mut b = y.next();
while a.is_some() {
if b.is_none() {
return false;
}
let a1 = a.unwrap();
let b1 = b.unwrap();
match b1.cmp(a1) {
Less => (),
Greater => return false,
Equal => a = x.next(),
}
b = y.next();
}
true
}
}
impl<T: Ord> MutableSet<T> for TreeSet<T> {
#[inline]
fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()) }
#[inline]
fn remove(&mut self, value: &T) -> bool { self.map.remove(value) }
}
impl<T: Ord> TreeSet<T> {
/// Create an empty TreeSet
#[inline]
pub fn new() -> TreeSet<T> { TreeSet{map: TreeMap::new()} }
/// Get a lazy iterator over the values in the set.
/// Requires that it be frozen (immutable).
#[inline]
pub fn iter<'a>(&'a self) -> SetItems<'a, T> {
SetItems{iter: self.map.iter()}
}
    /// Get a lazy iterator over the values in the set, in reverse
    /// (descending) order. Requires that it be frozen (immutable).
#[inline]
pub fn rev_iter<'a>(&'a self) -> RevSetItems<'a, T> {
RevSetItems{iter: self.map.rev_iter()}
}
/// Get a lazy iterator that consumes the set.
#[inline]
pub fn move_iter(self) -> MoveSetItems<T> {
self.map.move_iter().map(|(value, _)| value)
}
/// Get a lazy iterator pointing to the first value not less than `v` (greater or equal).
    /// If all elements in the set are less than `v`, an empty iterator is returned.
#[inline]
pub fn lower_bound<'a>(&'a self, v: &T) -> SetItems<'a, T> {
SetItems{iter: self.map.lower_bound(v)}
}
/// Get a lazy iterator pointing to the first value greater than `v`.
    /// If no element in the set is greater than `v`, an empty iterator is returned.
#[inline]
pub fn upper_bound<'a>(&'a self, v: &T) -> SetItems<'a, T> {
SetItems{iter: self.map.upper_bound(v)}
}
/// Visit the values (in-order) representing the difference
pub fn difference<'a>(&'a self, other: &'a TreeSet<T>) -> DifferenceItems<'a, T> {
DifferenceItems{a: self.iter().peekable(), b: other.iter().peekable()}
}
/// Visit the values (in-order) representing the symmetric difference
pub fn symmetric_difference<'a>(&'a self, other: &'a TreeSet<T>)
-> SymDifferenceItems<'a, T> {
SymDifferenceItems{a: self.iter().peekable(), b: other.iter().peekable()}
}
/// Visit the values (in-order) representing the intersection
pub fn intersection<'a>(&'a self, other: &'a TreeSet<T>)
-> IntersectionItems<'a, T> {
IntersectionItems{a: self.iter().peekable(), b: other.iter().peekable()}
}
/// Visit the values (in-order) representing the union
pub fn union<'a>(&'a self, other: &'a TreeSet<T>) -> UnionItems<'a, T> {
UnionItems{a: self.iter().peekable(), b: other.iter().peekable()}
}
}
/// Lazy forward iterator over a set
pub struct SetItems<'a, T> {
iter: Entries<'a, T, ()>
}
/// Lazy backward iterator over a set
pub struct RevSetItems<'a, T> {
iter: RevEntries<'a, T, ()>
}
/// Lazy forward iterator over a set that consumes the set while iterating
pub type MoveSetItems<T> = iter::Map<'static, (T, ()), T, MoveEntries<T, ()>>;
/// Lazy iterator producing elements in the set difference (in-order)
pub struct DifferenceItems<'a, T> {
a: Peekable<&'a T, SetItems<'a, T>>,
b: Peekable<&'a T, SetItems<'a, T>>,
}
/// Lazy iterator producing elements in the set symmetric difference (in-order)
pub struct SymDifferenceItems<'a, T> {
a: Peekable<&'a T, SetItems<'a, T>>,
b: Peekable<&'a T, SetItems<'a, T>>,
}
/// Lazy iterator producing elements in the set intersection (in-order)
pub struct IntersectionItems<'a, T> {
a: Peekable<&'a T, SetItems<'a, T>>,
b: Peekable<&'a T, SetItems<'a, T>>,
}
/// Lazy iterator producing elements in the set union (in-order)
pub struct UnionItems<'a, T> {
a: Peekable<&'a T, SetItems<'a, T>>,
b: Peekable<&'a T, SetItems<'a, T>>,
}
/// Compare `x` and `y`, but return `short` if x is None and `long` if y is None
fn cmp_opt<T: Ord>(x: Option<&T>, y: Option<&T>,
short: Ordering, long: Ordering) -> Ordering {
match (x, y) {
(None , _ ) => short,
(_ , None ) => long,
(Some(x1), Some(y1)) => x1.cmp(y1),
}
}
impl<'a, T: Ord> Iterator<&'a T> for DifferenceItems<'a, T> {
fn next(&mut self) -> Option<&'a T> {
loop {
match cmp_opt(self.a.peek(), self.b.peek(), Less, Less) {
Less => return self.a.next(),
Equal => { self.a.next(); self.b.next(); }
Greater => { self.b.next(); }
}
}
}
}
impl<'a, T: Ord> Iterator<&'a T> for SymDifferenceItems<'a, T> {
fn next(&mut self) -> Option<&'a T> {
loop {
match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
Less => return self.a.next(),
Equal => { self.a.next(); self.b.next(); }
Greater => return self.b.next(),
}
}
}
}
impl<'a, T: Ord> Iterator<&'a T> for IntersectionItems<'a, T> {
fn next(&mut self) -> Option<&'a T> {
loop {
let o_cmp = match (self.a.peek(), self.b.peek()) {
(None , _ ) => None,
(_ , None ) => None,
(Some(a1), Some(b1)) => Some(a1.cmp(b1)),
};
match o_cmp {
None => return None,
Some(Less) => { self.a.next(); }
Some(Equal) => { self.b.next(); return self.a.next() }
Some(Greater) => { self.b.next(); }
}
}
}
}
impl<'a, T: Ord> Iterator<&'a T> for UnionItems<'a, T> {
fn next(&mut self) -> Option<&'a T> {
loop {
match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
Less => return self.a.next(),
Equal => { self.b.next(); return self.a.next() }
Greater => return self.b.next(),
}
}
}
}
// Nodes keep track of their level in the tree, starting at 1 in the
// leaves and with a red child sharing the level of the parent.
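// For instance, in a well-formed tree a node at level 2 always has a left
// child at level 1 (a "black" link) and a right child at level 1 or at
// level 2 -- the latter being the single permitted horizontal ("red") link.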
#[deriving(Clone)]
struct TreeNode<K, V> {
key: K,
value: V,
left: Option<Box<TreeNode<K, V>>>,
right: Option<Box<TreeNode<K, V>>>,
level: uint
}
impl<K: Ord, V> TreeNode<K, V> {
/// Creates a new tree node.
#[inline]
pub fn new(key: K, value: V) -> TreeNode<K, V> {
TreeNode{key: key, value: value, left: None, right: None, level: 1}
}
}
// Remove left horizontal link by rotating right
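//
// Illustrative shape of the rotation (A, B and R stand for arbitrary
// subtrees); L is a left child on the same level as T, i.e. the horizontal
// link that the rotation removes:
//
//       T              L
//      / \            / \
//     L   R    =>    A   T
//    / \                / \
//   A   B              B   R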
fn skew<K: Ord, V>(node: &mut Box<TreeNode<K, V>>) {
if node.left.as_ref().map_or(false, |x| x.level == node.level) {
let mut save = node.left.take_unwrap();
swap(&mut node.left, &mut save.right); // save.right now None
swap(node, &mut save);
node.right = Some(save);
}
}
// Remove dual horizontal link by rotating left and increasing level of
// the parent
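//
// Illustrative shape of the rotation; R and X sit on the same level as T
// (two horizontal links in a row), and R is pulled up one level:
//
//     T                  R
//    / \                / \
//   A   R      =>      T   X
//      / \            / \
//     B   X          A   B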
fn split<K: Ord, V>(node: &mut Box<TreeNode<K, V>>) {
if node.right.as_ref().map_or(false,
|x| x.right.as_ref().map_or(false, |y| y.level == node.level)) {
let mut save = node.right.take_unwrap();
swap(&mut node.right, &mut save.left); // save.left now None
save.level += 1;
swap(node, &mut save);
node.left = Some(save);
}
}
fn find_mut<'r, K: Ord, V>(node: &'r mut Option<Box<TreeNode<K, V>>>,
key: &K)
-> Option<&'r mut V> {
match *node {
Some(ref mut x) => {
match key.cmp(&x.key) {
Less => find_mut(&mut x.left, key),
Greater => find_mut(&mut x.right, key),
Equal => Some(&mut x.value),
}
}
None => None
}
}
fn insert<K: Ord, V>(node: &mut Option<Box<TreeNode<K, V>>>,
key: K, value: V) -> Option<V> {
match *node {
Some(ref mut save) => {
match key.cmp(&save.key) {
Less => {
let inserted = insert(&mut save.left, key, value);
skew(save);
split(save);
inserted
}
Greater => {
let inserted = insert(&mut save.right, key, value);
skew(save);
split(save);
inserted
}
Equal => {
save.key = key;
Some(replace(&mut save.value, value))
}
}
}
None => {
*node = Some(box TreeNode::new(key, value));
None
}
}
}
fn remove<K: Ord, V>(node: &mut Option<Box<TreeNode<K, V>>>,
key: &K) -> Option<V> {
fn heir_swap<K: Ord, V>(node: &mut Box<TreeNode<K, V>>,
child: &mut Option<Box<TreeNode<K, V>>>) {
// *could* be done without recursion, but it won't borrow check
for x in child.mut_iter() {
if x.right.is_some() {
heir_swap(node, &mut x.right);
} else {
swap(&mut node.key, &mut x.key);
swap(&mut node.value, &mut x.value);
}
}
}
match *node {
None => {
return None; // bottom of tree
}
Some(ref mut save) => {
let (ret, rebalance) = match key.cmp(&save.key) {
Less => (remove(&mut save.left, key), true),
Greater => (remove(&mut save.right, key), true),
Equal => {
if save.left.is_some() {
if save.right.is_some() {
let mut left = save.left.take_unwrap();
if left.right.is_some() {
heir_swap(save, &mut left.right);
} else {
swap(&mut save.key, &mut left.key);
swap(&mut save.value, &mut left.value);
}
save.left = Some(left);
(remove(&mut save.left, key), true)
} else {
let new = save.left.take_unwrap();
let box TreeNode{value, ..} = replace(save, new);
*save = save.left.take_unwrap();
(Some(value), true)
}
} else if save.right.is_some() {
let new = save.right.take_unwrap();
let box TreeNode{value, ..} = replace(save, new);
(Some(value), true)
} else {
(None, false)
}
}
};
if rebalance {
let left_level = save.left.as_ref().map_or(0, |x| x.level);
let right_level = save.right.as_ref().map_or(0, |x| x.level);
// re-balance, if necessary
if left_level < save.level - 1 || right_level < save.level - 1 {
save.level -= 1;
if right_level > save.level {
for x in save.right.mut_iter() { x.level = save.level }
}
skew(save);
for right in save.right.mut_iter() {
skew(right);
for x in right.right.mut_iter() { skew(x) }
}
split(save);
for x in save.right.mut_iter() { split(x) }
}
return ret;
}
}
}
return match node.take() {
Some(box TreeNode{value, ..}) => Some(value), None => fail!()
};
}
impl<K: Ord, V> FromIterator<(K, V)> for TreeMap<K, V> {
fn from_iter<T: Iterator<(K, V)>>(iter: T) -> TreeMap<K, V> {
let mut map = TreeMap::new();
map.extend(iter);
map
}
}
impl<K: Ord, V> Extendable<(K, V)> for TreeMap<K, V> {
#[inline]
fn extend<T: Iterator<(K, V)>>(&mut self, mut iter: T) {
for (k, v) in iter {
self.insert(k, v);
}
}
}
impl<T: Ord> FromIterator<T> for TreeSet<T> {
fn from_iter<Iter: Iterator<T>>(iter: Iter) -> TreeSet<T> {
let mut set = TreeSet::new();
set.extend(iter);
set
}
}
impl<T: Ord> Extendable<T> for TreeSet<T> {
#[inline]
fn extend<Iter: Iterator<T>>(&mut self, mut iter: Iter) {
for elem in iter {
self.insert(elem);
}
}
}
#[cfg(test)]
mod test_treemap {
use super::{TreeMap, TreeNode};
use std::rand::Rng;
use std::rand;
#[test]
fn find_empty() {
let m: TreeMap<int,int> = TreeMap::new();
assert!(m.find(&5) == None);
}
#[test]
fn find_not_found() {
let mut m = TreeMap::new();
assert!(m.insert(1, 2));
assert!(m.insert(5, 3));
assert!(m.insert(9, 3));
assert_eq!(m.find(&2), None);
}
#[test]
fn test_find_mut() {
let mut m = TreeMap::new();
assert!(m.insert(1, 12));
assert!(m.insert(2, 8));
assert!(m.insert(5, 14));
let new = 100;
match m.find_mut(&5) {
None => fail!(), Some(x) => *x = new
}
assert_eq!(m.find(&5), Some(&new));
}
#[test]
fn insert_replace() {
let mut m = TreeMap::new();
assert!(m.insert(5, 2));
assert!(m.insert(2, 9));
assert!(!m.insert(2, 11));
assert_eq!(m.find(&2).unwrap(), &11);
}
#[test]
fn test_clear() {
let mut m = TreeMap::new();
m.clear();
assert!(m.insert(5, 11));
assert!(m.insert(12, -3));
assert!(m.insert(19, 2));
m.clear();
assert!(m.find(&5).is_none());
assert!(m.find(&12).is_none());
assert!(m.find(&19).is_none());
assert!(m.is_empty());
}
#[test]
fn u8_map() {
let mut m = TreeMap::new();
let k1 = "foo".as_bytes();
let k2 = "bar".as_bytes();
let v1 = "baz".as_bytes();
let v2 = "foobar".as_bytes();
m.insert(k1.clone(), v1.clone());
m.insert(k2.clone(), v2.clone());
assert_eq!(m.find(&k2), Some(&v2));
assert_eq!(m.find(&k1), Some(&v1));
}
fn check_equal<K: PartialEq + Ord, V: PartialEq>(ctrl: &[(K, V)],
map: &TreeMap<K, V>) {
assert_eq!(ctrl.is_empty(), map.is_empty());
for x in ctrl.iter() {
let &(ref k, ref v) = x;
assert!(map.find(k).unwrap() == v)
}
for (map_k, map_v) in map.iter() {
let mut found = false;
for x in ctrl.iter() {
let &(ref ctrl_k, ref ctrl_v) = x;
if *map_k == *ctrl_k {
assert!(*map_v == *ctrl_v);
found = true;
break;
}
}
assert!(found);
}
}
fn check_left<K: Ord, V>(node: &Option<Box<TreeNode<K, V>>>,
parent: &Box<TreeNode<K, V>>) {
match *node {
Some(ref r) => {
assert_eq!(r.key.cmp(&parent.key), Less);
assert!(r.level == parent.level - 1); // left is black
check_left(&r.left, r);
check_right(&r.right, r, false);
}
None => assert!(parent.level == 1) // parent is leaf
}
}
fn check_right<K: Ord, V>(node: &Option<Box<TreeNode<K, V>>>,
parent: &Box<TreeNode<K, V>>,
parent_red: bool) {
match *node {
Some(ref r) => {
assert_eq!(r.key.cmp(&parent.key), Greater);
let red = r.level == parent.level;
if parent_red { assert!(!red) } // no dual horizontal links
// Right red or black
assert!(red || r.level == parent.level - 1);
check_left(&r.left, r);
check_right(&r.right, r, red);
}
None => assert!(parent.level == 1) // parent is leaf
}
}
fn check_structure<K: Ord, V>(map: &TreeMap<K, V>) {
match map.root {
Some(ref r) => {
check_left(&r.left, r);
check_right(&r.right, r, false);
}
None => ()
}
}
#[test]
fn test_rand_int() {
let mut map: TreeMap<int,int> = TreeMap::new();
let mut ctrl = vec![];
check_equal(ctrl.as_slice(), &map);
assert!(map.find(&5).is_none());
let mut rng: rand::IsaacRng = rand::SeedableRng::from_seed(&[42]);
for _ in range(0, 3) {
for _ in range(0, 90) {
let k = rng.gen();
let v = rng.gen();
if !ctrl.iter().any(|x| x == &(k, v)) {
assert!(map.insert(k, v));
ctrl.push((k, v));
check_structure(&map);
check_equal(ctrl.as_slice(), &map);
}
}
for _ in range(0, 30) {
let r = rng.gen_range(0, ctrl.len());
let (key, _) = ctrl.remove(r).unwrap();
assert!(map.remove(&key));
check_structure(&map);
check_equal(ctrl.as_slice(), &map);
}
}
}
#[test]
fn test_len() {
let mut m = TreeMap::new();
assert!(m.insert(3, 6));
assert_eq!(m.len(), 1);
assert!(m.insert(0, 0));
assert_eq!(m.len(), 2);
assert!(m.insert(4, 8));
assert_eq!(m.len(), 3);
assert!(m.remove(&3));
assert_eq!(m.len(), 2);
assert!(!m.remove(&5));
assert_eq!(m.len(), 2);
assert!(m.insert(2, 4));
assert_eq!(m.len(), 3);
assert!(m.insert(1, 2));
assert_eq!(m.len(), 4);
}
#[test]
fn test_iterator() {
let mut m = TreeMap::new();
assert!(m.insert(3, 6));
assert!(m.insert(0, 0));
assert!(m.insert(4, 8));
assert!(m.insert(2, 4));
assert!(m.insert(1, 2));
let mut n = 0;
for (k, v) in m.iter() {
assert_eq!(*k, n);
assert_eq!(*v, n * 2);
n += 1;
}
assert_eq!(n, 5);
}
#[test]
fn test_interval_iteration() {
let mut m = TreeMap::new();
for i in range(1, 100) {
assert!(m.insert(i * 2, i * 4));
}
for i in range(1, 198) {
let mut lb_it = m.lower_bound(&i);
let (&k, &v) = lb_it.next().unwrap();
let lb = i + i % 2;
assert_eq!(lb, k);
assert_eq!(lb * 2, v);
let mut ub_it = m.upper_bound(&i);
let (&k, &v) = ub_it.next().unwrap();
let ub = i + 2 - i % 2;
assert_eq!(ub, k);
assert_eq!(ub * 2, v);
}
let mut end_it = m.lower_bound(&199);
assert_eq!(end_it.next(), None);
}
#[test]
fn test_rev_iter() {
let mut m = TreeMap::new();
assert!(m.insert(3, 6));
assert!(m.insert(0, 0));
assert!(m.insert(4, 8));
assert!(m.insert(2, 4));
assert!(m.insert(1, 2));
let mut n = 4;
for (k, v) in m.rev_iter() {
assert_eq!(*k, n);
assert_eq!(*v, n * 2);
n -= 1;
}
}
#[test]
fn test_mut_iter() {
let mut m = TreeMap::new();
for i in range(0u, 10) {
assert!(m.insert(i, 100 * i));
}
for (i, (&k, v)) in m.mut_iter().enumerate() {
*v += k * 10 + i; // 000 + 00 + 0, 100 + 10 + 1, ...
}
for (&k, &v) in m.iter() {
assert_eq!(v, 111 * k);
}
}
#[test]
fn test_mut_rev_iter() {
let mut m = TreeMap::new();
for i in range(0u, 10) {
assert!(m.insert(i, 100 * i));
}
for (i, (&k, v)) in m.mut_rev_iter().enumerate() {
*v += k * 10 + (9 - i); // 900 + 90 + (9 - 0), 800 + 80 + (9 - 1), ...
}
for (&k, &v) in m.iter() {
assert_eq!(v, 111 * k);
}
}
#[test]
fn test_mut_interval_iter() {
let mut m_lower = TreeMap::new();
let mut m_upper = TreeMap::new();
for i in range(1, 100) {
assert!(m_lower.insert(i * 2, i * 4));
assert!(m_upper.insert(i * 2, i * 4));
}
for i in range(1, 199) {
let mut lb_it = m_lower.mut_lower_bound(&i);
let (&k, v) = lb_it.next().unwrap();
let lb = i + i % 2;
assert_eq!(lb, k);
*v -= k;
}
for i in range(0, 198) {
let mut ub_it = m_upper.mut_upper_bound(&i);
let (&k, v) = ub_it.next().unwrap();
let ub = i + 2 - i % 2;
assert_eq!(ub, k);
*v -= k;
}
assert!(m_lower.mut_lower_bound(&199).next().is_none());
assert!(m_upper.mut_upper_bound(&198).next().is_none());
assert!(m_lower.iter().all(|(_, &x)| x == 0));
assert!(m_upper.iter().all(|(_, &x)| x == 0));
}
#[test]
fn test_eq() {
let mut a = TreeMap::new();
let mut b = TreeMap::new();
assert!(a == b);
assert!(a.insert(0, 5));
assert!(a != b);
assert!(b.insert(0, 4));
assert!(a != b);
assert!(a.insert(5, 19));
assert!(a != b);
assert!(!b.insert(0, 5));
assert!(a != b);
assert!(b.insert(5, 19));
assert!(a == b);
}
#[test]
fn test_lt() {
let mut a = TreeMap::new();
let mut b = TreeMap::new();
assert!(!(a < b) && !(b < a));
assert!(b.insert(0, 5));
assert!(a < b);
assert!(a.insert(0, 7));
assert!(!(a < b) && b < a);
assert!(b.insert(-2, 0));
assert!(b < a);
assert!(a.insert(-5, 2));
assert!(a < b);
assert!(a.insert(6, 2));
assert!(a < b && !(b < a));
}
#[test]
fn test_ord() {
let mut a = TreeMap::new();
let mut b = TreeMap::new();
assert!(a <= b && a >= b);
assert!(a.insert(1, 1));
assert!(a > b && a >= b);
assert!(b < a && b <= a);
assert!(b.insert(2, 2));
assert!(b > a && b >= a);
assert!(a < b && a <= b);
}
#[test]
fn test_show() {
let mut map: TreeMap<int, int> = TreeMap::new();
let empty: TreeMap<int, int> = TreeMap::new();
map.insert(1, 2);
map.insert(3, 4);
let map_str = format!("{}", map);
assert!(map_str == "{1: 2, 3: 4}".to_string());
assert_eq!(format!("{}", empty), "{}".to_string());
}
#[test]
fn test_lazy_iterator() {
let mut m = TreeMap::new();
let (x1, y1) = (2, 5);
let (x2, y2) = (9, 12);
let (x3, y3) = (20, -3);
let (x4, y4) = (29, 5);
let (x5, y5) = (103, 3);
assert!(m.insert(x1, y1));
assert!(m.insert(x2, y2));
assert!(m.insert(x3, y3));
assert!(m.insert(x4, y4));
assert!(m.insert(x5, y5));
let m = m;
let mut a = m.iter();
assert_eq!(a.next().unwrap(), (&x1, &y1));
assert_eq!(a.next().unwrap(), (&x2, &y2));
assert_eq!(a.next().unwrap(), (&x3, &y3));
assert_eq!(a.next().unwrap(), (&x4, &y4));
assert_eq!(a.next().unwrap(), (&x5, &y5));
assert!(a.next().is_none());
let mut b = m.iter();
let expected = [(&x1, &y1), (&x2, &y2), (&x3, &y3), (&x4, &y4),
(&x5, &y5)];
let mut i = 0;
for x in b {
assert_eq!(expected[i], x);
i += 1;
if i == 2 {
break
}
}
for x in b {
assert_eq!(expected[i], x);
i += 1;
}
}
#[test]
fn test_from_iter() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: TreeMap<int, int> = xs.iter().map(|&x| x).collect();
for &(k, v) in xs.iter() {
assert_eq!(map.find(&k), Some(&v));
}
}
}
#[cfg(test)]
mod bench {
extern crate test;
use self::test::Bencher;
use super::TreeMap;
use deque::bench::{insert_rand_n, insert_seq_n, find_rand_n, find_seq_n};
    // Insert rand
#[bench]
pub fn insert_rand_100(b: &mut Bencher) {
let mut m : TreeMap<uint,uint> = TreeMap::new();
insert_rand_n(100, &mut m, b);
}
#[bench]
pub fn insert_rand_10_000(b: &mut Bencher) {
let mut m : TreeMap<uint,uint> = TreeMap::new();
insert_rand_n(10_000, &mut m, b);
}
// Insert seq
#[bench]
pub fn insert_seq_100(b: &mut Bencher) {
let mut m : TreeMap<uint,uint> = TreeMap::new();
insert_seq_n(100, &mut m, b);
}
#[bench]
pub fn insert_seq_10_000(b: &mut Bencher) {
let mut m : TreeMap<uint,uint> = TreeMap::new();
insert_seq_n(10_000, &mut m, b);
}
// Find rand
#[bench]
pub fn find_rand_100(b: &mut Bencher) {
let mut m : TreeMap<uint,uint> = TreeMap::new();
find_rand_n(100, &mut m, b);
}
#[bench]
pub fn find_rand_10_000(b: &mut Bencher) {
let mut m : TreeMap<uint,uint> = TreeMap::new();<|fim▁hole|> find_rand_n(10_000, &mut m, b);
}
// Find seq
#[bench]
pub fn find_seq_100(b: &mut Bencher) {
let mut m : TreeMap<uint,uint> = TreeMap::new();
find_seq_n(100, &mut m, b);
}
#[bench]
pub fn find_seq_10_000(b: &mut Bencher) {
let mut m : TreeMap<uint,uint> = TreeMap::new();
find_seq_n(10_000, &mut m, b);
}
}
#[cfg(test)]
mod test_set {
use super::{TreeMap, TreeSet};
#[test]
fn test_clear() {
let mut s = TreeSet::new();
s.clear();
assert!(s.insert(5));
assert!(s.insert(12));
assert!(s.insert(19));
s.clear();
assert!(!s.contains(&5));
assert!(!s.contains(&12));
assert!(!s.contains(&19));
assert!(s.is_empty());
}
#[test]
fn test_disjoint() {
let mut xs = TreeSet::new();
let mut ys = TreeSet::new();
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(xs.insert(5));
assert!(ys.insert(11));
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(xs.insert(7));
assert!(xs.insert(19));
assert!(xs.insert(4));
assert!(ys.insert(2));
assert!(ys.insert(-11));
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(ys.insert(7));
assert!(!xs.is_disjoint(&ys));
assert!(!ys.is_disjoint(&xs));
}
#[test]
fn test_subset_and_superset() {
let mut a = TreeSet::new();
assert!(a.insert(0));
assert!(a.insert(5));
assert!(a.insert(11));
assert!(a.insert(7));
let mut b = TreeSet::new();
assert!(b.insert(0));
assert!(b.insert(7));
assert!(b.insert(19));
assert!(b.insert(250));
assert!(b.insert(11));
assert!(b.insert(200));
assert!(!a.is_subset(&b));
assert!(!a.is_superset(&b));
assert!(!b.is_subset(&a));
assert!(!b.is_superset(&a));
assert!(b.insert(5));
assert!(a.is_subset(&b));
assert!(!a.is_superset(&b));
assert!(!b.is_subset(&a));
assert!(b.is_superset(&a));
}
#[test]
fn test_iterator() {
let mut m = TreeSet::new();
assert!(m.insert(3));
assert!(m.insert(0));
assert!(m.insert(4));
assert!(m.insert(2));
assert!(m.insert(1));
let mut n = 0;
for x in m.iter() {
assert_eq!(*x, n);
n += 1
}
}
#[test]
fn test_rev_iter() {
let mut m = TreeSet::new();
assert!(m.insert(3));
assert!(m.insert(0));
assert!(m.insert(4));
assert!(m.insert(2));
assert!(m.insert(1));
let mut n = 4;
for x in m.rev_iter() {
assert_eq!(*x, n);
n -= 1;
}
}
#[test]
fn test_move_iter() {
let s: TreeSet<int> = range(0, 5).collect();
let mut n = 0;
for x in s.move_iter() {
assert_eq!(x, n);
n += 1;
}
}
#[test]
fn test_move_iter_size_hint() {
let s: TreeSet<int> = vec!(0, 1).move_iter().collect();
let mut it = s.move_iter();
assert_eq!(it.size_hint(), (2, Some(2)));
assert!(it.next() != None);
assert_eq!(it.size_hint(), (1, Some(1)));
assert!(it.next() != None);
assert_eq!(it.size_hint(), (0, Some(0)));
assert_eq!(it.next(), None);
}
#[test]
fn test_clone_eq() {
let mut m = TreeSet::new();
m.insert(1);
m.insert(2);
assert!(m.clone() == m);
}
fn check(a: &[int],
b: &[int],
expected: &[int],
f: |&TreeSet<int>, &TreeSet<int>, f: |&int| -> bool| -> bool) {
let mut set_a = TreeSet::new();
let mut set_b = TreeSet::new();
for x in a.iter() { assert!(set_a.insert(*x)) }
for y in b.iter() { assert!(set_b.insert(*y)) }
let mut i = 0;
f(&set_a, &set_b, |x| {
assert_eq!(*x, expected[i]);
i += 1;
true
});
assert_eq!(i, expected.len());
}
#[test]
fn test_intersection() {
fn check_intersection(a: &[int], b: &[int], expected: &[int]) {
check(a, b, expected, |x, y, f| x.intersection(y).advance(f))
}
check_intersection([], [], []);
check_intersection([1, 2, 3], [], []);
check_intersection([], [1, 2, 3], []);
check_intersection([2], [1, 2, 3], [2]);
check_intersection([1, 2, 3], [2], [2]);
check_intersection([11, 1, 3, 77, 103, 5, -5],
[2, 11, 77, -9, -42, 5, 3],
[3, 5, 11, 77]);
}
#[test]
fn test_difference() {
fn check_difference(a: &[int], b: &[int], expected: &[int]) {
check(a, b, expected, |x, y, f| x.difference(y).advance(f))
}
check_difference([], [], []);
check_difference([1, 12], [], [1, 12]);
check_difference([], [1, 2, 3, 9], []);
check_difference([1, 3, 5, 9, 11],
[3, 9],
[1, 5, 11]);
check_difference([-5, 11, 22, 33, 40, 42],
[-12, -5, 14, 23, 34, 38, 39, 50],
[11, 22, 33, 40, 42]);
}
#[test]
fn test_symmetric_difference() {
fn check_symmetric_difference(a: &[int], b: &[int],
expected: &[int]) {
check(a, b, expected, |x, y, f| x.symmetric_difference(y).advance(f))
}
check_symmetric_difference([], [], []);
check_symmetric_difference([1, 2, 3], [2], [1, 3]);
check_symmetric_difference([2], [1, 2, 3], [1, 3]);
check_symmetric_difference([1, 3, 5, 9, 11],
[-2, 3, 9, 14, 22],
[-2, 1, 5, 11, 14, 22]);
}
#[test]
fn test_union() {
fn check_union(a: &[int], b: &[int],
expected: &[int]) {
check(a, b, expected, |x, y, f| x.union(y).advance(f))
}
check_union([], [], []);
check_union([1, 2, 3], [2], [1, 2, 3]);
check_union([2], [1, 2, 3], [1, 2, 3]);
check_union([1, 3, 5, 9, 11, 16, 19, 24],
[-2, 1, 5, 9, 13, 19],
[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24]);
}
#[test]
fn test_zip() {
let mut x = TreeSet::new();
x.insert(5u);
x.insert(12u);
x.insert(11u);
let mut y = TreeSet::new();
y.insert("foo");
y.insert("bar");
let x = x;
let y = y;
let mut z = x.iter().zip(y.iter());
// FIXME: #5801: this needs a type hint to compile...
let result: Option<(&uint, & &'static str)> = z.next();
assert_eq!(result.unwrap(), (&5u, &("bar")));
let result: Option<(&uint, & &'static str)> = z.next();
assert_eq!(result.unwrap(), (&11u, &("foo")));
let result: Option<(&uint, & &'static str)> = z.next();
assert!(result.is_none());
}
#[test]
fn test_swap() {
let mut m = TreeMap::new();
assert_eq!(m.swap(1, 2), None);
assert_eq!(m.swap(1, 3), Some(2));
assert_eq!(m.swap(1, 4), Some(3));
}
#[test]
fn test_pop() {
let mut m = TreeMap::new();
m.insert(1, 2);
assert_eq!(m.pop(&1), Some(2));
assert_eq!(m.pop(&1), None);
}
#[test]
fn test_from_iter() {
let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
let set: TreeSet<int> = xs.iter().map(|&x| x).collect();
for x in xs.iter() {
assert!(set.contains(x));
}
}
#[test]
fn test_show() {
let mut set: TreeSet<int> = TreeSet::new();
let empty: TreeSet<int> = TreeSet::new();
set.insert(1);
set.insert(2);
let set_str = format!("{}", set);
assert!(set_str == "{1, 2}".to_string());
assert_eq!(format!("{}", empty), "{}".to_string());
}
}<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use chrono::{offset::Utc, DateTime};
use diesel::{self, pg::PgConnection};
mod password;
use self::password::Password;
pub use self::password::{
CreationError as PasswordCreationError, PlaintextPassword, ValidationError, VerificationError,
};
use schema::local_auth;
use user::{AuthenticatedUser, UnauthenticatedUser, UnverifiedUser};
/// `LocalAuth` can be queried from the database, but is only really usable as a tool to "log in" a
/// user.
#[derive(Debug, Queryable, QueryableByName)]
#[table_name = "local_auth"]
pub struct LocalAuth {
id: i32,
password: Password,
user_id: i32, // foreign key to User
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
}
impl LocalAuth {
pub fn id(&self) -> i32 {
self.id
}
pub fn created_at(&self) -> DateTime<Utc> {
self.created_at
}
pub fn user_id(&self) -> i32 {
self.user_id
}
/// Log In a user, given an `UnauthenticatedUser` and a `PlaintextPassword`.
///
/// This method ensures first that the `UnauthenticatedUser` is the same user that this
/// `LocalAuth` is associated with, and then continues to verify the `PlaintextPassword`
    /// against this type's `Password`. Upon successful password verification, an
/// `AuthenticatedUser` is created.
pub(crate) fn log_in(
self,
user: UnauthenticatedUser,
password: PlaintextPassword,
) -> Result<AuthenticatedUser, VerificationError> {
use self::password::Verify;
if self.user_id != user.id {
return Err(VerificationError::Process);
}
self.password.verify(password).map(|_| AuthenticatedUser {
id: user.id,
primary_email: user.primary_email,
created_at: user.created_at,
updated_at: user.updated_at,
})
}
}
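// Illustrative usage sketch (the lookup helper below is an assumption made
// for the example; it is not defined in this module):
//
//     let auth: LocalAuth = find_local_auth_by_user_id(conn, user.id)?; // hypothetical query
//     let authed: AuthenticatedUser = auth.log_in(user, password)?;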
/// This type exists to create a new `LocalAuth` record in the database.
#[derive(Insertable)]
#[table_name = "local_auth"]
pub struct NewLocalAuth {
password: Password,
created_at: DateTime<Utc>,
user_id: i32,
}
impl NewLocalAuth {
/// Insert into the database
pub fn insert(self, conn: &PgConnection) -> Result<LocalAuth, diesel::result::Error> {
use diesel::prelude::*;
diesel::insert_into(local_auth::table)
.values(&self)
.get_result(conn)
}
/// Create a `NewLocalAuth`
pub fn new(
user: &UnverifiedUser,
password: PlaintextPassword,
) -> Result<Self, PasswordCreationError> {
use self::password::Validate;
let password = password.validate()?;
NewLocalAuth::create(user, password)
}
/// Create a `NewLocalAuth` with a redundant password to check for consistency.
pub fn new_from_two(
user: &UnverifiedUser,
password: PlaintextPassword,
password2: PlaintextPassword,
) -> Result<Self, PasswordCreationError> {
use self::password::Validate;
let password = password
.validate()
.and_then(|password| password.compare(password2))?;
NewLocalAuth::create(user, password)
}
fn create(
user: &UnverifiedUser,
password: PlaintextPassword,
) -> Result<Self, PasswordCreationError> {
use self::password::Create;
let password = Password::create(password)?;
Ok(NewLocalAuth {
password: password,
created_at: Utc::now(),
user_id: user.id,
})
}
}
#[cfg(test)]
mod tests {
use super::NewLocalAuth;
use test_helper::*;
#[test]<|fim▁hole|> with_local_auth(conn, &user, password, |_| Ok(()))
})
})
}
#[test]
fn dont_create_local_auth_with_invalid_password() {
with_connection(|conn| {
with_unverified_user(conn, |user| {
let password = create_plaintext_password("short")?;
let local_auth = NewLocalAuth::new(&user, password);
assert!(
local_auth.is_err(),
"Should not have created local auth with bad password"
);
Ok(())
})
})
}
#[test]
fn dont_create_local_auth_with_mismatched_passwords() {
with_connection(|conn| {
with_unverified_user(conn, |user| {
let p1 = create_plaintext_password("agoodpassword")?;
let p2 = create_plaintext_password("abadpassword")?;
let local_auth = NewLocalAuth::new_from_two(&user, p1, p2);
assert!(
local_auth.is_err(),
"Should not have created LocalAuth from mismatched passwords"
);
Ok(())
})
})
}
}<|fim▁end|>
|
fn create_local_auth() {
with_connection(|conn| {
with_unverified_user(conn, |user| {
let password = "testpass";
|
<|file_name|>json.d.ts<|end_file_name|><|fim▁begin|>/// <reference path="../_import/_fibjs.d.ts" />
/**
 * @description JSON encoding and decoding module
* 引用方式:<|fim▁hole|> * ```JavaScript
* var encoding = require('encoding');
* var json = encoding.json;
* ```
* 或者
* ```JavaScript
* var json = require('json');
* ```
*
*/
declare module 'json' {
/**
 * @description Encode a variable in JSON format
 * @param data The variable to encode
 * @return The encoded string
*
*/
function encode(data: any): string;
/**
 * @description Decode a JSON string into a variable
 * @param data The string to decode
 * @return The decoded variable
*
*/
function decode(data: string): any;
}<|fim▁end|>
| |
<|file_name|>pkcs7.py<|end_file_name|><|fim▁begin|>import binascii
import StringIO
class PKCS7Encoder(object):
'''
RFC 2315: PKCS#7 page 21
Some content-encryption algorithms assume the
input length is a multiple of k octets, where k > 1, and
let the application define a method for handling inputs
whose lengths are not a multiple of k octets. For such
algorithms, the method shall be to pad the input at the
trailing end with k - (l mod k) octets all having value k -
(l mod k), where l is the length of the input. In other
words, the input is padded at the trailing end with one of
the following strings:
01 -- if l mod k = k-1
02 02 -- if l mod k = k-2
.
.
.<|fim▁hole|>
The padding can be removed unambiguously since all input is
padded and no padding string is a suffix of another. This
padding method is well-defined if and only if k < 256;
methods for larger k are an open issue for further study.
'''
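    # Worked example (illustrative, following the rules quoted above): with the
    # default block size k=16, a 10-byte input gets six bytes of padding, each
    # with value 0x06, so
    #     PKCS7Encoder().encode('0123456789') == '0123456789' + '\x06' * 6
    # and decode() strips those six bytes off again.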
def __init__(self, k=16):
self.k = k
## @param text The padded text for which the padding is to be removed.
# @exception ValueError Raised when the input padding is missing or corrupt.
def decode(self, text):
'''
Remove the PKCS#7 padding from a text string
'''
nl = len(text)
val = int(binascii.hexlify(text[-1]), 16)
if val > self.k:
raise ValueError('Input is not padded or padding is corrupt')
l = nl - val
return text[:l]
## @param text The text to encode.
def encode(self, text):
'''
Pad an input string according to PKCS#7
'''
l = len(text)
output = StringIO.StringIO()
val = self.k - (l % self.k)
for _ in xrange(val):
output.write('%02x' % val)
return text + binascii.unhexlify(output.getvalue())<|fim▁end|>
|
k k ... k k -- if l mod k = 0
|
<|file_name|>Ice.cpp<|end_file_name|><|fim▁begin|>//
// Ice.cpp
// Alchemy
//
// Created by Kyounghwan on 2014. 3. 1..
// 1206 Ice 2 - - 30 1 sq. - 마법 Slow 50% 확률로 3초간 Slow. 1102 1101 1
//
#include "Ice.h"
/* ALCHEMY PARAMETER */
#define DEFAULT_INDEX 0
#define LOOP 0
#define TWEEN_EASING_MAX_INDEX 0
/* OPTION */
#define AP 30
Ice::Ice(unsigned char index)
:Alchemy(Alchemy::resource_table[index])
{
m_hp = 0;
m_ap = AP;
}
Ice::~Ice()
{}
Alchemy* Ice::create(PEObject* obj)
{
Ice* pIce = new Ice(obj->PE_getResourceIndex());
return pIce;
}
void Ice::PE_initAnimation()
{
ArmatureAnimation* ani;
init(m_name.c_str());
setAnchorPoint(Vec2(0.5f, 0.0f));
ani = getAnimation();
ani->playWithIndex(DEFAULT_INDEX, -1, -1);
}
bool Ice::PE_update(unsigned int flag) {
Vec2 index = getPosIndex();
Vec2 tower_pos = getPosition();
Rect tower_box = getBoundingBox();
int idx_x = index.x;
int idx_y = index.y;
int monster_count = 0;
for(int y=idx_y-1; y<idx_y+2; y++)
{
if(y<0 || y>ROW_NUM-1) continue;
int monster_num = m_pCollision->m_monsters_matrix[idx_x].size();
monster_count += monster_num;
if(monster_num>0)
{
for(int i=0; i<monster_num; i++)
<|fim▁hole|> {
obj->Hit(m_ap);
if(rand()%2 == 1)
{
obj->slow(0.5, 3);
}
}
}
}
}
return false;
}<|fim▁end|>
|
{
Monster* obj;
obj = m_pCollision->m_monsters_matrix[idx_x][i];
if(tower_box.intersectsRect(obj->getBoundingBox()))
|
<|file_name|>test_cfunctions.py<|end_file_name|><|fim▁begin|>from sympy import symbols, Symbol, exp, log, pi, Rational, S
from sympy.codegen.cfunctions import (
expm1, log1p, exp2, log2, fma, log10, Sqrt, Cbrt, hypot
)
from sympy.core.function import expand_log
def test_expm1():
# Eval
assert expm1(0) == 0
x = Symbol('x', real=True, finite=True)
# Expand and rewrite
assert expm1(x).expand(func=True) - exp(x) == -1
assert expm1(x).rewrite('tractable') - exp(x) == -1
assert expm1(x).rewrite('exp') - exp(x) == -1
# Precision
assert not ((exp(1e-10).evalf() - 1) - 1e-10 - 5e-21) < 1e-22 # for comparison
assert abs(expm1(1e-10).evalf() - 1e-10 - 5e-21) < 1e-22
# Properties
assert expm1(x).is_real
assert expm1(x).is_finite
# Diff
assert expm1(42*x).diff(x) - 42*exp(42*x) == 0
assert expm1(42*x).diff(x) - expm1(42*x).expand(func=True).diff(x) == 0
def test_log1p():
# Eval
assert log1p(0) == 0
d = S(10)
assert expand_log(log1p(d**-1000) - log(d**1000 + 1) + log(d**1000)) == 0
x = Symbol('x', real=True, finite=True)
# Expand and rewrite
assert log1p(x).expand(func=True) - log(x + 1) == 0
assert log1p(x).rewrite('tractable') - log(x + 1) == 0
assert log1p(x).rewrite('log') - log(x + 1) == 0
# Precision
assert not abs(log(1e-99 + 1).evalf() - 1e-99) < 1e-100 # for comparison
assert abs(expand_log(log1p(1e-99)).evalf() - 1e-99) < 1e-100
# Properties
assert log1p(-2**(-S(1)/2)).is_real
assert not log1p(-1).is_finite
assert log1p(pi).is_finite
assert not log1p(x).is_positive
assert log1p(Symbol('y', positive=True)).is_positive
assert not log1p(x).is_zero
assert log1p(Symbol('z', zero=True)).is_zero
assert not log1p(x).is_nonnegative
assert log1p(Symbol('o', nonnegative=True)).is_nonnegative
# Diff
assert log1p(42*x).diff(x) - 42/(42*x + 1) == 0
assert log1p(42*x).diff(x) - log1p(42*x).expand(func=True).diff(x) == 0
def test_exp2():
# Eval
assert exp2(2) == 4
x = Symbol('x', real=True, finite=True)
# Expand
assert exp2(x).expand(func=True) - 2**x == 0
# Diff
assert exp2(42*x).diff(x) - 42*exp2(42*x)*log(2) == 0
    assert exp2(42*x).diff(x) - exp2(42*x).expand(func=True).diff(x) == 0
def test_log2():
# Eval
assert log2(8) == 3
assert log2(pi) != log(pi)/log(2) # log2 should *save* (CPU) instructions
x = Symbol('x', real=True, finite=True)
assert log2(x) != log(x)/log(2)
assert log2(2**x) == x
# Expand
assert log2(x).expand(func=True) - log(x)/log(2) == 0
# Diff
assert log2(42*x).diff() - 1/(log(2)*x) == 0
assert log2(42*x).diff() - log2(42*x).expand(func=True).diff(x) == 0
def test_fma():
x, y, z = symbols('x y z')
# Expand
assert fma(x, y, z).expand(func=True) - x*y - z == 0
expr = fma(17*x, 42*y, 101*z)
# Diff
assert expr.diff(x) - expr.expand(func=True).diff(x) == 0
assert expr.diff(y) - expr.expand(func=True).diff(y) == 0
assert expr.diff(z) - expr.expand(func=True).diff(z) == 0
assert expr.diff(x) - 17*42*y == 0
assert expr.diff(y) - 17*42*x == 0
assert expr.diff(z) - 101 == 0
def test_log10():
x = Symbol('x')
# Expand
assert log10(x).expand(func=True) - log(x)/log(10) == 0
# Diff
assert log10(42*x).diff(x) - 1/(log(10)*x) == 0
assert log10(42*x).diff(x) - log10(42*x).expand(func=True).diff(x) == 0
def test_Cbrt():
x = Symbol('x')
# Expand
assert Cbrt(x).expand(func=True) - x**Rational(1, 3) == 0
# Diff
assert Cbrt(42*x).diff(x) - 42*(42*x)**(Rational(1, 3) - 1)/3 == 0
assert Cbrt(42*x).diff(x) - Cbrt(42*x).expand(func=True).diff(x) == 0
def test_Sqrt():
x = Symbol('x')
# Expand
assert Sqrt(x).expand(func=True) - x**Rational(1, 2) == 0
# Diff
assert Sqrt(42*x).diff(x) - 42*(42*x)**(Rational(1, 2) - 1)/2 == 0
assert Sqrt(42*x).diff(x) - Sqrt(42*x).expand(func=True).diff(x) == 0<|fim▁hole|>
def test_hypot():
x, y = symbols('x y')
# Expand
assert hypot(x, y).expand(func=True) - (x**2 + y**2)**Rational(1, 2) == 0
# Diff
assert hypot(17*x, 42*y).diff(x).expand(func=True) - hypot(17*x, 42*y).expand(func=True).diff(x) == 0
assert hypot(17*x, 42*y).diff(y).expand(func=True) - hypot(17*x, 42*y).expand(func=True).diff(y) == 0
assert hypot(17*x, 42*y).diff(x).expand(func=True) - 2*17*17*x*((17*x)**2 + (42*y)**2)**Rational(-1, 2)/2 == 0
assert hypot(17*x, 42*y).diff(y).expand(func=True) - 2*42*42*y*((17*x)**2 + (42*y)**2)**Rational(-1, 2)/2 == 0<|fim▁end|>
| |
<|file_name|>sv.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="sv" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<source>About LMMS</source>
<translation>Om LMMS</translation>
</message>
<message>
<source>Version %1 (%2/%3, Qt %4, %5)</source>
<translation>Version %1 (%2/%3, Qt %4, %5)</translation>
</message>
<message>
<source>About</source>
<translation>Om</translation>
</message>
<message>
<source>LMMS - easy music production for everyone</source>
<translation>LMMS - enkel musikproduktion för alla</translation>
</message>
<message>
<source>Authors</source>
<translation>Medverkande</translation>
</message>
<message>
<source>Translation</source>
<translation>Översättning</translation>
</message>
<message>
<source>Current language not translated (or native English).
If you're interested in translating LMMS in another language or want to improve existing translations, you're welcome to help us! Simply contact the maintainer!</source>
<translation type="unfinished"/>
</message>
<message>
<source>License</source>
<translation>Licens</translation>
</message>
<message>
<source>LMMS</source>
<translation>LMMS</translation>
</message>
<message>
<source>Involved</source>
<translation>Involverad</translation>
</message>
<message>
<source>Contributors ordered by number of commits:</source>
<translation>Medverkande, ordnade efter mängd bidrag:</translation>
</message>
<message>
<source>Copyright © %1</source>
<translation>Copyright © %1</translation>
</message>
<message>
<source><html><head/><body><p><a href="https://lmms.io"><span style=" text-decoration: underline; color:#0000ff;">https://lmms.io</span></a></p></body></html></source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AmplifierControlDialog</name>
<message>
<source>VOL</source>
<translation>VOL</translation>
</message>
<message>
<source>Volume:</source>
<translation>Volym:</translation>
</message>
<message>
<source>PAN</source>
<translation>PANORERA</translation>
</message>
<message>
<source>Panning:</source>
<translation>Panorering:</translation>
</message>
<message>
<source>LEFT</source>
<translation>VÄNSTER</translation>
</message>
<message>
<source>Left gain:</source>
<translation>Vänster förstärkning:</translation>
</message>
<message>
<source>RIGHT</source>
<translation>HÖGER</translation>
</message>
<message>
<source>Right gain:</source>
<translation>Höger förstärkning:</translation>
</message>
</context>
<context>
<name>AmplifierControls</name>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>Panning</source>
<translation>Panorering</translation>
</message>
<message>
<source>Left gain</source>
<translation>Vänster förstärkning</translation>
</message>
<message>
<source>Right gain</source>
<translation>Höger förstärkning</translation>
</message>
</context>
<context>
<name>AudioAlsaSetupWidget</name>
<message>
<source>DEVICE</source>
<translation>ENHET</translation>
</message>
<message>
<source>CHANNELS</source>
<translation>KANALER</translation>
</message>
</context>
<context>
<name>AudioFileProcessorView</name>
<message>
<source>Open other sample</source>
<translation>Öppna annan ljudfil</translation>
</message>
<message>
<source>Click here, if you want to open another audio-file. A dialog will appear where you can select your file. Settings like looping-mode, start and end-points, amplify-value, and so on are not reset. So, it may not sound like the original sample.</source>
<translation>Klicka här för att öppna en annan ljudfil. En dialog visas där du väljer din fil. Inställningar som looping, start och slutpunkter, amplifiering och sådant omställs inte. Därför låter det kanske inte som originalfilen.</translation>
</message>
<message>
<source>Reverse sample</source>
<translation>Spela baklänges</translation>
</message>
<message>
<source>If you enable this button, the whole sample is reversed. This is useful for cool effects, e.g. a reversed crash.</source>
<translation>Den här knappen gör att ljudfilen spelas baklänges. Den kan användas för intressanta effeker t.ex. en baklänges cymbal.</translation>
</message>
<message>
<source>Amplify:</source>
<translation>Förstärkning:</translation>
</message>
<message>
<source>With this knob you can set the amplify ratio. When you set a value of 100% your sample isn't changed. Otherwise it will be amplified up or down (your actual sample-file isn't touched!)</source>
<translation>Med detta vred ställer du in förstärkningen. Vid 100% blir det ingen skillnad. Annars blir din ljudfil mer eller mindre högljudd, men originalfilen förändras inte.</translation>
</message>
<message>
<source>Startpoint:</source>
<translation>Startpunkt:</translation>
</message>
<message>
<source>Endpoint:</source>
<translation>Slutpunkt:</translation>
</message>
<message>
<source>Continue sample playback across notes</source>
<translation>Forsätt spela ljudfil över noter</translation>
</message>
<message>
<source>Enabling this option makes the sample continue playing across different notes - if you change pitch, or the note length stops before the end of the sample, then the next note played will continue where it left off. To reset the playback to the start of the sample, insert a note at the bottom of the keyboard (< 20 Hz)</source>
<translation>Denna inställningen gör att ljudfilen förtsätter spela över noter. Om en not avslutas före ljudfilen är slut fortsätter nästa not där den förra slutade. Om du vill starta från början av ljudfilen innan den spelat färdigt, placera en not på botten av pianot (vid 20Hz)</translation>
</message>
<message>
<source>Disable loop</source>
<translation>Avaktivera looping</translation>
</message>
<message>
<source>This button disables looping. The sample plays only once from start to end. </source>
<translation>Den här knappen avaktiverar looping. Ljudfilen spelas bara en gång från start till slut.</translation>
</message>
<message>
<source>Enable loop</source>
<translation>Aktivera looping</translation>
</message>
<message>
<source>This button enables forwards-looping. The sample loops between the end point and the loop point.</source>
<translation>Den här knappen aktiverar looping. Ljudfilen loopar mellan slutpunkten och looppunkten.</translation>
</message>
<message>
<source>This button enables ping-pong-looping. The sample loops backwards and forwards between the end point and the loop point.</source>
<translation>Den här knappen aktiverar "ping-pong" looping. Ljudfilen spelar från start till slut, och sen tilbaks, och fortsäter så.</translation>
</message>
<message>
<source>With this knob you can set the point where AudioFileProcessor should begin playing your sample. </source>
<translation>Med den här vreden ställer du in vartifrån ljudfilen ska börja spela.</translation>
</message>
<message>
<source>With this knob you can set the point where AudioFileProcessor should stop playing your sample. </source>
<translation>Med den här vreden ställer du in vart ljudfilen slutar spela.</translation>
</message>
<message>
<source>Loopback point:</source>
<translation>Loopback punkt:</translation>
</message>
<message>
<source>With this knob you can set the point where the loop starts. </source>
<translation>Den här vreden ställer in vart loopen startar.</translation>
</message>
</context>
<context>
<name>AudioFileProcessorWaveView</name>
<message>
<source>Sample length:</source>
<translation>Ljudfilens längd:</translation>
</message>
</context>
<context>
<name>AudioJack</name>
<message>
<source>JACK client restarted</source>
<translation>JACK klienten omstartades</translation>
</message>
<message>
<source>LMMS was kicked by JACK for some reason. Therefore the JACK backend of LMMS has been restarted. You will have to make manual connections again.</source>
<translation>LMMS blev bortkopplat från JACK. LMMS JACK backend omstartades därfor. Du får manuellt koppla om igen.</translation>
</message>
<message>
<source>JACK server down</source>
<translation>JACK-server nerstängd</translation>
</message>
<message>
<source>The JACK server seems to have been shutdown and starting a new instance failed. Therefore LMMS is unable to proceed. You should save your project and restart JACK and LMMS.</source>
<translation>JACK-servern stängdes ned och det gick inte starta en ny. LMMS kan inte fortsätta. Du bör spara ditt projekt och starta om både JACK och LMMS.</translation>
</message>
<message>
<source>CLIENT-NAME</source>
<translation>KLIENT-NAMN</translation>
</message>
<message>
<source>CHANNELS</source>
<translation>KANALER</translation>
</message>
</context>
<context>
<name>AudioOss::setupWidget</name>
<message>
<source>DEVICE</source>
<translation>ENHET</translation>
</message>
<message>
<source>CHANNELS</source>
<translation>KANALER</translation>
</message>
</context>
<context>
<name>AudioPortAudio::setupWidget</name>
<message>
<source>BACKEND</source>
<translation>BACKEND</translation>
</message>
<message>
<source>DEVICE</source>
<translation>ENHET</translation>
</message>
</context>
<context>
<name>AudioPulseAudio::setupWidget</name>
<message>
<source>DEVICE</source>
<translation>ENHET</translation>
</message>
<message>
<source>CHANNELS</source>
<translation>KANALER</translation>
</message>
</context>
<context>
<name>AudioSdl::setupWidget</name>
<message>
<source>DEVICE</source>
<translation>ENHET</translation>
</message>
</context>
<context>
<name>AudioSndio::setupWidget</name>
<message>
<source>DEVICE</source>
<translation>ENHET</translation>
</message>
<message>
<source>CHANNELS</source>
<translation>KANALER</translation>
</message>
</context>
<context>
<name>AudioSoundIo::setupWidget</name>
<message>
<source>BACKEND</source>
<translation>BAKÄNDE</translation>
</message>
<message>
<source>DEVICE</source>
<translation>ENHET</translation>
</message>
</context>
<context>
<name>AutomatableModel</name>
<message>
<source>&Reset (%1%2)</source>
<translation>&Nollställ (%1%2)</translation>
</message>
<message>
<source>&Copy value (%1%2)</source>
<translation>Kopiera värde (%1%2)</translation>
</message>
<message>
<source>&Paste value (%1%2)</source>
<translation>&Klistra in värde (%1%2)</translation>
</message>
<message>
<source>Edit song-global automation</source>
<translation>Redigera global automation</translation>
</message>
<message>
<source>Connected to %1</source>
<translation>Kopplad till %1</translation>
</message>
<message>
<source>Connected to controller</source>
<translation>Kopplad till controller</translation>
</message>
<message>
<source>Edit connection...</source>
<translation>Redigera koppling...</translation>
</message>
<message>
<source>Remove connection</source>
<translation>Ta bort koppling</translation>
</message>
<message>
<source>Connect to controller...</source>
<translation>Koppla till kontroller...</translation>
</message>
<message>
<source>Remove song-global automation</source>
<translation>Ta bort global automation</translation>
</message>
<message>
<source>Remove all linked controls</source>
<translation>Ta bort alla kopplade kontroller</translation>
</message>
</context>
<context>
<name>AutomationEditor</name>
<message>
<source>Please open an automation pattern with the context menu of a control!</source>
<translation>Öppna ett automationsmönster ifrån en kontrollers kontextmeny!</translation>
</message>
<message>
<source>Values copied</source>
<translation>Värden kopierade</translation>
</message>
<message>
<source>All selected values were copied to the clipboard.</source>
<translation>Alla valda värden blev kopierade till urklipp.</translation>
</message>
</context>
<context>
<name>AutomationEditorWindow</name>
<message>
<source>Play/pause current pattern (Space)</source>
<translation>Spela/pausa aktuellt mönster (Mellanslag)</translation>
</message>
<message>
<source>Click here if you want to play the current pattern. This is useful while editing it. The pattern is automatically looped when the end is reached.</source>
<translation>Klicka här för att spela det aktuella mönstret, detta är användbart när man redigerar. Mönstret spelas från början igen när det nått sitt slut.</translation>
</message>
<message>
<source>Stop playing of current pattern (Space)</source>
<translation>Sluta spela aktuellt mönster (mellanslag)</translation>
</message>
<message>
<source>Click here if you want to stop playing of the current pattern.</source>
<translation>Klicka här för att stoppa uppspelning av de aktuella mönstret.</translation>
</message>
<message>
<source>Draw mode (Shift+D)</source>
<translation>Ritläge (Shift+D)</translation>
</message>
<message>
<source>Erase mode (Shift+E)</source>
<translation>Suddläge (Shift+E)</translation>
</message>
<message>
<source>Flip vertically</source>
<translation>Spegla vertikalt</translation>
</message>
<message>
<source>Flip horizontally</source>
<translation>Spegla horizontellt</translation>
</message>
<message>
<source>Click here and the pattern will be inverted.The points are flipped in the y direction. </source>
<translation>Klicka här för att spegla mönstret. Punkterna förflyttas på y-axeln</translation>
</message>
<message>
<source>Click here and the pattern will be reversed. The points are flipped in the x direction.</source>
<translation>Klicka här för att spegla mönstret. Punkterna förflyttas på x-axeln</translation>
</message>
<message>
<source>Click here and draw-mode will be activated. In this mode you can add and move single values. This is the default mode which is used most of the time. You can also press 'Shift+D' on your keyboard to activate this mode.</source>
<translation>Klicka här för att aktivera ritläget. I detta läget kan du lägga till och förflytta individuella värden. Det här är standardläget. Det går också att trycka 'Shift+D' på tangentborded för att aktivera detta läget.</translation>
</message>
<message>
<source>Click here and erase-mode will be activated. In this mode you can erase single values. You can also press 'Shift+E' on your keyboard to activate this mode.</source>
<translation>Klicka här för att aktivera suddläget. I detta läget kan du ta bort individuella värden. Det går också att trycka 'Shift+E' på tangentborded för att aktivera detta läget.</translation>
</message>
<message>
<source>Discrete progression</source>
<translation>Diskret talföljd</translation>
</message>
<message>
<source>Linear progression</source>
<translation>Linjär talföljd</translation>
</message>
<message>
<source>Cubic Hermite progression</source>
<translation>Cubic Hermite talföljd</translation>
</message>
<message>
<source>Tension value for spline</source>
<translation>Spänning i mönstrets spline</translation>
</message>
<message>
<source>A higher tension value may make a smoother curve but overshoot some values. A low tension value will cause the slope of the curve to level off at each control point.</source>
<translation>Högre spänning ger en mjuk kurva som ibland missar individuella punkter. Med lägre spänning planar kurvan ut nära punkterna.</translation>
</message>
<message>
<source>Click here to choose discrete progressions for this automation pattern. The value of the connected object will remain constant between control points and be set immediately to the new value when each control point is reached.</source>
<translation>Klicka här för att aktivera diskret talföljd. Värdet är konstant mella kontroll punkter och ändras direkt när en ny kontrollpunkt nås.</translation>
</message>
<message>
<source>Click here to choose linear progressions for this automation pattern. The value of the connected object will change at a steady rate over time between control points to reach the correct value at each control point without a sudden change.</source>
<translation>Klicka här för att aktivera linjär talföljd. Värdet ändras vid en stadig takt mellan kontrollpunkter för att gradvis nå nästa värde.</translation>
</message>
<message>
<source>Click here to choose cubic hermite progressions for this automation pattern. The value of the connected object will change in a smooth curve and ease in to the peaks and valleys.</source>
<translation>Klicka här för att aktivera cubic hermite talföljd. Värdet följer en mjuk kurva mellan kontrollpunkter.</translation>
</message>
<message>
<source>Cut selected values (%1+X)</source>
<translation>Klipp ut valda värden (%1+X)</translation>
</message>
<message>
<source>Copy selected values (%1+C)</source>
<translation>Kopiera valda värden (%1+C)</translation>
</message>
<message>
<source>Paste values from clipboard (%1+V)</source>
<translation>Klistra värden (%1+V)</translation>
</message>
<message>
<source>Click here and selected values will be cut into the clipboard. You can paste them anywhere in any pattern by clicking on the paste button.</source>
<translation>Klicka här för att klippa de valda värderna. Du kan sen klistra dem var som helst genom att klicka på klistra knappen.</translation>
</message>
<message>
<source>Click here and selected values will be copied into the clipboard. You can paste them anywhere in any pattern by clicking on the paste button.</source>
<translation>Klicka här för att kopiera de valda värderna. Du kan sedan klistra dem var som helst genom att klicka på klistra knappen.</translation>
</message>
<message>
<source>Click here and the values from the clipboard will be pasted at the first visible measure.</source>
<translation>Klicka här för att klistra kopierade värderna vid den första synliga metern.</translation>
</message>
<message>
<source>Tension: </source>
<translation>Spänning: </translation>
</message>
<message>
<source>Automation Editor - no pattern</source>
<translation>Redigera Automation - inget automationsmönster</translation>
</message>
<message>
<source>Automation Editor - %1</source>
<translation>Redigera Automation - %1</translation>
</message>
<message>
<source>Edit actions</source>
<translation>Redigera åtgärder</translation>
</message>
<message>
<source>Interpolation controls</source>
<translation>Interpoleringskontroller</translation>
</message>
<message>
<source>Timeline controls</source>
<translation>Tidslinjekontroller</translation>
</message>
<message>
<source>Zoom controls</source>
<translation>Zoomningskontroller</translation>
</message>
<message>
<source>Quantization controls</source>
<translation type="unfinished"/>
</message>
<message>
<source>Model is already connected to this pattern.</source>
<translation>Modellen är redan ansluten till det här mönstret.</translation>
</message>
</context>
<context>
<name>AutomationPattern</name>
<message>
<source>Drag a control while pressing <%1></source>
<translation>Dra en kontroll samtidigt som du håller <%1></translation>
</message>
</context>
<context>
<name>AutomationPatternView</name>
<message>
<source>double-click to open this pattern in automation editor</source>
<translation>dubbelklicka för att öppna det här automationsmönstret för redigering</translation>
</message>
<message>
<source>Open in Automation editor</source>
<translation>Redigera automationsmönster</translation>
</message>
<message>
<source>Clear</source>
<translation>Rensa</translation>
</message>
<message>
<source>Reset name</source>
<translation>Nollställ namn</translation>
</message>
<message>
<source>Change name</source>
<translation>Byt namn</translation>
</message>
<message>
<source>%1 Connections</source>
<translation>%1 Kopplingar</translation>
</message>
<message>
<source>Disconnect "%1"</source>
<translation>Avkoppla "%1"</translation>
</message>
<message>
<source>Set/clear record</source>
<translation type="unfinished"/>
</message>
<message>
<source>Flip Vertically (Visible)</source>
<translation>Spegla Vertikalt (Synligt)</translation>
</message>
<message>
<source>Flip Horizontally (Visible)</source>
<translation>Spegla Horizontellt (Synligt)</translation>
</message>
<message>
<source>Model is already connected to this pattern.</source>
<translation>Modellen är redan ansluten till det här mönstret.</translation>
</message>
</context>
<context>
<name>AutomationTrack</name>
<message>
<source>Automation track</source>
<translation>Automationsspår</translation>
</message>
</context>
<context>
<name>BBEditor</name>
<message>
<source>Beat+Bassline Editor</source>
<translation>Redigera Trummor+Bas</translation>
</message>
<message>
<source>Play/pause current beat/bassline (Space)</source>
<translation>Spela/pause Trummor+Bas</translation>
</message>
<message>
<source>Stop playback of current beat/bassline (Space)</source>
<translation>Avsluta uppspelning av trummor/bas</translation>
</message>
<message>
<source>Click here to play the current beat/bassline. The beat/bassline is automatically looped when its end is reached.</source>
<translation>Klicka här för att spela trummor/bas. Mönstret loopar automatiskt när det nått sitt slut.</translation>
</message>
<message>
<source>Click here to stop playing of current beat/bassline.</source>
<translation>Klicka här för att sluta spela trummor/bas.</translation>
</message>
<message>
<source>Add beat/bassline</source>
<translation>Lägg till trummor/bas</translation>
</message>
<message>
<source>Add automation-track</source>
<translation>Lägg till automationsspår</translation>
</message>
<message>
<source>Remove steps</source>
<translation>Ta bort steg</translation>
</message>
<message>
<source>Add steps</source>
<translation>Lägg till steg</translation>
</message>
<message>
<source>Beat selector</source>
<translation>Taktväljare</translation>
</message>
<message>
<source>Track and step actions</source>
<translation>Spår och stegåtgärder</translation>
</message>
<message>
<source>Clone Steps</source>
<translation>Klona steg</translation>
</message>
<message>
<source>Add sample-track</source>
<translation>Lägg till ljudfils-spår</translation>
</message>
</context>
<context>
<name>BBTCOView</name>
<message>
<source>Open in Beat+Bassline-Editor</source>
<translation>Redigera Trummor+Bas</translation>
</message>
<message>
<source>Reset name</source>
<translation>Nollställ namn</translation>
</message>
<message>
<source>Change name</source>
<translation>Byt namn</translation>
</message>
<message>
<source>Change color</source>
<translation>Byt färg</translation>
</message>
<message>
<source>Reset color to default</source>
<translation>Byt färg till standard</translation>
</message>
</context>
<context>
<name>BBTrack</name>
<message>
<source>Beat/Bassline %1</source>
<translation>Trum/Basmönster %1</translation>
</message>
<message>
<source>Clone of %1</source>
<translation>Kopia av %1</translation>
</message>
</context>
<context>
<name>BassBoosterControlDialog</name>
<message>
<source>FREQ</source>
<translation>FREQ</translation>
</message>
<message>
<source>Frequency:</source>
<translation>Frekvens:</translation>
</message>
<message>
<source>GAIN</source>
<translation>FÖRST</translation>
</message>
<message>
<source>Gain:</source>
<translation>Förstärkning:</translation>
</message>
<message>
<source>RATIO</source>
<translation>RATIO</translation>
</message>
<message>
<source>Ratio:</source>
<translation>Ratio:</translation>
</message>
</context>
<context>
<name>BassBoosterControls</name>
<message>
<source>Frequency</source>
<translation>Frekvens</translation>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
<message>
<source>Ratio</source>
<translation>Ratio</translation>
</message>
</context>
<context>
<name>BitcrushControlDialog</name>
<message>
<source>IN</source>
<translation>IN</translation>
</message>
<message>
<source>OUT</source>
<translation>UT</translation>
</message>
<message>
<source>GAIN</source>
<translation>FÖRST</translation>
</message>
<message>
<source>Input Gain:</source>
<translation>Ingångsförstärkning:</translation>
</message>
<message>
<source>NOIS</source>
<translation>NOIS</translation>
</message>
<message>
<source>Input Noise:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Output Gain:</source>
<translation>Utgångsförstärkning:</translation>
</message>
<message>
<source>CLIP</source>
<translation>KLIPP</translation>
</message>
<message>
<source>Output Clip:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Rate</source>
<translation>Hastighet</translation>
</message>
<message>
<source>Rate Enabled</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable samplerate-crushing</source>
<translation type="unfinished"/>
</message>
<message>
<source>Depth</source>
<translation>Djup</translation>
</message>
<message>
<source>Depth Enabled</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable bitdepth-crushing</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sample rate:</source>
<translation type="unfinished"/>
</message>
<message>
<source>STD</source>
<translation>STD</translation>
</message>
<message>
<source>Stereo difference:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Levels</source>
<translation>Nivåer</translation>
</message>
<message>
<source>Levels:</source>
<translation>Nivåer:</translation>
</message>
</context>
<context>
<name>CaptionMenu</name>
<message>
<source>&Help</source>
<translation>&Hjälp</translation>
</message>
<message>
<source>Help (not available)</source>
<translation>Hjälp (inte tillgängligt)</translation>
</message>
</context>
<context>
<name>CarlaInstrumentView</name>
<message>
<source>Show GUI</source>
<translation>Visa användargränssnitt</translation>
</message>
<message>
<source>Click here to show or hide the graphical user interface (GUI) of Carla.</source>
<translation>Klicka här för att visa eller gömma användargränssnittet för Carla.</translation>
</message>
</context>
<context>
<name>Controller</name>
<message>
<source>Controller %1</source>
<translation>Kontroller %1</translation>
</message>
</context>
<context>
<name>ControllerConnectionDialog</name>
<message>
<source>Connection Settings</source>
<translation>Kopplingsinställningar</translation>
</message>
<message>
<source>MIDI CONTROLLER</source>
<translation>MIDI-KONTROLLER</translation>
</message>
<message>
<source>Input channel</source>
<translation>Inputkanal</translation>
</message>
<message>
<source>CHANNEL</source>
<translation>KANAL</translation>
</message>
<message>
<source>Input controller</source>
<translation>Inputkontroller</translation>
</message>
<message>
<source>CONTROLLER</source>
<translation>KONTROLLER</translation>
</message>
<message>
<source>Auto Detect</source>
<translation>Upptäck Automatiskt</translation>
</message>
<message>
<source>MIDI-devices to receive MIDI-events from</source>
<translation>MIDI-enheter att ta emot MIDI-events från</translation>
</message>
<message>
<source>USER CONTROLLER</source>
<translation>ANVÄNDARKONTROLLER</translation>
</message>
<message>
<source>MAPPING FUNCTION</source>
<translation type="unfinished"/>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Cancel</source>
<translation>Avbryt</translation>
</message>
<message>
<source>LMMS</source>
<translation>LMMS</translation>
</message>
<message>
<source>Cycle Detected.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ControllerRackView</name>
<message>
<source>Controller Rack</source>
<translation>Kontrollrack</translation>
</message>
<message>
<source>Add</source>
<translation>Lägg till</translation>
</message>
<message>
<source>Confirm Delete</source>
<translation>Bekräfta Borttagning</translation>
</message>
<message>
<source>Confirm delete? There are existing connection(s) associated with this controller. There is no way to undo.</source>
<translation>Vill du verkligen ta bort? Det finns kopplingar till den här kontrollern och åtgärden går inte att ångra.</translation>
</message>
</context>
<context>
<name>ControllerView</name>
<message>
<source>Controls</source>
<translation>Kontroller</translation>
</message>
<message>
<source>Controllers are able to automate the value of a knob, slider, and other controls.</source>
<translation>Kontroller kan automatisera värdet på ett vred, ett reglage och andra kontroller.</translation>
</message>
<message>
<source>Rename controller</source>
<translation>Byt namn på kontroller</translation>
</message>
<message>
<source>Enter the new name for this controller</source>
<translation>Skriv nya namnet på kontrollern</translation>
</message>
<message>
<source>&Remove this controller</source>
<translation>&Ta bort den här kontrollern</translation>
</message>
<message>
<source>Re&name this controller</source>
<translation>Döp &om den här kontrollern</translation>
</message>
<message>
<source>LFO</source>
<translation>LFO</translation>
</message>
</context>
<context>
<name>CrossoverEQControlDialog</name>
<message>
<source>Band 1/2 Crossover:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Band 2/3 Crossover:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Band 3/4 Crossover:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Band 1 Gain:</source>
<translation>Band 1 Förstärkn.:</translation>
</message>
<message>
<source>Band 2 Gain:</source>
<translation>Band 2 Förstärkn.:</translation>
</message>
<message>
<source>Band 3 Gain:</source>
<translation>Band 3 Förstärkn.:</translation>
</message>
<message>
<source>Band 4 Gain:</source>
<translation>Band 4 Förstärkn.:</translation>
</message>
<message>
<source>Band 1 Mute</source>
<translation>Band 1 Tyst</translation>
</message>
<message>
<source>Mute Band 1</source>
<translation>Tysta Band 1</translation>
</message>
<message>
<source>Band 2 Mute</source>
<translation>Band 2 Tyst</translation>
</message>
<message>
<source>Mute Band 2</source>
<translation>Tysta Band 2</translation>
</message>
<message>
<source>Band 3 Mute</source>
<translation>Band 3 Tyst</translation>
</message>
<message>
<source>Mute Band 3</source>
<translation>Tysta Band 3</translation>
</message>
<message>
<source>Band 4 Mute</source>
<translation>Band 4 Tyst</translation>
</message>
<message>
<source>Mute Band 4</source>
<translation>Tysta Band 4</translation>
</message>
</context>
<context>
<name>DelayControls</name>
<message>
<source>Delay Samples</source>
<translation>Fördröjning i samplingar</translation>
</message>
<message>
<source>Feedback</source>
<translation>Återkoppling</translation>
</message>
<message>
<source>Lfo Frequency</source>
<translation>Lfo-frekvens</translation>
</message>
<message>
<source>Lfo Amount</source>
<translation>Lfo-mängd</translation>
</message>
<message>
<source>Output gain</source>
<translation>Utgångsförstärkning</translation>
</message>
</context>
<context>
<name>DelayControlsDialog</name>
<message>
<source>Lfo Amt</source>
<translation type="unfinished"/>
</message>
<message>
<source>Delay Time</source>
<translation>Tidsfördröjning</translation>
</message>
<message>
<source>Feedback Amount</source>
<translation>Återkopplingsmängd</translation>
</message>
<message>
<source>Lfo</source>
<translation>Lfo</translation>
</message>
<message>
<source>Out Gain</source>
<translation>Ut-förstärkning</translation>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
<message>
<source>DELAY</source>
<translation type="unfinished"/>
</message>
<message>
<source>FDBK</source>
<translation type="unfinished"/>
</message>
<message>
<source>RATE</source>
<translation type="unfinished"/>
</message>
<message>
<source>AMNT</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>DualFilterControlDialog</name>
<message>
<source>Filter 1 enabled</source>
<translation>Filter 1 aktiverat</translation>
</message>
<message>
<source>Filter 2 enabled</source>
<translation>Filter 2 aktiverat</translation>
</message>
<message>
<source>Click to enable/disable Filter 1</source>
<translation>Klicka för att aktivera/inaktivera Filter 1</translation>
</message>
<message>
<source>Click to enable/disable Filter 2</source>
<translation>Klicka för att aktivera/inaktivera Filter 2</translation>
</message>
<message>
<source>FREQ</source>
<translation>FREKV.</translation>
</message>
<message>
<source>Cutoff frequency</source>
<translation>Cutoff-frekvens</translation>
</message>
<message>
<source>RESO</source>
<translation>RESO</translation>
</message>
<message>
<source>Resonance</source>
<translation>Resonans</translation>
</message>
<message>
<source>GAIN</source>
<translation>FÖRST.</translation>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
<message>
<source>MIX</source>
<translation>MIX</translation>
</message>
<message>
<source>Mix</source>
<translation>Mix</translation>
</message>
</context>
<context>
<name>DualFilterControls</name>
<message>
<source>Filter 1 enabled</source>
<translation>Filter 1 aktiverat</translation>
</message>
<message>
<source>Filter 1 type</source>
<translation>Filter 1 typ</translation>
</message>
<message>
<source>Cutoff 1 frequency</source>
<translation>Cutoff-frekvens 1</translation>
</message>
<message>
<source>Q/Resonance 1</source>
<translation>Q/Resonans 1</translation>
</message>
<message>
<source>Gain 1</source>
<translation>Förstärkning 1</translation>
</message>
<message>
<source>Mix</source>
<translation>Mix</translation>
</message>
<message>
<source>Filter 2 enabled</source>
<translation>Filter 2 aktiverat</translation>
</message>
<message>
<source>Filter 2 type</source>
<translation>Filter 2 typ</translation>
</message>
<message>
<source>Cutoff 2 frequency</source>
<translation>Cutoff-frekvens 2</translation>
</message>
<message>
<source>Q/Resonance 2</source>
<translation>Q/Resonans 2</translation>
</message>
<message>
<source>Gain 2</source>
<translation>Förstärkning 2</translation>
</message>
<message>
<source>LowPass</source>
<translation>Lågpass</translation>
</message>
<message>
<source>HiPass</source>
<translation>Högpass</translation>
</message>
<message>
<source>BandPass csg</source>
<translation type="unfinished"/>
</message>
<message>
<source>BandPass czpg</source>
<translation type="unfinished"/>
</message>
<message>
<source>Notch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Allpass</source>
<translation type="unfinished"/>
</message>
<message>
<source>Moog</source>
<translation>Moog</translation>
</message>
<message>
<source>2x LowPass</source>
<translation>2x Lågpass</translation>
</message>
<message>
<source>RC LowPass 12dB</source>
<translation>RC Lågpass 12dB</translation>
</message>
<message>
<source>RC BandPass 12dB</source>
<translation>RC BandPass 12dB</translation>
</message>
<message>
<source>RC HighPass 12dB</source>
<translation>RC Högpass 12dB</translation>
</message>
<message>
<source>RC LowPass 24dB</source>
<translation>RC Lågpass 24dB</translation>
</message>
<message>
<source>RC BandPass 24dB</source>
<translation>RC BandPass 24dB</translation>
</message>
<message>
<source>RC HighPass 24dB</source>
<translation>RC Högpass 24dB</translation>
</message>
<message>
<source>Vocal Formant Filter</source>
<translation type="unfinished"/>
</message>
<message>
<source>2x Moog</source>
<translation type="unfinished"/>
</message>
<message>
<source>SV LowPass</source>
<translation>SV Lågpass</translation>
</message>
<message>
<source>SV BandPass</source>
<translation>SV BandPass</translation>
</message>
<message>
<source>SV HighPass</source>
<translation>SV Högpass</translation>
</message>
<message>
<source>SV Notch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Fast Formant</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tripole</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>Editor</name>
<message>
<source>Play (Space)</source>
<translation>Spela upp (Mellanslag)</translation>
</message>
<message>
<source>Stop (Space)</source>
<translation>Stoppa (Mellanslag)</translation>
</message>
<message>
<source>Record</source>
<translation>Spela in</translation>
</message>
<message>
<source>Record while playing</source>
<translation>Spela in under uppspelningen</translation>
</message>
<message>
<source>Transport controls</source>
<translation>Transportkontroller</translation>
</message>
</context>
<context>
<name>Effect</name>
<message>
<source>Effect enabled</source>
<translation>Effekt aktiverad</translation>
</message>
<message>
<source>Wet/Dry mix</source>
<translation>Blöt/Torr mix</translation>
</message>
<message>
<source>Gate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Decay</source>
<translation>Decay</translation>
</message>
</context>
<context>
<name>EffectChain</name>
<message>
<source>Effects enabled</source>
<translation>Effekter aktiverade</translation>
</message>
</context>
<context>
<name>EffectRackView</name>
<message>
<source>EFFECTS CHAIN</source>
<translation>EFFEKTKEDJA</translation>
</message>
<message>
<source>Add effect</source>
<translation>Lägg till effekt</translation>
</message>
</context>
<context>
<name>EffectSelectDialog</name>
<message>
<source>Add effect</source>
<translation>Lägg till effekt</translation>
</message>
<message>
<source>Name</source>
<translation>Namn</translation>
</message>
<message>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<source>Description</source>
<translation>Beskrivning</translation>
</message>
<message>
<source>Author</source>
<translation>Författare</translation>
</message>
</context>
<context>
<name>EffectView</name>
<message>
<source>Toggles the effect on or off.</source>
<translation>Slår på eller av effekten.</translation>
</message>
<message>
<source>On/Off</source>
<translation>På/Av</translation>
</message>
<message>
<source>W/D</source>
<translation>W/D</translation>
</message>
<message>
<source>Wet Level:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The Wet/Dry knob sets the ratio between the input signal and the effect signal that forms the output.</source>
<translation type="unfinished"/>
</message>
<message>
<source>DECAY</source>
<translation>DECAY</translation>
</message>
<message>
<source>Time:</source>
<translation>Tid:</translation>
</message>
<message>
<source>The Decay knob controls how many buffers of silence must pass before the plugin stops processing. Smaller values will reduce the CPU overhead but run the risk of clipping the tail on delay and reverb effects.</source>
<translation type="unfinished"/>
</message>
<message>
<source>GATE</source>
<translation>GATE</translation>
</message>
<message>
<source>Gate:</source>
<translation>Gate:</translation>
</message>
<message>
<source>The Gate knob controls the signal level that is considered to be 'silence' while deciding when to stop processing signals.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Controls</source>
<translation>Kontroller</translation>
</message>
<message>
<source>Effect plugins function as a chained series of effects where the signal will be processed from top to bottom.
The On/Off switch allows you to bypass a given plugin at any point in time.
The Wet/Dry knob controls the balance between the input signal and the effected signal that is the resulting output from the effect. The input for the stage is the output from the previous stage. So, the 'dry' signal for effects lower in the chain contains all of the previous effects.
The Decay knob controls how long the signal will continue to be processed after the notes have been released. The effect will stop processing signals when the volume has dropped below a given threshold for a given length of time. This knob sets the 'given length of time'. Longer times will require more CPU, so this number should be set low for most effects. It needs to be bumped up for effects that produce lengthy periods of silence, e.g. delays.
The Gate knob controls the 'given threshold' for the effect's auto shutdown. The clock for the 'given length of time' will begin as soon as the processed signal level drops below the level specified with this knob.
The Controls button opens a dialog for editing the effect's parameters.
Right clicking will bring up a context menu where you can change the order in which the effects are processed or delete an effect altogether.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Move &up</source>
<translation>Flytta &upp</translation>
</message>
<message>
<source>Move &down</source>
<translation>Flytta &ner</translation>
</message>
<message>
<source>&Remove this plugin</source>
<translation>&Ta bort det här insticksprogrammet</translation>
</message>
</context>
<context>
<name>EnvelopeAndLfoParameters</name>
<message>
<source>Predelay</source>
<translation>För-fördröjning</translation>
</message>
<message>
<source>Attack</source>
<translation>Attack</translation>
</message>
<message>
<source>Hold</source>
<translation>Hold</translation>
</message>
<message>
<source>Decay</source>
<translation>Decay</translation>
</message>
<message>
<source>Sustain</source>
<translation>Sustain</translation>
</message>
<message>
<source>Release</source>
<translation>Release</translation>
</message>
<message>
<source>Modulation</source>
<translation>Modulering</translation>
</message>
<message>
<source>LFO Predelay</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO Attack</source>
<translation>LFO-Attack</translation>
</message>
<message>
<source>LFO speed</source>
<translation>LFO-hastighet</translation>
</message>
<message>
<source>LFO Modulation</source>
<translation>LFO-Modulering</translation>
</message>
<message>
<source>LFO Wave Shape</source>
<translation>LFO-vågform</translation>
</message>
<message>
<source>Freq x 100</source>
<translation>Frekv. x 100</translation>
</message>
<message>
<source>Modulate Env-Amount</source>
<translation>Modulera Env-mängd</translation>
</message>
</context>
<context>
<name>EnvelopeAndLfoView</name>
<message>
<source>DEL</source>
<translation>DEL</translation>
</message>
<message>
<source>Predelay:</source>
<translation>För-fördröjning:</translation>
</message>
<message>
<source>Use this knob for setting predelay of the current envelope. The bigger this value the longer the time before start of actual envelope.</source>
<translation type="unfinished"/>
</message>
<message>
<source>ATT</source>
<translation>ATT</translation>
</message>
<message>
<source>Attack:</source>
<translation>Attack:</translation>
</message>
<message>
<source>Use this knob for setting attack-time of the current envelope. The bigger this value the longer the envelope needs to increase to attack-level. Choose a small value for instruments like pianos and a big value for strings.</source>
<translation type="unfinished"/>
</message>
<message>
<source>HOLD</source>
<translation>HOLD</translation>
</message>
<message>
<source>Hold:</source>
<translation>Hold:</translation>
</message>
<message>
<source>Use this knob for setting hold-time of the current envelope. The bigger this value the longer the envelope holds attack-level before it begins to decrease to sustain-level.</source>
<translation type="unfinished"/>
</message>
<message>
<source>DEC</source>
<translation>DEC</translation>
</message>
<message>
<source>Decay:</source>
<translation>Decay:</translation>
</message>
<message>
<source>Use this knob for setting decay-time of the current envelope. The bigger this value the longer the envelope needs to decrease from attack-level to sustain-level. Choose a small value for instruments like pianos.</source>
<translation type="unfinished"/>
</message>
<message>
<source>SUST</source>
<translation>SUST</translation>
</message>
<message>
<source>Sustain:</source>
<translation>Sustain:</translation>
</message>
<message>
<source>Use this knob for setting sustain-level of the current envelope. The bigger this value the higher the level on which the envelope stays before going down to zero.</source>
<translation type="unfinished"/>
</message>
<message>
<source>REL</source>
<translation>REL</translation>
</message>
<message>
<source>Release:</source>
<translation>Release:</translation>
</message>
<message>
<source>Use this knob for setting release-time of the current envelope. The bigger this value the longer the envelope needs to decrease from sustain-level to zero. Choose a big value for soft instruments like strings.</source>
<translation type="unfinished"/>
</message>
<message>
<source>AMT</source>
<translation>MÄNGD</translation>
</message>
<message>
<source>Modulation amount:</source>
<translation>Moduleringsmängd:</translation>
</message>
<message>
<source>Use this knob for setting modulation amount of the current envelope. The bigger this value the more the according size (e.g. volume or cutoff-frequency) will be influenced by this envelope.</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO predelay:</source>
<translation>LFO-för-fördröjning:</translation>
</message>
<message>
<source>Use this knob for setting predelay-time of the current LFO. The bigger this value the the time until the LFO starts to oscillate.</source>
<translation>Använd denna ratt för att ställa in för-fördröjningen för aktuell LFO. Ju högre värdet är, desto längre tid tar det innan LFO:n börjar oscillera.</translation>
</message>
<message>
<source>LFO- attack:</source>
<translation>LFO-attack:</translation>
</message>
<message>
<source>Use this knob for setting attack-time of the current LFO. The bigger this value the longer the LFO needs to increase its amplitude to maximum.</source>
<translation>Använd denna ratt för att ställa in attack-tiden för aktuell LFO. Ju högre värdet är, desto längre tid tar det för LFO:n att nå sin maximala amplitud.</translation>
</message>
<message>
<source>SPD</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO speed:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use this knob for setting speed of the current LFO. The bigger this value the faster the LFO oscillates and the faster will be your effect.</source>
<translation>Använd denna ratt för att ställa in hastigheten för aktuell LFO. Ju högre värdet är, desto snabbare oscillerar LFO:n och desto snabbare blir effekten.</translation>
</message>
<message>
<source>Use this knob for setting modulation amount of the current LFO. The bigger this value the more the selected size (e.g. volume or cutoff-frequency) will be influenced by this LFO.</source>
<translation>Använd denna ratt för att ställa in mängden modulering för aktuell LFO. Ju högre värdet är, desto mer kommer den valda storheten (t.ex. volym eller cutoff-frekvens) att påverkas av denna LFO.</translation>
</message>
<message>
<source>Click here for a sine-wave.</source>
<translation>Klicka här för sinusvåg.</translation>
</message>
<message>
<source>Click here for a triangle-wave.</source>
<translation>Klicka här för triangelvåg.</translation>
</message>
<message>
<source>Click here for a saw-wave for current.</source>
<translation>Klicka här för sågtandsvåg.</translation>
</message>
<message>
<source>Click here for a square-wave.</source>
<translation>Klicka här för fyrkantvåg.</translation>
</message>
<message>
<source>Click here for a user-defined wave. Afterwards, drag an according sample-file onto the LFO graph.</source>
<translation type="unfinished"/>
</message>
<message>
<source>FREQ x 100</source>
<translation>FREKV. x 100</translation>
</message>
<message>
<source>Click here if the frequency of this LFO should be multiplied by 100.</source>
<translation>Klicka här för att multiplicera frekvensen för denna LFO med 100.</translation>
</message>
<message>
<source>multiply LFO-frequency by 100</source>
<translation>multiplicera LFO-frekvensen med 100</translation>
</message>
<message>
<source>MODULATE ENV-AMOUNT</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to make the envelope-amount controlled by this LFO.</source>
<translation type="unfinished"/>
</message>
<message>
<source>control envelope-amount by this LFO</source>
<translation type="unfinished"/>
</message>
<message>
<source>ms/LFO:</source>
<translation>ms/LFO:</translation>
</message>
<message>
<source>Hint</source>
<translation>Ledtråd</translation>
</message>
<message>
<source>Drag a sample from somewhere and drop it in this window.</source>
<translation>Dra en ljudfil till det här fönstret.</translation>
</message>
<message>
<source>Click here for random wave.</source>
<translation>Klicka här för en slumpmässig vågform.</translation>
</message>
</context>
<context>
<name>EqControls</name>
<message>
<source>Input gain</source>
<translation>Ingångsförstärkning</translation>
</message>
<message>
<source>Output gain</source>
<translation>Utgångsförstärkning</translation>
</message>
<message>
<source>Low shelf gain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 1 gain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 2 gain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 3 gain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 4 gain</source>
<translation type="unfinished"/>
</message>
<message>
<source>High Shelf gain</source>
<translation type="unfinished"/>
</message>
<message>
<source>HP res</source>
<translation type="unfinished"/>
</message>
<message>
<source>Low Shelf res</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 1 BW</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 2 BW</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 3 BW</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 4 BW</source>
<translation type="unfinished"/>
</message>
<message>
<source>High Shelf res</source>
<translation type="unfinished"/>
</message>
<message>
<source>LP res</source>
<translation type="unfinished"/>
</message>
<message>
<source>HP freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>Low Shelf freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 1 freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 2 freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 3 freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 4 freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>High shelf freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>LP freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>HP active</source>
<translation type="unfinished"/>
</message>
<message>
<source>Low shelf active</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 1 active</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 2 active</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 3 active</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 4 active</source>
<translation type="unfinished"/>
</message>
<message>
<source>High shelf active</source>
<translation type="unfinished"/>
</message>
<message>
<source>LP active</source>
<translation type="unfinished"/>
</message>
<message>
<source>LP 12</source>
<translation type="unfinished"/>
</message>
<message>
<source>LP 24</source>
<translation type="unfinished"/>
</message>
<message>
<source>LP 48</source>
<translation type="unfinished"/>
</message>
<message>
<source>HP 12</source>
<translation type="unfinished"/>
</message>
<message>
<source>HP 24</source>
<translation type="unfinished"/>
</message>
<message>
<source>HP 48</source>
<translation type="unfinished"/>
</message>
<message>
<source>low pass type</source>
<translation>Lågpass-typ</translation>
</message>
<message>
<source>high pass type</source>
<translation>Högpass-typ</translation>
</message>
<message>
<source>Analyse IN</source>
<translation>Analysera IN</translation>
</message>
<message>
<source>Analyse OUT</source>
<translation>Analysera UT</translation>
</message>
</context>
<context>
<name>EqControlsDialog</name>
<message>
<source>HP</source>
<translation type="unfinished"/>
</message>
<message>
<source>Low Shelf</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak 4</source>
<translation type="unfinished"/>
</message>
<message>
<source>High Shelf</source>
<translation type="unfinished"/>
</message>
<message>
<source>LP</source>
<translation type="unfinished"/>
</message>
<message>
<source>In Gain</source>
<translation>In-förstärkning</translation>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
<message>
<source>Out Gain</source>
<translation>Ut-förstärkning</translation>
</message>
<message>
<source>Bandwidth: </source>
<translation>Bandbredd:</translation>
</message>
<message>
<source>Resonance : </source>
<translation>Resonans:</translation>
</message>
<message>
<source>Frequency:</source>
<translation>Frekvens:</translation>
</message>
<message>
<source>lp grp</source>
<translation type="unfinished"/>
</message>
<message>
<source>hp grp</source>
<translation type="unfinished"/>
</message>
<message>
<source> Octave</source>
<translation> Oktav</translation>
</message>
</context>
<context>
<name>EqHandle</name>
<message>
<source>Reso: </source>
<translation>Reso.:</translation>
</message>
<message>
<source>BW: </source>
<translation type="unfinished"/>
</message>
<message>
<source>Freq: </source>
<translation>Frekv.:</translation>
</message>
</context>
<context>
<name>ExportProjectDialog</name>
<message>
<source>Export project</source>
<translation>Exportera projekt</translation>
</message>
<message>
<source>Output</source>
<translation>Utgång</translation>
</message>
<message>
<source>File format:</source>
<translation>Fil-format:</translation>
</message>
<message>
<source>Samplerate:</source>
<translation>Samplingshastighet:</translation>
</message>
<message>
<source>44100 Hz</source>
<translation>44100 Hz</translation>
</message>
<message>
<source>48000 Hz</source>
<translation>48000 Hz</translation>
</message>
<message>
<source>88200 Hz</source>
<translation>88200 Hz</translation>
</message>
<message>
<source>96000 Hz</source>
<translation>96000 Hz</translation>
</message>
<message>
<source>192000 Hz</source>
<translation>192000 Hz</translation>
</message>
<message>
<source>Bitrate:</source>
<translation>Bit-hastighet:</translation>
</message>
<message>
<source>64 KBit/s</source>
<translation>64 KBit/s</translation>
</message>
<message>
<source>128 KBit/s</source>
<translation>128 KBit/s</translation>
</message>
<message>
<source>160 KBit/s</source>
<translation>160 KBit/s</translation>
</message>
<message>
<source>192 KBit/s</source>
<translation>192 KBit/s</translation>
</message>
<message>
<source>256 KBit/s</source>
<translation>256 KBit/s</translation>
</message>
<message>
<source>320 KBit/s</source>
<translation>320 KBit/s</translation>
</message>
<message>
<source>Depth:</source>
<translation>Djup:</translation>
</message>
<message>
<source>16 Bit Integer</source>
<translation type="unfinished"/>
</message>
<message>
<source>32 Bit Float</source>
<translation type="unfinished"/>
</message>
<message>
<source>Please note that not all of the parameters above apply for all file formats.</source>
<translation>Observera att inte alla inställningar ovan gäller för alla filformat.</translation>
</message>
<message>
<source>Quality settings</source>
<translation>Kvalitetsinställningar</translation>
</message>
<message>
<source>Interpolation:</source>
<translation>Interpolering:</translation>
</message>
<message>
<source>Zero Order Hold</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sinc Fastest</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sinc Medium (recommended)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sinc Best (very slow!)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Oversampling (use with care!):</source>
<translation>Översampling (använd varsamt!):</translation>
</message>
<message>
<source>1x (None)</source>
<translation>1x (Ingen)</translation>
</message>
<message>
<source>2x</source>
<translation>2x</translation>
</message>
<message>
<source>4x</source>
<translation>4x</translation>
</message>
<message>
<source>8x</source>
<translation>8x</translation>
</message>
<message>
<source>Start</source>
<translation>Starta</translation>
</message>
<message>
<source>Cancel</source>
<translation>Avbryt</translation>
</message>
<message>
<source>Export as loop (remove end silence)</source>
<translation>Exportera som loop (ta bort slut-tystnad)</translation>
</message>
<message>
<source>Export between loop markers</source>
<translation>Exportera mellan loop-markeringar</translation>
</message>
<message>
<source>Could not open file</source>
<translation>Kunde inte öppna fil</translation>
</message>
<message>
<source>Export project to %1</source>
<translation>Exportera projekt till %1</translation>
</message>
<message>
<source>Error</source>
<translation>Fel</translation>
</message>
<message>
<source>Error while determining file-encoder device. Please try to choose a different output format.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Rendering: %1%</source>
<translation>Renderar: %1%</translation>
</message>
<message>
<source>Could not open file %1 for writing.
Please make sure you have write permission to the file and the directory containing the file and try again!</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>Fader</name>
<message>
<source>Please enter a new value between %1 and %2:</source>
<translation>Ange ett nytt värde mellan %1 och %2:</translation>
</message>
</context>
<context>
<name>FileBrowser</name>
<message>
<source>Browser</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FileBrowserTreeWidget</name>
<message>
<source>Send to active instrument-track</source>
<translation>Skicka till aktivt instrument-spår</translation>
</message>
<message>
<source>Open in new instrument-track/B+B Editor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Loading sample</source>
<translation>Laddar ljudfil</translation>
</message>
<message>
<source>Please wait, loading sample for preview...</source>
<translation>Ljudfilen laddas för förhandslyssning...</translation>
</message>
<message>
<source>--- Factory files ---</source>
<translation>--- Grundfiler ---</translation>
</message>
<message>
<source>Open in new instrument-track/Song Editor</source>
<translation>Öppna i nytt instrument-spår/sång-editor</translation>
</message>
<message>
<source>Error</source>
<translation>Fel</translation>
</message>
<message>
<source>does not appear to be a valid</source>
<translation>verkar inte vara en giltig</translation>
</message>
<message>
<source>file</source>
<translation>fil</translation>
</message>
</context>
<context>
<name>FlangerControls</name>
<message>
<source>Delay Samples</source>
<translation>Fördröjning i samplingar</translation>
</message>
<message>
<source>Lfo Frequency</source>
<translation>Lfo-frekvens</translation>
</message>
<message>
<source>Seconds</source>
<translation>Sekunder</translation>
</message>
<message>
<source>Regen</source>
<translation type="unfinished"/>
</message>
<message>
<source>Noise</source>
<translation>Brus</translation>
</message>
<message>
<source>Invert</source>
<translation>Invertera</translation>
</message>
</context>
<context>
<name>FlangerControlsDialog</name>
<message>
<source>Delay Time:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Feedback Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<source>White Noise Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<source>DELAY</source>
<translation type="unfinished"/>
</message>
<message>
<source>RATE</source>
<translation type="unfinished"/>
</message>
<message>
<source>Rate:</source>
<translation type="unfinished"/>
</message>
<message>
<source>AMNT</source>
<translation type="unfinished"/>
</message>
<message>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<source>FDBK</source>
<translation type="unfinished"/>
</message>
<message>
<source>NOISE</source>
<translation type="unfinished"/>
</message>
<message>
<source>Invert</source>
<translation>Invertera</translation>
</message>
</context>
<context>
<name>FxLine</name>
<message>
<source>Channel send amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>The FX channel receives input from one or more instrument tracks.
It in turn can be routed to multiple other FX channels. LMMS automatically takes care of preventing infinite loops for you and doesn't allow making a connection that would result in an infinite loop.
In order to route the channel to another channel, select the FX channel and click on the "send" button on the channel you want to send to. The knob under the send button controls the level of signal that is sent to the channel.
You can remove and move FX channels in the context menu, which is accessed by right-clicking the FX channel.
</source>
<translation type="unfinished"/>
</message>
<message>
<source>Move &left</source>
<translation>Flytta &vänster</translation>
</message>
<message>
<source>Move &right</source>
<translation>Flytta &höger</translation>
</message>
<message>
<source>Rename &channel</source>
<translation>Byt namn på &kanal</translation>
</message>
<message>
<source>R&emove channel</source>
<translation>T&a bort kanal</translation>
</message>
<message>
<source>Remove &unused channels</source>
<translation>Ta bort &oanvända kanaler</translation>
</message>
</context>
<context>
<name>FxMixer</name>
<message>
<source>Master</source>
<translation>Master</translation>
</message>
<message>
<source>FX %1</source>
<translation>FX %1</translation>
</message>
</context>
<context>
<name>FxMixerView</name>
<message>
<source>FX-Mixer</source>
<translation>FX-Mixer</translation>
</message>
<message>
<source>FX Fader %1</source>
<translation>FX Fader %1</translation>
</message>
<message>
<source>Mute</source>
<translation>Tysta</translation>
</message>
<message>
<source>Mute this FX channel</source>
<translation>Tysta denna FX-kanal</translation>
</message>
<message>
<source>Solo</source>
<translation>Solo</translation>
</message>
<message>
<source>Solo FX channel</source>
<translation>FX-kanal Solo</translation>
</message>
</context>
<context>
<name>FxRoute</name>
<message>
<source>Amount to send from channel %1 to channel %2</source>
<translation>Mängd att skicka från kanal %1 till kanal %2</translation>
</message>
</context>
<context>
<name>GigInstrument</name>
<message>
<source>Bank</source>
<translation>Bank</translation>
</message>
<message>
<source>Patch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
</context>
<context>
<name>GigInstrumentView</name>
<message>
<source>Open other GIG file</source>
<translation>Öppna en annan GIG-fil</translation>
</message>
<message>
<source>Click here to open another GIG file</source>
<translation>Klicka här för att öppna en annan GIG-fil</translation>
</message>
<message>
<source>Choose the patch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to change which patch of the GIG file to use</source>
<translation type="unfinished"/>
</message>
<message>
<source>Change which instrument of the GIG file is being played</source>
<translation>Välj vilket instrument i GIG-filen som ska spelas</translation>
</message>
<message>
<source>Which GIG file is currently being used</source>
<translation>Vilken GIG-fil används för närvarande</translation>
</message>
<message>
<source>Which patch of the GIG file is currently being used</source>
<translation>Vilken del av GIG-filen används för närvarande</translation>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
<message>
<source>Factor to multiply samples by</source>
<translation>Faktor att multiplicera samplingar med</translation>
</message>
<message>
<source>Open GIG file</source>
<translation>Öppna GIG-fil</translation>
</message>
<message>
<source>GIG Files (*.gig)</source>
<translation>GIG-filer (*.gig)</translation>
</message>
</context>
<context>
<name>GuiApplication</name>
<message>
<source>Working directory</source>
<translation>Arbetskatalog</translation>
</message>
<message>
<source>The LMMS working directory %1 does not exist. Create it now? You can change the directory later via Edit -> Settings.</source>
<translation>Arbetskatalogen %1 för LMMS existerar inte. Vill du skapa detta nu? Du kan ändra katalog senare under Redigera -> Inställningar.</translation>
</message>
<message>
<source>Preparing UI</source>
<translation>Förbereder användargränssnitt</translation>
</message>
<message>
<source>Preparing song editor</source>
<translation>Förbereder sång-editor</translation>
</message>
<message>
<source>Preparing mixer</source>
<translation>Förbereder mixer</translation>
</message>
<message>
<source>Preparing controller rack</source>
<translation>Förbereder kontrollrack</translation>
</message>
<message>
<source>Preparing project notes</source>
<translation>Förbereder projektanteckningar</translation>
</message>
<message>
<source>Preparing beat/bassline editor</source>
<translation>Förbereder Takt/Bas-editor</translation>
</message>
<message>
<source>Preparing piano roll</source>
<translation>Förbereder pianorulle</translation>
</message>
<message>
<source>Preparing automation editor</source>
<translation>Förbereder automations-editor</translation>
</message>
</context>
<context>
<name>InstrumentFunctionArpeggio</name>
<message>
<source>Arpeggio</source>
<translation>Arpeggio</translation>
</message>
<message>
<source>Arpeggio type</source>
<translation>Arpeggio-typ</translation>
</message>
<message>
<source>Arpeggio range</source>
<translation>Arpeggio-omfång</translation>
</message>
<message>
<source>Arpeggio time</source>
<translation>Arpeggio-tid</translation>
</message>
<message>
<source>Arpeggio gate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Arpeggio direction</source>
<translation>Arpeggio-riktning</translation>
</message>
<message>
<source>Arpeggio mode</source>
<translation>Arpeggio-läge</translation>
</message>
<message>
<source>Up</source>
<translation>Upp</translation>
</message>
<message>
<source>Down</source>
<translation>Ner</translation>
</message>
<message>
<source>Up and down</source>
<translation>Upp och ner</translation>
</message>
<message>
<source>Random</source>
<translation>Slumpmässig</translation>
</message>
<message>
<source>Free</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sort</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sync</source>
<translation type="unfinished"/>
</message>
<message>
<source>Down and up</source>
<translation>Ner och upp</translation>
</message>
<message>
<source>Skip rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Miss rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Cycle steps</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InstrumentFunctionArpeggioView</name>
<message>
<source>ARPEGGIO</source>
<translation>ARPEGGIO</translation>
</message>
<message>
<source>An arpeggio is a method playing (especially plucked) instruments, which makes the music much livelier. The strings of such instruments (e.g. harps) are plucked like chords. The only difference is that this is done in a sequential order, so the notes are not played at the same time. Typical arpeggios are major or minor triads, but there are a lot of other possible chords, you can select.</source>
<translation type="unfinished"/>
</message>
<message>
<source>RANGE</source>
<translation>OMFÅNG</translation>
</message>
<message>
<source>Arpeggio range:</source>
<translation>Arpeggio-omfång:</translation>
</message>
<message>
<source>octave(s)</source>
<translation>oktav(er)</translation>
</message>
<message>
<source>Use this knob for setting the arpeggio range in octaves. The selected arpeggio will be played within specified number of octaves.</source>
<translation type="unfinished"/>
</message>
<message>
<source>TIME</source>
<translation>TID</translation>
</message>
<message>
<source>Arpeggio time:</source>
<translation>Arpeggio-tid:</translation>
</message>
<message>
<source>ms</source>
<translation>ms</translation>
</message>
<message>
<source>Use this knob for setting the arpeggio time in milliseconds. The arpeggio time specifies how long each arpeggio-tone should be played.</source>
<translation>Använd denna ratt för att ställa arpeggio-tiden i millisekunder. Arpeggio-tiden anger hur länge varje arpeggio-ton ska spelas.</translation>
</message>
<message>
<source>GATE</source>
<translation>GATE</translation>
</message>
<message>
<source>Arpeggio gate:</source>
<translation type="unfinished"/>
</message>
<message>
<source>%</source>
<translation>%</translation>
</message>
<message>
<source>Use this knob for setting the arpeggio gate. The arpeggio gate specifies the percent of a whole arpeggio-tone that should be played. With this you can make cool staccato arpeggios.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chord:</source>
<translation>Ackord:</translation>
</message>
<message>
<source>Direction:</source>
<translation>Riktning:</translation>
</message>
<message>
<source>Mode:</source>
<translation>Läge:</translation>
</message>
<message>
<source>SKIP</source>
<translation type="unfinished"/>
</message>
<message>
<source>Skip rate:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The skip function will make the arpeggiator pause one step randomly. From its start in full counter clockwise position and no effect it will gradually progress to full amnesia at maximum setting.</source>
<translation type="unfinished"/>
</message>
<message>
<source>MISS</source>
<translation type="unfinished"/>
</message>
<message>
<source>Miss rate:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The miss function will make the arpeggiator miss the intended note.</source>
<translation type="unfinished"/>
</message>
<message>
<source>CYCLE</source>
<translation type="unfinished"/>
</message>
<message>
<source>Cycle notes:</source>
<translation type="unfinished"/>
</message>
<message>
<source>note(s)</source>
<translation>not(er)</translation>
</message>
<message>
<source>Jumps over n steps in the arpeggio and cycles around if we're over the note range. If the total note range is evenly divisible by the number of steps jumped over you will get stuck in a shorter arpeggio or even on one note.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InstrumentFunctionNoteStacking</name>
<message>
<source>octave</source>
<translation>oktav</translation>
</message>
<message>
<source>Major</source>
<translation type="unfinished"/>
</message>
<message>
<source>Majb5</source>
<translation type="unfinished"/>
</message>
<message>
<source>minor</source>
<translation type="unfinished"/>
</message>
<message>
<source>minb5</source>
<translation type="unfinished"/>
</message>
<message>
<source>sus2</source>
<translation type="unfinished"/>
</message>
<message>
<source>sus4</source>
<translation type="unfinished"/>
</message>
<message>
<source>aug</source>
<translation type="unfinished"/>
</message>
<message>
<source>augsus4</source>
<translation type="unfinished"/>
</message>
<message>
<source>tri</source>
<translation type="unfinished"/>
</message>
<message>
<source>6</source>
<translation type="unfinished"/>
</message>
<message>
<source>6sus4</source>
<translation type="unfinished"/>
</message>
<message>
<source>6add9</source>
<translation type="unfinished"/>
</message>
<message>
<source>m6</source>
<translation type="unfinished"/>
</message>
<message>
<source>m6add9</source>
<translation type="unfinished"/>
</message>
<message>
<source>7</source>
<translation type="unfinished"/>
</message>
<message>
<source>7sus4</source>
<translation type="unfinished"/>
</message>
<message>
<source>7#5</source>
<translation type="unfinished"/>
</message>
<message>
<source>7b5</source>
<translation type="unfinished"/>
</message>
<message>
<source>7#9</source>
<translation type="unfinished"/>
</message>
<message>
<source>7b9</source>
<translation type="unfinished"/>
</message>
<message>
<source>7#5#9</source>
<translation type="unfinished"/>
</message>
<message>
<source>7#5b9</source>
<translation type="unfinished"/>
</message>
<message>
<source>7b5b9</source>
<translation type="unfinished"/>
</message>
<message>
<source>7add11</source>
<translation type="unfinished"/>
</message>
<message>
<source>7add13</source>
<translation type="unfinished"/>
</message>
<message>
<source>7#11</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj7</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj7b5</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj7#5</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj7#11</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj7add13</source>
<translation type="unfinished"/>
</message>
<message>
<source>m7</source>
<translation type="unfinished"/>
</message>
<message>
<source>m7b5</source>
<translation type="unfinished"/>
</message>
<message>
<source>m7b9</source>
<translation type="unfinished"/>
</message>
<message>
<source>m7add11</source>
<translation type="unfinished"/>
</message>
<message>
<source>m7add13</source>
<translation type="unfinished"/>
</message>
<message>
<source>m-Maj7</source>
<translation type="unfinished"/>
</message>
<message>
<source>m-Maj7add11</source>
<translation type="unfinished"/>
</message>
<message>
<source>m-Maj7add13</source>
<translation type="unfinished"/>
</message>
<message>
<source>9</source>
<translation type="unfinished"/>
</message>
<message>
<source>9sus4</source>
<translation type="unfinished"/>
</message>
<message>
<source>add9</source>
<translation type="unfinished"/>
</message>
<message>
<source>9#5</source>
<translation type="unfinished"/>
</message>
<message>
<source>9b5</source>
<translation type="unfinished"/>
</message>
<message>
<source>9#11</source>
<translation type="unfinished"/>
</message>
<message>
<source>9b13</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj9</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj9sus4</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj9#5</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj9#11</source>
<translation type="unfinished"/>
</message>
<message>
<source>m9</source>
<translation type="unfinished"/>
</message>
<message>
<source>madd9</source>
<translation type="unfinished"/>
</message>
<message>
<source>m9b5</source>
<translation type="unfinished"/>
</message>
<message>
<source>m9-Maj7</source>
<translation type="unfinished"/>
</message>
<message>
<source>11</source>
<translation type="unfinished"/>
</message>
<message>
<source>11b9</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj11</source>
<translation type="unfinished"/>
</message>
<message>
<source>m11</source>
<translation type="unfinished"/>
</message>
<message>
<source>m-Maj11</source>
<translation type="unfinished"/>
</message>
<message>
<source>13</source>
<translation type="unfinished"/>
</message>
<message>
<source>13#9</source>
<translation type="unfinished"/>
</message>
<message>
<source>13b9</source>
<translation type="unfinished"/>
</message>
<message>
<source>13b5b9</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maj13</source>
<translation type="unfinished"/>
</message>
<message>
<source>m13</source>
<translation type="unfinished"/>
</message>
<message>
<source>m-Maj13</source>
<translation type="unfinished"/>
</message>
<message>
<source>Harmonic minor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Melodic minor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Whole tone</source>
<translation type="unfinished"/>
</message>
<message>
<source>Diminished</source>
<translation type="unfinished"/>
</message>
<message>
<source>Major pentatonic</source>
<translation type="unfinished"/>
</message>
<message>
<source>Minor pentatonic</source>
<translation type="unfinished"/>
</message>
<message>
<source>Jap in sen</source>
<translation type="unfinished"/>
</message>
<message>
<source>Major bebop</source>
<translation type="unfinished"/>
</message>
<message>
<source>Dominant bebop</source>
<translation type="unfinished"/>
</message>
<message>
<source>Blues</source>
<translation type="unfinished"/>
</message>
<message>
<source>Arabic</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enigmatic</source>
<translation type="unfinished"/>
</message>
<message>
<source>Neopolitan</source>
<translation type="unfinished"/>
</message>
<message>
<source>Neopolitan minor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Hungarian minor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Dorian</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phrygolydian</source>
<translation type="unfinished"/>
</message>
<message>
<source>Lydian</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mixolydian</source>
<translation type="unfinished"/>
</message>
<message>
<source>Aeolian</source>
<translation type="unfinished"/>
</message>
<message>
<source>Locrian</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chords</source>
<translation>Ackord</translation>
</message>
<message>
<source>Chord type</source>
<translation>Ackord-typ</translation>
</message>
<message>
<source>Chord range</source>
<translation type="unfinished"/>
</message>
<message>
<source>Minor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chromatic</source>
<translation type="unfinished"/>
</message>
<message>
<source>Half-Whole Diminished</source>
<translation type="unfinished"/>
</message>
<message>
<source>5</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phrygian dominant</source>
<translation type="unfinished"/>
</message>
<message>
<source>Persian</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InstrumentFunctionNoteStackingView</name>
<message>
<source>RANGE</source>
<translation>OMFÅNG</translation>
</message>
<message>
<source>Chord range:</source>
<translation type="unfinished"/>
</message>
<message>
<source>octave(s)</source>
<translation>oktav(er)</translation>
</message>
<message>
<source>Use this knob for setting the chord range in octaves. The selected chord will be played within specified number of octaves.</source>
<translation type="unfinished"/>
</message>
<message>
<source>STACKING</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chord:</source>
<translation>Ackord:</translation>
</message>
</context>
<context>
<name>InstrumentMidiIOView</name>
<message>
<source>ENABLE MIDI INPUT</source>
<translation>AKTIVERA MIDI-INMATNING</translation>
</message>
<message>
<source>CHANNEL</source>
<translation>KANAL</translation>
</message>
<message>
<source>VELOCITY</source>
<translation type="unfinished"/>
</message>
<message>
<source>ENABLE MIDI OUTPUT</source>
<translation type="unfinished"/>
</message>
<message>
<source>PROGRAM</source>
<translation type="unfinished"/>
</message>
<message>
<source>MIDI devices to receive MIDI events from</source>
<translation type="unfinished"/>
</message>
<message>
<source>MIDI devices to send MIDI events to</source>
<translation type="unfinished"/>
</message>
<message>
<source>NOTE</source>
<translation type="unfinished"/>
</message>
<message>
<source>CUSTOM BASE VELOCITY</source>
<translation type="unfinished"/>
</message>
<message>
<source>Specify the velocity normalization base for MIDI-based instruments at 100% note velocity</source>
<translation type="unfinished"/>
</message>
<message>
<source>BASE VELOCITY</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InstrumentMiscView</name>
<message>
<source>MASTER PITCH</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enables the use of Master Pitch</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InstrumentSoundShaping</name>
<message>
<source>VOLUME</source>
<translation>VOLYM</translation>
</message>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>CUTOFF</source>
<translation type="unfinished"/>
</message>
<message>
<source>Cutoff frequency</source>
<translation>Cutoff-frekvens</translation>
</message>
<message>
<source>RESO</source>
<translation>RESO</translation>
</message>
<message>
<source>Resonance</source>
<translation>Resonans</translation>
</message>
<message>
<source>Envelopes/LFOs</source>
<translation type="unfinished"/>
</message>
<message>
<source>Filter type</source>
<translation>Filtertyp</translation>
</message>
<message>
<source>Q/Resonance</source>
<translation>Q/Resonans</translation>
</message>
<message>
<source>LowPass</source>
<translation>Lågpass</translation>
</message>
<message>
<source>HiPass</source>
<translation>Högpass</translation>
</message>
<message>
<source>BandPass csg</source>
<translation type="unfinished"/>
</message>
<message>
<source>BandPass czpg</source>
<translation type="unfinished"/>
</message>
<message>
<source>Notch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Allpass</source>
<translation type="unfinished"/>
</message>
<message>
<source>Moog</source>
<translation>Moog</translation>
</message>
<message>
<source>2x LowPass</source>
<translation>2x Lågpass</translation>
</message>
<message>
<source>RC LowPass 12dB</source>
<translation>RC Lågpass 12dB</translation>
</message>
<message>
<source>RC BandPass 12dB</source>
<translation>RC BandPass 12dB</translation>
</message>
<message>
<source>RC HighPass 12dB</source>
<translation>RC Högpass 12dB</translation>
</message>
<message>
<source>RC LowPass 24dB</source>
<translation>RC Lågpass 24dB</translation>
</message>
<message>
<source>RC BandPass 24dB</source>
<translation>RC BandPass 24dB</translation>
</message>
<message>
<source>RC HighPass 24dB</source>
<translation>RC Högpass 24dB</translation>
</message>
<message>
<source>Vocal Formant Filter</source>
<translation type="unfinished"/>
</message>
<message>
<source>2x Moog</source>
<translation type="unfinished"/>
</message>
<message>
<source>SV LowPass</source>
<translation>SV Lågpass</translation>
</message>
<message>
<source>SV BandPass</source>
<translation>SV BandPass</translation>
</message>
<message>
<source>SV HighPass</source>
<translation>SV Högpass</translation>
</message>
<message>
<source>SV Notch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Fast Formant</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tripole</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InstrumentSoundShapingView</name>
<message>
<source>TARGET</source>
<translation>MÅL</translation>
</message>
<message>
<source>These tabs contain envelopes. They're very important for modifying a sound, in that they are almost always necessary for substractive synthesis. For example if you have a volume envelope, you can set when the sound should have a specific volume. If you want to create some soft strings then your sound has to fade in and out very softly. This can be done by setting large attack and release times. It's the same for other envelope targets like panning, cutoff frequency for the used filter and so on. Just monkey around with it! You can really make cool sounds out of a saw-wave with just some envelopes...!</source>
<translation type="unfinished"/>
</message>
<message>
<source>FILTER</source>
<translation>FILTER</translation>
</message>
<message>
<source>Here you can select the built-in filter you want to use for this instrument-track. Filters are very important for changing the characteristics of a sound.</source>
<translation>Här kan du välja vilket inbyggt filter du vill använda för detta instrument-spår. Filter är väldigt viktiga om man vill ändra karaktäristiken på ett ljud.</translation>
</message>
<message>
<source>Hz</source>
<translation>Hz</translation>
</message>
<message>
<source>Use this knob for setting the cutoff frequency for the selected filter. The cutoff frequency specifies the frequency for cutting the signal by a filter. For example a lowpass-filter cuts all frequencies above the cutoff frequency. A highpass-filter cuts all frequencies below cutoff frequency, and so on...</source>
<translation type="unfinished"/>
</message>
<message>
<source>RESO</source>
<translation>RESO</translation>
</message>
<message>
<source>Resonance:</source>
<translation>Resonans:</translation>
</message>
<message>
<source>Use this knob for setting Q/Resonance for the selected filter. Q/Resonance tells the filter how much it should amplify frequencies near Cutoff-frequency.</source>
<translation type="unfinished"/>
</message>
<message>
<source>FREQ</source>
<translation>FREKV.</translation>
</message>
<message>
<source>cutoff frequency:</source>
<translation>cutoff-frekvens:</translation>
</message>
<message>
<source>Envelopes, LFOs and filters are not supported by the current instrument.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InstrumentTrack</name>
<message>
<source>unnamed_track</source>
<translation>namnlöst_spår</translation>
</message>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>Panning</source>
<translation>Panorering</translation>
</message>
<message>
<source>Pitch</source>
<translation>Tonhöjd</translation>
</message>
<message>
<source>FX channel</source>
<translation>FX-kanal</translation>
</message>
<message>
<source>Default preset</source>
<translation>Standardinställning</translation>
</message>
<message>
<source>With this knob you can set the volume of the opened channel.</source>
<translation>Med denna ratt ställer du volymen för den öppnade kanalen.</translation>
</message>
<message>
<source>Base note</source>
<translation>Grundton</translation>
</message>
<message>
<source>Pitch range</source>
<translation>Tonhöjdsomfång</translation>
</message>
<message>
<source>Master Pitch</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InstrumentTrackView</name>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>Volume:</source>
<translation>Volym:</translation>
</message>
<message>
<source>VOL</source>
<translation>VOL</translation>
</message>
<message>
<source>Panning</source>
<translation>Panorering</translation>
</message>
<message>
<source>Panning:</source>
<translation>Panorering:</translation>
</message>
<message>
<source>PAN</source>
<translation>PANORERA</translation>
</message>
<message>
<source>MIDI</source>
<translation>MIDI</translation>
</message>
<message>
<source>Input</source>
<translation>Ingång</translation>
</message>
<message>
<source>Output</source>
<translation>Utgång</translation>
</message>
<message>
<source>FX %1: %2</source>
<translation>FX %1: %2</translation>
</message>
</context>
<context>
<name>InstrumentTrackWindow</name>
<message>
<source>GENERAL SETTINGS</source>
<translation>ÖVERGRIPANDE INSTÄLLNINGAR</translation>
</message>
<message>
<source>Instrument volume</source>
<translation>Instrument-volym</translation>
</message>
<message>
<source>Volume:</source>
<translation>Volym:</translation>
</message>
<message>
<source>VOL</source>
<translation>VOL</translation>
</message>
<message>
<source>Panning</source>
<translation>Panorering</translation>
</message>
<message>
<source>Panning:</source>
<translation>Panorering:</translation>
</message>
<message>
<source>PAN</source>
<translation>PANORERA</translation>
</message>
<message>
<source>Pitch</source>
<translation>Tonhöjd</translation>
</message>
<message>
<source>Pitch:</source>
<translation>Tonhöjd:</translation>
</message>
<message>
<source>cents</source>
<translation type="unfinished"/>
</message>
<message>
<source>PITCH</source>
<translation type="unfinished"/>
</message>
<message>
<source>FX channel</source>
<translation>FX-kanal</translation>
</message>
<message>
<source>ENV/LFO</source>
<translation type="unfinished"/>
</message>
<message>
<source>FUNC</source>
<translation type="unfinished"/>
</message>
<message>
<source>FX</source>
<translation type="unfinished"/>
</message>
<message>
<source>MIDI</source>
<translation>MIDI</translation>
</message>
<message>
<source>Save preset</source>
<translation>Spara förinställning</translation>
</message>
<message>
<source>XML preset file (*.xpf)</source>
<translation>XML förinställnings-fil (*.xpf)</translation>
</message>
<message>
<source>PLUGIN</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pitch range (semitones)</source>
<translation type="unfinished"/>
</message>
<message>
<source>RANGE</source>
<translation>OMFÅNG</translation>
</message>
<message>
<source>Save current instrument track settings in a preset file</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here, if you want to save current instrument track settings in a preset file. Later you can load this preset by double-clicking it in the preset-browser.</source>
<translation type="unfinished"/>
</message>
<message>
<source>MISC</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use these controls to view and edit the next/previous track in the song editor.</source>
<translation type="unfinished"/>
</message>
<message>
<source>SAVE</source>
<translation>SPARA</translation>
</message>
</context>
<context>
<name>Knob</name>
<message>
<source>Set linear</source>
<translation type="unfinished"/>
</message>
<message>
<source>Set logarithmic</source>
<translation type="unfinished"/>
</message>
<message>
<source>Please enter a new value between -96.0 dBFS and 6.0 dBFS:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Please enter a new value between %1 and %2:</source>
<translation>Ange ett nytt värde mellan %1 och %2:</translation>
</message>
</context>
<context>
<name>LadspaControl</name>
<message>
<source>Link channels</source>
<translation>Länka kanaler</translation>
</message>
</context>
<context>
<name>LadspaControlDialog</name>
<message>
<source>Link Channels</source>
<translation>Länka Kanaler</translation>
</message>
<message>
<source>Channel </source>
<translation>Kanal </translation>
</message>
</context>
<context>
<name>LadspaControlView</name>
<message>
<source>Link channels</source>
<translation>Länka kanaler</translation>
</message>
<message>
<source>Value:</source>
<translation>Värde:</translation>
</message>
<message>
<source>Sorry, no help available.</source>
<translation>Tyvärr finns ingen hjälp tillgänglig.</translation>
</message>
</context>
<context>
<name>LadspaEffect</name>
<message>
<source>Unknown LADSPA plugin %1 requested.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>LcdSpinBox</name>
<message>
<source>Please enter a new value between %1 and %2:</source>
<translation>Ange ett nytt värde mellan %1 och %2:</translation>
</message>
</context>
<context>
<name>LeftRightNav</name>
<message>
<source>Previous</source>
<translation>Tidigare</translation>
</message>
<message>
<source>Next</source>
<translation>Nästa</translation>
</message>
<message>
<source>Previous (%1)</source>
<translation>Tidigare (%1)</translation>
</message>
<message>
<source>Next (%1)</source>
<translation>Nästa (%1)</translation>
</message>
</context>
<context>
<name>LfoController</name>
<message>
<source>LFO Controller</source>
<translation type="unfinished"/>
</message>
<message>
<source>Base value</source>
<translation>Basvärde</translation>
</message>
<message>
<source>Oscillator speed</source>
<translation type="unfinished"/>
</message>
<message>
<source>Oscillator amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>Oscillator phase</source>
<translation type="unfinished"/>
</message>
<message>
<source>Oscillator waveform</source>
<translation type="unfinished"/>
</message>
<message>
<source>Frequency Multiplier</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>LfoControllerDialog</name>
<message>
<source>LFO</source>
<translation>LFO</translation>
</message>
<message>
<source>LFO Controller</source>
<translation type="unfinished"/>
</message>
<message>
<source>BASE</source>
<translation type="unfinished"/>
</message>
<message>
<source>Base amount:</source>
<translation type="unfinished"/>
</message>
<message>
<source>todo</source>
<translation type="unfinished"/>
</message>
<message>
<source>SPD</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO-speed:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use this knob for setting speed of the LFO. The bigger this value the faster the LFO oscillates and the faster the effect.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulation amount:</source>
<translation>Moduleringsmängd:</translation>
</message>
<message>
<source>Use this knob for setting modulation amount of the LFO. The bigger this value, the more the connected control (e.g. volume or cutoff-frequency) will be influenced by the LFO.</source>
<translation type="unfinished"/>
</message>
<message>
<source>PHS</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phase offset:</source>
<translation type="unfinished"/>
</message>
<message>
<source>degrees</source>
<translation>grader</translation>
</message>
<message>
<source>With this knob you can set the phase offset of the LFO. That means you can move the point within an oscillation where the oscillator begins to oscillate. For example if you have a sine-wave and have a phase-offset of 180 degrees the wave will first go down. It's the same with a square-wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for a sine-wave.</source>
<translation>Klicka här för sinusvåg.</translation>
</message>
<message>
<source>Click here for a triangle-wave.</source>
<translation>Klicka här för triangelvåg.</translation>
</message>
<message>
<source>Click here for a saw-wave.</source>
<translation>Klicka här för sågtandsvåg.</translation>
</message>
<message>
<source>Click here for a square-wave.</source>
<translation>Klicka här för fyrkantvåg.</translation>
</message>
<message>
<source>Click here for an exponential wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for white-noise.</source>
<translation>Klicka här för vitt brus.</translation>
</message>
<message>
<source>Click here for a user-defined shape.
Double click to pick a file.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for a moog saw-wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>AMNT</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>LmmsCore</name>
<message>
<source>Generating wavetables</source>
<translation type="unfinished"/>
</message>
<message>
<source>Initializing data structures</source>
<translation type="unfinished"/>
</message>
<message>
<source>Opening audio and midi devices</source>
<translation type="unfinished"/>
</message>
<message>
<source>Launching mixer threads</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MainWindow</name>
<message>
<source>Could not save config-file</source>
<translation type="unfinished"/>
</message>
<message>
<source>Could not save configuration file %1. You're probably not permitted to write to this file.
Please make sure you have write-access to the file and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<source>&New</source>
<translation>&Ny</translation>
</message>
<message>
<source>&Open...</source>
<translation>&Öppna...</translation>
</message>
<message>
<source>&Save</source>
<translation>&Spara</translation>
</message>
<message>
<source>Save &As...</source>
<translation>Spara &som...</translation>
</message>
<message>
<source>Import...</source>
<translation>Importera...</translation>
</message>
<message>
<source>E&xport...</source>
<translation>E&xportera...</translation>
</message>
<message>
<source>&Quit</source>
<translation>&Avsluta</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Redigera</translation>
</message>
<message>
<source>Settings</source>
<translation>Inställningar</translation>
</message>
<message>
<source>&Tools</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Help</source>
<translation>&Hjälp</translation>
</message>
<message>
<source>Help</source>
<translation>Hjälp</translation>
</message>
<message>
<source>What's this?</source>
<translation>Vad är detta?</translation>
</message>
<message>
<source>About</source>
<translation>Om</translation>
</message>
<message>
<source>Create new project</source>
<translation>Skapa nytt projekt</translation>
</message>
<message>
<source>Create new project from template</source>
<translation>Skapa nytt projekt från mall</translation>
</message>
<message>
<source>Open existing project</source>
<translation>Öppna existerande projekt</translation>
</message>
<message>
<source>Recently opened projects</source>
<translation>Nyligen öppnade projekt</translation>
</message>
<message>
<source>Save current project</source>
<translation>Spara aktuellt projekt</translation>
</message>
<message>
<source>Export current project</source>
<translation>Exportera aktuellt projekt</translation>
</message>
<message>
<source>Song Editor</source>
<translation>Sång-editor</translation>
</message>
<message>
<source>By pressing this button, you can show or hide the Song-Editor. With the help of the Song-Editor you can edit song-playlist and specify when which track should be played. You can also insert and move samples (e.g. rap samples) directly into the playlist.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Beat+Bassline Editor</source>
<translation>Redigera Trummor+Bas</translation>
</message>
<message>
<source>By pressing this button, you can show or hide the Beat+Bassline Editor. The Beat+Bassline Editor is needed for creating beats, and for opening, adding, and removing channels, and for cutting, copying and pasting beat and bassline-patterns, and for other things like that.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Piano Roll</source>
<translation>Pianorulle</translation>
</message>
<message>
<source>Click here to show or hide the Piano-Roll. With the help of the Piano-Roll you can edit melodies in an easy way.</source>
<translation>Klicka här för att visa eller dölja pianorullen. Med hjälp av pianorullen kan du skapa melodier på ett enkelt sätt.</translation>
</message>
<message>
<source>Automation Editor</source>
<translation>Automations-editor</translation>
</message>
<message>
<source>Click here to show or hide the Automation Editor. With the help of the Automation Editor you can edit dynamic values in an easy way.</source>
<translation type="unfinished"/>
</message>
<message>
<source>FX Mixer</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to show or hide the FX Mixer. The FX Mixer is a very powerful tool for managing effects for your song. You can insert effects into different effect-channels.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Project Notes</source>
<translation>Projektanteckningar</translation>
</message>
<message>
<source>Click here to show or hide the project notes window. In this window you can put down your project notes.</source>
<translation>Klicka här för att visa eller dölja fönstret för projektanteckningar. I detta fönster kan du göra noteringar om ditt projekt.</translation>
</message>
<message>
<source>Controller Rack</source>
<translation>Kontrollrack</translation>
</message>
<message>
<source>Untitled</source>
<translation>Namnlös</translation>
</message>
<message>
<source>LMMS %1</source>
<translation>LMMS %1</translation>
</message>
<message>
<source>Project not saved</source>
<translation>Projekt inte sparat</translation>
</message>
<message>
<source>The current project was modified since last saving. Do you want to save it now?</source>
<translation>Projektet har ändrats sedan det sparades senast. Vill du spara nu?</translation>
</message>
<message>
<source>Help not available</source>
<translation>Hjälp inte tillgänglig</translation>
</message>
<message>
<source>Currently there's no help available in LMMS.
Please visit http://lmms.sf.net/wiki for documentation on LMMS.</source>
<translation>Just nu finns ingen hjälp tillgänglig i LMMS.
Besök https://lmms.io/documentation/ för dokumentation (Engelska).</translation>
</message>
<message>
<source>LMMS (*.mmp *.mmpz)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Version %1</source>
<translation>Version %1</translation>
</message>
<message>
<source>Configuration file</source>
<translation>Konfigurationsfil</translation>
</message>
<message>
<source>Error while parsing configuration file at line %1:%2: %3</source>
<translation>Fel vid inläsning av konfigurationsfil på rad %1:%2: %3</translation>
</message>
<message>
<source>Volumes</source>
<translation type="unfinished"/>
</message>
<message>
<source>Undo</source>
<translation>Ångra</translation>
</message>
<message>
<source>Redo</source>
<translation>Gör om</translation>
</message>
<message>
<source>My Projects</source>
<translation>Mina Projekt</translation>
</message>
<message>
<source>My Samples</source>
<translation type="unfinished"/>
</message>
<message>
<source>My Presets</source>
<translation type="unfinished"/>
</message>
<message>
<source>My Home</source>
<translation type="unfinished"/>
</message>
<message>
<source>My Computer</source>
<translation>Min dator</translation>
</message>
<message>
<source>&File</source>
<translation>&Fil</translation>
</message>
<message>
<source>&Recently Opened Projects</source>
<translation>&Nyligen öppnade projekt</translation>
</message>
<message>
<source>Save as New &Version</source>
<translation>Spara som Ny &Version</translation>
</message>
<message>
<source>E&xport Tracks...</source>
<translation>E&xportera spår...</translation>
</message>
<message>
<source>Online Help</source>
<translation>Online-hjälp</translation>
</message>
<message>
<source>What's This?</source>
<translation>Vad är det här?</translation>
</message>
<message>
<source>Open Project</source>
<translation>Öppna Projekt</translation>
</message>
<message>
<source>Save Project</source>
<translation>Spara Projekt</translation>
</message>
<message>
<source>Project recovery</source>
<translation>Projektåterställning</translation>
</message>
<message>
<source>There is a recovery file present. It looks like the last session did not end properly or another instance of LMMS is already running. Do you want to recover the project of this session?</source>
<translation>Det finns en återställningsfil tillgänglig. Det verkar som om programmet inte avslutades korrekt senast, eller så körs redan en annan instans av LMMS. Vill du återställa projektet från denna session?</translation>
</message>
<message>
<source>Recover</source>
<translation>Återställ</translation>
</message>
<message>
<source>Recover the file. Please don't run multiple instances of LMMS when you do this.</source>
<translation>Återställ filen. Se till att du bara har en instans av LMMS igång när du gör detta.</translation>
</message>
<message>
<source>Ignore</source>
<translation>Ignorera</translation>
</message>
<message>
<source>Launch LMMS as usual but with automatic backup disabled to prevent the present recover file from being overwritten.</source>
<translation>Starta LMMS som vanligt men med automatisk backup avstängd för att förhindra att den nuvarande återställningsfilen blir överskriven.</translation>
</message>
<message>
<source>Discard</source>
<translation>Kasta bort</translation>
</message>
<message>
<source>Launch a default session and delete the restored files. This is not reversible.</source>
<translation>Starta en standard-session och ta bort de återskapade filerna. Detta går inte att ångra.</translation>
</message>
<message>
<source>Preparing plugin browser</source>
<translation>Förbereder plugin-browser</translation>
</message>
<message>
<source>Preparing file browsers</source>
<translation>Förbereder fil-browser</translation>
</message>
<message>
<source>Root directory</source>
<translation>Rotkatalog</translation>
</message>
<message>
<source>Loading background artwork</source>
<translation>Laddar bakgrunds-grafik</translation>
</message>
<message>
<source>New from template</source>
<translation>Nytt från mall</translation>
</message>
<message>
<source>Save as default template</source>
<translation>Spara som standard-mall</translation>
</message>
<message>
<source>&View</source>
<translation>&Visa</translation>
</message>
<message>
<source>Toggle metronome</source>
<translation>Slå på/av metronom</translation>
</message>
<message>
<source>Show/hide Song-Editor</source>
<translation>Visa/dölj Sång-editor</translation>
</message>
<message>
<source>Show/hide Beat+Bassline Editor</source>
<translation>Visa/dölj Takt+Bas-editor</translation>
</message>
<message>
<source>Show/hide Piano-Roll</source>
<translation>Visa/dölj pianorulle</translation>
</message>
<message>
<source>Show/hide Automation Editor</source>
<translation>Visa/dölj Automations-editor</translation>
</message>
<message>
<source>Show/hide FX Mixer</source>
<translation>Visa/dölj FX Mixer</translation>
</message>
<message>
<source>Show/hide project notes</source>
<translation>Visa/dölj projektanteckningar</translation>
</message>
<message>
<source>Show/hide controller rack</source>
<translation>Visa/dölj kontrollrack</translation>
</message>
<message>
<source>Recover session. Please save your work!</source>
<translation>Återställnings-session. Spara ditt arbete!</translation>
</message>
<message>
<source>Automatic backup disabled. Remember to save your work!</source>
<translation>Automatisk backup avstängd. Kom ihåg att spara ditt arbete!</translation>
</message>
<message>
<source>Recovered project not saved</source>
<translation>Återställt projekt inte sparat</translation>
</message>
<message>
<source>This project was recovered from the previous session. It is currently unsaved and will be lost if you don't save it. Do you want to save it now?</source>
<translation>Projektet återställdes från den senaste sessionen. Det kommer försvinna om du inte sparar det. Vill du spara projektet nu?</translation>
</message>
<message>
<source>LMMS Project</source>
<translation>LMMS-Projekt</translation>
</message>
<message>
<source>LMMS Project Template</source>
<translation>LMMS-Projektmall</translation>
</message>
<message>
<source>Overwrite default template?</source>
<translation>Vill du skriva över standardmallen?</translation>
</message>
<message>
<source>This will overwrite your current default template.</source>
<translation>Detta kommer skriva över din nuvarande standardmall.</translation>
</message>
<message>
<source>Volume as dBFS</source>
<translation>Volym som dBFS</translation>
</message>
<message>
<source>Smooth scroll</source>
<translation>Mjuk rullning</translation>
</message>
<message>
<source>Enable note labels in piano roll</source>
<translation>Visa notnamn i pianorullen</translation>
</message>
<message>
<source>Save project template</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MeterDialog</name>
<message>
<source>Meter Numerator</source>
<translation type="unfinished"/>
</message>
<message>
<source>Meter Denominator</source>
<translation type="unfinished"/>
</message>
<message>
<source>TIME SIG</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MeterModel</name>
<message>
<source>Numerator</source>
<translation type="unfinished"/>
</message>
<message>
<source>Denominator</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MidiController</name>
<message>
<source>MIDI Controller</source>
<translation type="unfinished"/>
</message>
<message>
<source>unnamed_midi_controller</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MidiImport</name>
<message>
<source>Setup incomplete</source>
<translation>Installation ofullständig</translation>
</message>
<message>
<source>You do not have set up a default soundfont in the settings dialog (Edit->Settings). Therefore no sound will be played back after importing this MIDI file. You should download a General MIDI soundfont, specify it in settings dialog and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<source>You did not compile LMMS with support for SoundFont2 player, which is used to add default sound to imported MIDI files. Therefore no sound will be played back after importing this MIDI file.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Track</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MidiJack</name>
<message>
<source>JACK server down</source>
<extracomment>When JACK(JACK Audio Connection Kit) disconnects, it will show the following message (title)</extracomment>
<translation>JACK-servern nedstängd</translation>
</message>
<message>
<source>The JACK server seems to be shuted down.</source>
<extracomment>When JACK(JACK Audio Connection Kit) disconnects, it will show the following message (dialog message)</extracomment>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MidiPort</name>
<message>
<source>Input channel</source>
<translation>Ingångskanal</translation>
</message>
<message>
<source>Output channel</source>
<translation>Utgångskanal</translation>
</message>
<message>
<source>Input controller</source>
<translation>Ingångskontroller</translation>
</message>
<message>
<source>Output controller</source>
<translation>Utgångskontroller</translation>
</message>
<message>
<source>Fixed input velocity</source>
<translation type="unfinished"/>
</message>
<message>
<source>Fixed output velocity</source>
<translation type="unfinished"/>
</message>
<message>
<source>Output MIDI program</source>
<translation type="unfinished"/>
</message>
<message>
<source>Receive MIDI-events</source>
<translation>Ta emot MIDI-event</translation>
</message>
<message>
<source>Send MIDI-events</source>
<translation>Skicka MIDI-event</translation>
</message>
<message>
<source>Fixed output note</source>
<translation>Fast utgångs-not</translation>
</message>
<message>
<source>Base velocity</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MidiSetupWidget</name>
<message>
<source>DEVICE</source>
<translation>ENHET</translation>
</message>
</context>
<context>
<name>MonstroInstrument</name>
<message>
<source>Osc 1 Volume</source>
<translation>Osc 1 Volym</translation>
</message>
<message>
<source>Osc 1 Panning</source>
<translation>Osc 1 Panorering</translation>
</message>
<message>
<source>Osc 1 Coarse detune</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 1 Fine detune left</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 1 Fine detune right</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 1 Stereo phase offset</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 1 Pulse width</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 1 Sync send on rise</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 1 Sync send on fall</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Panning</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Coarse detune</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Fine detune left</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Fine detune right</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Stereo phase offset</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Waveform</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Sync Hard</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 2 Sync Reverse</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Panning</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Coarse detune</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Stereo phase offset</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Sub-oscillator mix</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Waveform 1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Waveform 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Sync Hard</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc 3 Sync Reverse</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO 1 Waveform</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO 1 Attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO 1 Rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO 1 Phase</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO 2 Waveform</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO 2 Attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO 2 Rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO 2 Phase</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 1 Pre-delay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 1 Attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 1 Hold</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 1 Decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 1 Sustain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 1 Release</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 1 Slope</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 2 Pre-delay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 2 Attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 2 Hold</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 2 Decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 2 Sustain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 2 Release</source>
<translation type="unfinished"/>
</message>
<message>
<source>Env 2 Slope</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc2-3 modulation</source>
<translation type="unfinished"/>
</message>
<message>
<source>Selected view</source>
<translation>Vald vy</translation>
</message>
<message>
<source>Vol1-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol1-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol1-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol1-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol2-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol2-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol2-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol2-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol3-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol3-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol3-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vol3-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs1-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs1-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs1-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs1-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs2-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs2-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs2-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs2-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs3-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs3-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs3-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phs3-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit1-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit1-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit1-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit1-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit2-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit2-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit2-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit2-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit3-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit3-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit3-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pit3-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>PW1-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>PW1-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>PW1-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>PW1-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sub3-Env1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sub3-Env2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sub3-LFO1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sub3-LFO2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sine wave</source>
<translation>Sinusvåg</translation>
</message>
<message>
<source>Bandlimited Triangle wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandlimited Saw wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandlimited Ramp wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandlimited Square wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandlimited Moog saw wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Soft square wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Absolute sine wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Exponential wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>White noise</source>
<translation type="unfinished"/>
</message>
<message>
<source>Digital Triangle wave</source>
<translation>Digital Triangelvåg</translation>
</message>
<message>
<source>Digital Saw wave</source>
<translation>Digital Sågtandsvåg</translation>
</message>
<message>
<source>Digital Ramp wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Digital Square wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Digital Moog saw wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Triangle wave</source>
<translation>Triangelvåg</translation>
</message>
<message>
<source>Saw wave</source>
<translation>Sågtandsvåg</translation>
</message>
<message>
<source>Ramp wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Square wave</source>
<translation>Fyrkantvåg</translation>
</message>
<message>
<source>Moog saw wave</source>
<translation>Moog sågtandsvåg</translation>
</message>
<message>
<source>Abs. sine wave</source>
<translation>Abs. sinusvåg</translation>
</message>
<message>
<source>Random</source>
<translation>Slumpmässig</translation>
</message>
<message>
<source>Random smooth</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MonstroView</name>
<message>
<source>Operators view</source>
<translation type="unfinished"/>
</message>
<message>
<source>The Operators view contains all the operators. These include both audible operators (oscillators) and inaudible operators, or modulators: Low-frequency oscillators and Envelopes.
Knobs and other widgets in the Operators view have their own what's this -texts, so you can get more specific help for them that way. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Matrix view</source>
<translation type="unfinished"/>
</message>
<message>
<source>The Matrix view contains the modulation matrix. Here you can define the modulation relationships between the various operators: Each audible operator (oscillators 1-3) has 3-4 properties that can be modulated by any of the modulators. Using more modulations consumes more CPU power.
The view is divided to modulation targets, grouped by the target oscillator. Available targets are volume, pitch, phase, pulse width and sub-osc ratio. Note: some targets are specific to one oscillator only.
Each modulation target has 4 knobs, one for each modulator. By default the knobs are at 0, which means no modulation. Turning a knob to 1 causes that modulator to affect the modulation target as much as possible. Turning it to -1 does the same, but the modulation is inversed. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix Osc2 with Osc3</source>
<translation>Mixa Osc2 med Osc3</translation>
</message>
<message>
<source>Modulate amplitude of Osc3 with Osc2</source>
<translation>Modulera amplituden för Osc3 med Osc2</translation>
</message>
<message>
<source>Modulate frequency of Osc3 with Osc2</source>
<translation>Modulera frekvensen för Osc3 med Osc2</translation>
</message>
<message>
<source>Modulate phase of Osc3 with Osc2</source>
<translation>Modulera fasen för Osc3 med Osc2</translation>
</message>
<message>
<source>The CRS knob changes the tuning of oscillator 1 in semitone steps. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The CRS knob changes the tuning of oscillator 2 in semitone steps. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The CRS knob changes the tuning of oscillator 3 in semitone steps. </source>
<translation type="unfinished"/>
</message>
<message>
<source>FTL and FTR change the finetuning of the oscillator for left and right channels respectively. These can add stereo-detuning to the oscillator which widens the stereo image and causes an illusion of space. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The SPO knob modifies the difference in phase between left and right channels. Higher difference creates a wider stereo image. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The PW knob controls the pulse width, also known as duty cycle, of oscillator 1. Oscillator 1 is a digital pulse wave oscillator, it doesn't produce bandlimited output, which means that you can use it as an audible oscillator but it will cause aliasing. You can also use it as an inaudible source of a sync signal, which can be used to synchronize oscillators 2 and 3. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Send Sync on Rise: When enabled, the Sync signal is sent every time the state of oscillator 1 changes from low to high, ie. when the amplitude changes from -1 to 1. Oscillator 1's pitch, phase and pulse width may affect the timing of syncs, but its volume has no effect on them. Sync signals are sent independently for both left and right channels. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Send Sync on Fall: When enabled, the Sync signal is sent every time the state of oscillator 1 changes from high to low, ie. when the amplitude changes from 1 to -1. Oscillator 1's pitch, phase and pulse width may affect the timing of syncs, but its volume has no effect on them. Sync signals are sent independently for both left and right channels. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Hard sync: Every time the oscillator receives a sync signal from oscillator 1, its phase is reset to 0 + whatever its phase offset is. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverse sync: Every time the oscillator receives a sync signal from oscillator 1, the amplitude of the oscillator gets inverted. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Choose waveform for oscillator 2. </source>
<translation>Välj vågform för oscillator 2.</translation>
</message>
<message>
<source>Choose waveform for oscillator 3's first sub-osc. Oscillator 3 can smoothly interpolate between two different waveforms. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Choose waveform for oscillator 3's second sub-osc. Oscillator 3 can smoothly interpolate between two different waveforms. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The SUB knob changes the mixing ratio of the two sub-oscs of oscillator 3. Each sub-osc can be set to produce a different waveform, and oscillator 3 can smoothly interpolate between them. All incoming modulations to oscillator 3 are applied to both sub-oscs/waveforms in the exact same way. </source>
<translation type="unfinished"/>
</message>
<message>
<source>In addition to dedicated modulators, Monstro allows oscillator 3 to be modulated by the output of oscillator 2.
Mix mode means no modulation: the outputs of the oscillators are simply mixed together. </source>
<translation type="unfinished"/>
</message>
<message>
<source>In addition to dedicated modulators, Monstro allows oscillator 3 to be modulated by the output of oscillator 2.
AM means amplitude modulation: Oscillator 3's amplitude (volume) is modulated by oscillator 2. </source>
<translation type="unfinished"/>
</message>
<message>
<source>In addition to dedicated modulators, Monstro allows oscillator 3 to be modulated by the output of oscillator 2.
FM means frequency modulation: Oscillator 3's frequency (pitch) is modulated by oscillator 2. The frequency modulation is implemented as phase modulation, which gives a more stable overall pitch than "pure" frequency modulation. </source>
<translation type="unfinished"/>
</message>
<message>
<source>In addition to dedicated modulators, Monstro allows oscillator 3 to be modulated by the output of oscillator 2.
PM means phase modulation: Oscillator 3's phase is modulated by oscillator 2. It differs from frequency modulation in that the phase changes are not cumulative. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Select the waveform for LFO 1.
"Random" and "Random smooth" are special waveforms: they produce random output, where the rate of the LFO controls how often the state of the LFO changes. The smooth version interpolates between these states with cosine interpolation. These random modes can be used to give "life" to your presets - add some of that analog unpredictability... </source>
<translation type="unfinished"/>
</message>
<message>
<source>Select the waveform for LFO 2.
"Random" and "Random smooth" are special waveforms: they produce random output, where the rate of the LFO controls how often the state of the LFO changes. The smooth version interpolates between these states with cosine interpolation. These random modes can be used to give "life" to your presets - add some of that analog unpredictability... </source>
<translation type="unfinished"/>
</message>
<message>
<source>Attack causes the LFO to come on gradually from the start of the note. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Rate sets the speed of the LFO, measured in milliseconds per cycle. Can be synced to tempo. </source>
<translation type="unfinished"/>
</message>
<message>
<source>PHS controls the phase offset of the LFO. </source>
<translation type="unfinished"/>
</message>
<message>
<source>PRE, or pre-delay, delays the start of the envelope from the start of the note. 0 means no delay. </source>
<translation type="unfinished"/>
</message>
<message>
<source>ATT, or attack, controls how fast the envelope ramps up at start, measured in milliseconds. A value of 0 means instant. </source>
<translation type="unfinished"/>
</message>
<message>
<source>HOLD controls how long the envelope stays at peak after the attack phase. </source>
<translation type="unfinished"/>
</message>
<message>
<source>DEC, or decay, controls how fast the envelope falls off from its peak, measured in milliseconds it would take to go from peak to zero. The actual decay may be shorter if sustain is used. </source>
<translation type="unfinished"/>
</message>
<message>
<source>SUS, or sustain, controls the sustain level of the envelope. The decay phase will not go below this level as long as the note is held. </source>
<translation type="unfinished"/>
</message>
<message>
<source>REL, or release, controls how long the release is for the note, measured in how long it would take to fall from peak to zero. Actual release may be shorter, depending on at what phase the note is released. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The slope knob controls the curve or shape of the envelope. A value of 0 creates straight rises and falls. Negative values create curves that start slowly, peak quickly and fall of slowly again. Positive values create curves that start and end quickly, and stay longer near the peaks. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>Panning</source>
<translation>Panorering</translation>
</message>
<message>
<source>Coarse detune</source>
<translation type="unfinished"/>
</message>
<message>
<source> semitones</source>
<translation> halvtoner</translation>
</message>
<message>
<source>Finetune left</source>
<translation type="unfinished"/>
</message>
<message>
<source> cents</source>
<translation type="unfinished"/>
</message>
<message>
<source>Finetune right</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stereo phase offset</source>
<translation type="unfinished"/>
</message>
<message>
<source> deg</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pulse width</source>
<translation type="unfinished"/>
</message>
<message>
<source>Send sync on pulse rise</source>
<translation type="unfinished"/>
</message>
<message>
<source>Send sync on pulse fall</source>
<translation type="unfinished"/>
</message>
<message>
<source>Hard sync oscillator 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverse sync oscillator 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sub-osc mix</source>
<translation type="unfinished"/>
</message>
<message>
<source>Hard sync oscillator 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverse sync oscillator 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Attack</source>
<translation>Attack</translation>
</message>
<message>
<source>Rate</source>
<translation>Hastighet</translation>
</message>
<message>
<source>Phase</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pre-delay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Hold</source>
<translation>Håll</translation>
</message>
<message>
<source>Decay</source>
<translation>Decay</translation>
</message>
<message>
<source>Sustain</source>
<translation>Sustain</translation>
</message>
<message>
<source>Release</source>
<translation>Släpp</translation>
</message>
<message>
<source>Slope</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulation amount</source>
<translation>Moduleringsmängd</translation>
</message>
</context>
<context>
<name>MultitapEchoControlDialog</name>
<message>
<source>Length</source>
<translation>Längd</translation>
</message>
<message>
<source>Step length:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Dry</source>
<translation type="unfinished"/>
</message>
<message>
<source>Dry Gain:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stages</source>
<translation type="unfinished"/>
</message>
<message>
<source>Lowpass stages:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Swap inputs</source>
<translation type="unfinished"/>
</message>
<message>
<source>Swap left and right input channel for reflections</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>NesInstrument</name>
<message>
<source>Channel 1 Coarse detune</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 1 Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 1 Envelope length</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 1 Duty cycle</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 1 Sweep amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 1 Sweep rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 Coarse detune</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 Envelope length</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 Duty cycle</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 Sweep amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 Sweep rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 3 Coarse detune</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 3 Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 4 Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 4 Envelope length</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 4 Noise frequency</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 4 Noise frequency sweep</source>
<translation type="unfinished"/>
</message>
<message>
<source>Master volume</source>
<translation>Huvudvolym</translation>
</message>
<message>
<source>Vibrato</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>NesInstrumentView</name>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>Coarse detune</source>
<translation type="unfinished"/>
</message>
<message>
<source>Envelope length</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable channel 1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable envelope 1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable envelope 1 loop</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable sweep 1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sweep amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sweep rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>12.5% Duty cycle</source>
<translation type="unfinished"/>
</message>
<message>
<source>25% Duty cycle</source>
<translation type="unfinished"/>
</message>
<message>
<source>50% Duty cycle</source>
<translation type="unfinished"/>
</message>
<message>
<source>75% Duty cycle</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable channel 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable envelope 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable envelope 2 loop</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable sweep 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable channel 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Noise Frequency</source>
<translation type="unfinished"/>
</message>
<message>
<source>Frequency sweep</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable channel 4</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable envelope 4</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable envelope 4 loop</source>
<translation type="unfinished"/>
</message>
<message>
<source>Quantize noise frequency when using note frequency</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use note frequency for noise</source>
<translation type="unfinished"/>
</message>
<message>
<source>Noise mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>Master Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vibrato</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OscillatorObject</name>
<message>
<source>Osc %1 volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 panning</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 coarse detuning</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 fine detuning left</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 fine detuning right</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 phase-offset</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 stereo phase-detuning</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 wave shape</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulation type %1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 waveform</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 harmonic</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PatchesDialog</name>
<message>
<source>Qsynth: Channel Preset</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bank selector</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bank</source>
<translation>Bank</translation>
</message>
<message>
<source>Program selector</source>
<translation type="unfinished"/>
</message>
<message>
<source>Patch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Name</source>
<translation>Namn</translation>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Cancel</source>
<translation>Avbryt</translation>
</message>
</context>
<context>
<name>PatmanView</name>
<message>
<source>Open other patch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to open another patch-file. Loop and Tune settings are not reset.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Loop</source>
<translation type="unfinished"/>
</message>
<message>
<source>Loop mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>Here you can toggle the Loop mode. If enabled, PatMan will use the loop information available in the file.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tune</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tune mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>Here you can toggle the Tune mode. If enabled, PatMan will tune the sample to match the note's frequency.</source>
<translation type="unfinished"/>
</message>
<message>
<source>No file selected</source>
<translation>Ingen fil vald</translation>
</message>
<message>
<source>Open patch file</source>
<translation>Öppna patch-fil</translation>
</message>
<message>
<source>Patch-Files (*.pat)</source>
<translation>Patch-filer (*.pat)</translation>
</message>
</context>
<context>
<name>PatternView</name>
<message>
<source>Open in piano-roll</source>
<translation>Öppna i pianorulle</translation>
</message>
<message>
<source>Clear all notes</source>
<translation>Rensa alla noter</translation>
</message>
<message>
<source>Reset name</source>
<translation>Nollställ namn</translation>
</message>
<message>
<source>Change name</source>
<translation>Byt namn</translation>
</message>
<message>
<source>Add steps</source>
<translation>Lägg till steg</translation>
</message>
<message>
<source>Remove steps</source>
<translation>Ta bort steg</translation>
</message>
<message>
<source>use mouse wheel to set velocity of a step</source>
<translation type="unfinished"/>
</message>
<message>
<source>double-click to open in Piano Roll</source>
<translation>Dubbelklicka för att öppna i Pianorulle</translation>
</message>
<message>
<source>Clone Steps</source>
<translation>Klona steg</translation>
</message>
</context>
<context>
<name>PeakController</name>
<message>
<source>Peak Controller</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak Controller Bug</source>
<translation type="unfinished"/>
</message>
<message>
<source>Due to a bug in older version of LMMS, the peak controllers may not be connect properly. Please ensure that peak controllers are connected properly and re-save this file. Sorry for any inconvenience caused.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PeakControllerDialog</name>
<message>
<source>PEAK</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO Controller</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PeakControllerEffectControlDialog</name>
<message>
<source>BASE</source>
<translation type="unfinished"/>
</message>
<message>
<source>Base amount:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulation amount:</source>
<translation>Moduleringsmängd:</translation>
</message>
<message>
<source>Attack:</source>
<translation>Attack:</translation>
</message>
<message>
<source>Release:</source>
<translation>Release:</translation>
</message>
<message>
<source>AMNT</source>
<translation type="unfinished"/>
</message>
<message>
<source>MULT</source>
<translation type="unfinished"/>
</message>
<message>
<source>Amount Multiplicator:</source>
<translation type="unfinished"/>
</message>
<message>
<source>ATCK</source>
<translation type="unfinished"/>
</message>
<message>
<source>DCAY</source>
<translation type="unfinished"/>
</message>
<message>
<source>Treshold:</source>
<translation type="unfinished"/>
</message>
<message>
<source>TRSH</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PeakControllerEffectControls</name>
<message>
<source>Base value</source>
<translation>Basvärde</translation>
</message>
<message>
<source>Modulation amount</source>
<translation>Moduleringsmängd</translation>
</message>
<message>
<source>Mute output</source>
<translation>Tysta utgångs-ljud</translation>
</message>
<message>
<source>Attack</source>
<translation>Attack</translation>
</message>
<message>
<source>Release</source>
<translation>Släpp</translation>
</message>
<message>
<source>Abs Value</source>
<translation>Abs-värde</translation>
</message>
<message>
<source>Amount Multiplicator</source>
<translation type="unfinished"/>
</message>
<message>
<source>Treshold</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PianoRoll</name>
<message>
<source>Please open a pattern by double-clicking on it!</source>
<translation>Dubbelklicka för att öppna ett mönster!</translation>
</message>
<message>
<source>Last note</source>
<translation>Senaste noten</translation>
</message>
<message>
<source>Note lock</source>
<translation type="unfinished"/>
</message>
<message>
<source>Note Velocity</source>
<translation type="unfinished"/>
</message>
<message>
<source>Note Panning</source>
<translation>Not-panorering</translation>
</message>
<message>
<source>Mark/unmark current semitone</source>
<translation>Markera/avmarkera nuvarande halvton</translation>
</message>
<message>
<source>Mark current scale</source>
<translation>Markera nuvarande skala</translation>
</message>
<message>
<source>Mark current chord</source>
<translation>Markera nuvarande ackord</translation>
</message>
<message>
<source>Unmark all</source>
<translation>Avmarkera allt</translation>
</message>
<message>
<source>No scale</source>
<translation>Ingen skala</translation>
</message>
<message>
<source>No chord</source>
<translation>Inget ackord</translation>
</message>
<message>
<source>Velocity: %1%</source>
<translation type="unfinished"/>
</message>
<message>
<source>Panning: %1% left</source>
<translation>Panorering: %1% vänster</translation>
</message>
<message>
<source>Panning: %1% right</source>
<translation>Panorering: %1% höger</translation>
</message>
<message>
<source>Panning: center</source>
<translation>Panorering: center</translation>
</message>
<message>
<source>Please enter a new value between %1 and %2:</source>
<translation>Ange ett nytt värde mellan %1 och %2:</translation>
</message>
<message>
<source>Mark/unmark all corresponding octave semitones</source>
<translation type="unfinished"/>
</message>
<message>
<source>Select all notes on this key</source>
<translation>Välj alla noter på denna tangent</translation>
</message>
</context>
<context>
<name>PianoRollWindow</name>
<message>
<source>Play/pause current pattern (Space)</source>
<translation>Spela/pausa aktuellt mönster (mellanslag)</translation>
</message>
<message>
<source>Record notes from MIDI-device/channel-piano</source>
<translation>Spela in noter från MIDI-enhet/kanal-piano</translation>
</message>
<message>
<source>Record notes from MIDI-device/channel-piano while playing song or BB track</source>
<translation>Spela in noter från MIDI-enhet/kanal-piano medan sång eller BB-spår spelas</translation>
</message>
<message>
<source>Stop playing of current pattern (Space)</source>
<translation>Sluta spela aktuellt mönster (mellanslag)</translation>
</message>
<message>
<source>Click here to play the current pattern. This is useful while editing it. The pattern is automatically looped when its end is reached.</source>
<translation>Klicka här för att spela det aktuella mönstret. Detta är användbart när man redigerar. Mönstret spelas från början igen när det nått sitt slut.</translation>
</message>
<message>
<source>Click here to record notes from a MIDI-device or the virtual test-piano of the according channel-window to the current pattern. When recording all notes you play will be written to this pattern and you can play and edit them afterwards.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to record notes from a MIDI-device or the virtual test-piano of the according channel-window to the current pattern. When recording all notes you play will be written to this pattern and you will hear the song or BB track in the background.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to stop playback of current pattern.</source>
<translation>Klicka här för att stoppa uppspelning av det aktuella mönstret.</translation>
</message>
<message>
<source>Draw mode (Shift+D)</source>
<translation>Ritläge (Shift+D)</translation>
</message>
<message>
<source>Erase mode (Shift+E)</source>
<translation>Suddläge (Shift+E)</translation>
</message>
<message>
<source>Select mode (Shift+S)</source>
<translation>Markeringsläge (Shift+S)</translation>
</message>
<message>
<source>Detune mode (Shift+T)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here and draw mode will be activated. In this mode you can add, resize and move notes. This is the default mode which is used most of the time. You can also press 'Shift+D' on your keyboard to activate this mode. In this mode, hold %1 to temporarily go into select mode.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here and erase mode will be activated. In this mode you can erase notes. You can also press 'Shift+E' on your keyboard to activate this mode.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here and select mode will be activated. In this mode you can select notes. Alternatively, you can hold %1 in draw mode to temporarily use select mode.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here and detune mode will be activated. In this mode you can click a note to open its automation detuning. You can utilize this to slide notes from one to another. You can also press 'Shift+T' on your keyboard to activate this mode.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Cut selected notes (%1+X)</source>
<translation>Klipp ut valda noter (%1+X)</translation>
</message>
<message>
<source>Copy selected notes (%1+C)</source>
<translation>Kopiera valda noter (%1+C)</translation>
</message>
<message>
<source>Paste notes from clipboard (%1+V)</source>
<translation>Klistra in noter (%1+V)</translation>
</message>
<message>
<source>Click here and the selected notes will be cut into the clipboard. You can paste them anywhere in any pattern by clicking on the paste button.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here and the selected notes will be copied into the clipboard. You can paste them anywhere in any pattern by clicking on the paste button.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here and the notes from the clipboard will be pasted at the first visible measure.</source>
<translation type="unfinished"/>
</message>
<message>
<source>This controls the magnification of an axis. It can be helpful to choose magnification for a specific task. For ordinary editing, the magnification should be fitted to your smallest notes. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The 'Q' stands for quantization, and controls the grid size notes and control points snap to. With smaller quantization values, you can draw shorter notes in Piano Roll, and more exact control points in the Automation Editor.</source>
<translation type="unfinished"/>
</message>
<message>
<source>This lets you select the length of new notes. 'Last Note' means that LMMS will use the note length of the note you last edited</source>
<translation type="unfinished"/>
</message>
<message>
<source>The feature is directly connected to the context-menu on the virtual keyboard, to the left in Piano Roll. After you have chosen the scale you want in this drop-down menu, you can right click on a desired key in the virtual keyboard, and then choose 'Mark current Scale'. LMMS will highlight all notes that belongs to the chosen scale, and in the key you have selected!</source>
<translation type="unfinished"/>
</message>
<message>
<source>Let you select a chord which LMMS then can draw or highlight.You can find the most common chords in this drop-down menu. After you have selected a chord, click anywhere to place the chord, and right click on the virtual keyboard to open context menu and highlight the chord. To return to single note placement, you need to choose 'No chord' in this drop-down menu.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Edit actions</source>
<translation>Redigera åtgärder</translation>
</message>
<message>
<source>Copy paste controls</source>
<translation type="unfinished"/>
</message>
<message>
<source>Timeline controls</source>
<translation>Tidslinjekontroller</translation>
</message>
<message>
<source>Zoom and note controls</source>
<translation type="unfinished"/>
</message>
<message>
<source>Piano-Roll - %1</source>
<translation>Pianorulle - %1</translation>
</message>
<message>
<source>Piano-Roll - no pattern</source>
<translation>Pianorulle - inget mönster</translation>
</message>
<message>
<source>Quantize</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PianoView</name>
<message>
<source>Base note</source>
<translation>Bas-not</translation>
</message>
</context>
<context>
<name>Plugin</name>
<message>
<source>Plugin not found</source>
<translation>Plugin hittades inte</translation>
</message>
<message>
<source>The plugin "%1" wasn't found or could not be loaded!
Reason: "%2"</source>
<translation>Pluginet "%1" hittades inte eller kunde inte laddas!
Orsak: "%2"</translation>
</message>
<message>
<source>Error while loading plugin</source>
<translation>Fel vid inläsning av plugin</translation>
</message>
<message>
<source>Failed to load plugin "%1"!</source>
<translation>Misslyckades att läsa in plugin "%1"!</translation>
</message>
</context>
<context>
<name>PluginBrowser</name>
<message>
<source>Instrument browser</source>
<translation type="unfinished"/>
</message>
<message>
<source>Drag an instrument into either the Song-Editor, the Beat+Bassline Editor or into an existing instrument track.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Instrument Plugins</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PluginFactory</name>
<message>
<source>Plugin not found.</source>
<translation>Plugin hittades inte.</translation>
</message>
<message>
<source>LMMS plugin %1 does not have a plugin descriptor named %2!</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ProjectNotes</name>
<message>
<source>Project notes</source>
<translation>Projektanteckningar</translation>
</message>
<message>
<source>Put down your project notes here.</source>
<translation>Skriv ner dina anteckningar för projektet här.</translation>
</message>
<message>
<source>Edit Actions</source>
<translation>Redigera åtgärder</translation>
</message>
<message>
<source>&Undo</source>
<translation>&Ångra</translation>
</message>
<message>
<source>%1+Z</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Redo</source>
<translation>&Gör om</translation>
</message>
<message>
<source>%1+Y</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Copy</source>
<translation>&Kopiera</translation>
</message>
<message>
<source>%1+C</source>
<translation type="unfinished"/>
</message>
<message>
<source>Cu&t</source>
<translation>Klipp u&t</translation>
</message>
<message>
<source>%1+X</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Paste</source>
<translation>&Klistra in</translation>
</message>
<message>
<source>%1+V</source>
<translation>%1+V</translation>
</message>
<message>
<source>Format Actions</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Bold</source>
<translation>&Fet</translation>
</message>
<message>
<source>%1+B</source>
<translation>%1+B</translation>
</message>
<message>
<source>&Italic</source>
<translation>&Kursiv</translation>
</message>
<message>
<source>%1+I</source>
<translation>%1+I</translation>
</message>
<message>
<source>&Underline</source>
<translation>&Understruken</translation>
</message>
<message>
<source>%1+U</source>
<translation>%1+U</translation>
</message>
<message>
<source>&Left</source>
<translation>&Vänster</translation>
</message>
<message>
<source>%1+L</source>
<translation>%1+L</translation>
</message>
<message>
<source>C&enter</source>
<translation>C&entrera</translation>
</message>
<message>
<source>%1+E</source>
<translation>%1+E</translation>
</message>
<message>
<source>&Right</source>
<translation>&Höger</translation>
</message>
<message>
<source>%1+R</source>
<translation>%1+R</translation>
</message>
<message>
<source>&Justify</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1+J</source>
<translation>%1+J</translation>
</message>
<message>
<source>&Color...</source>
<translation>&Färg...</translation>
</message>
</context>
<context>
<name>ProjectRenderer</name>
<message>
<source>WAV-File (*.wav)</source>
<translation>WAV-Fil (*.wav)</translation>
</message>
<message>
<source>Compressed OGG-File (*.ogg)</source>
<translation>Komprimerad OGG-Fil (*.ogg)</translation>
</message>
</context>
<context>
<name>QWidget</name>
<message>
<source>Name: </source>
<translation>Namn: </translation>
</message>
<message>
<source>Maker: </source>
<translation>Skapare: </translation>
</message>
<message>
<source>Copyright: </source>
<translation>Copyright: </translation>
</message>
<message>
<source>Requires Real Time: </source>
<translation type="unfinished"/>
</message>
<message>
<source>Yes</source>
<translation>Ja</translation>
</message>
<message>
<source>No</source>
<translation>Nej</translation>
</message>
<message>
<source>Real Time Capable: </source>
<translation type="unfinished"/>
</message>
<message>
<source>In Place Broken: </source>
<translation type="unfinished"/>
</message>
<message>
<source>Channels In: </source>
<translation>Kanaler In: </translation>
</message>
<message>
<source>Channels Out: </source>
<translation>Kanaler Ut: </translation>
</message>
<message>
<source>File: </source>
<translation>Fil: </translation>
</message>
<message>
<source>File: %1</source>
<translation>Fil: %1</translation>
</message>
</context>
<context>
<name>RenameDialog</name>
<message>
<source>Rename...</source>
<translation>Byt namn...</translation>
</message>
</context>
<context>
<name>SampleBuffer</name>
<message>
<source>Open audio file</source>
<translation>Öppna ljudfil</translation>
</message>
<message>
<source>Wave-Files (*.wav)</source>
<translation>Wave-Filer (*.wav)</translation>
</message>
<message>
<source>OGG-Files (*.ogg)</source>
<translation>OGG-Filer (*.ogg)</translation>
</message>
<message>
<source>DrumSynth-Files (*.ds)</source>
<translation type="unfinished"/>
</message>
<message>
<source>FLAC-Files (*.flac)</source>
<translation>FLAC-Filer (*.flac)</translation>
</message>
<message>
<source>SPEEX-Files (*.spx)</source>
<translation type="unfinished"/>
</message>
<message>
<source>VOC-Files (*.voc)</source>
<translation>VOC-Filer (*.voc)</translation>
</message>
<message>
<source>AIFF-Files (*.aif *.aiff)</source>
<translation>AIFF-Filer (*.aif *.aiff)</translation>
</message>
<message>
<source>AU-Files (*.au)</source>
<translation>AU-Filer (*.au)</translation>
</message>
<message>
<source>RAW-Files (*.raw)</source>
<translation>RAW-Filer (*.raw)</translation>
</message>
<message>
<source>All Audio-Files (*.wav *.ogg *.ds *.flac *.spx *.voc *.aif *.aiff *.au *.raw)</source>
<translation>Alla Ljudfiler (*.wav *.ogg *.ds *.flac *.spx *.voc *.aif *.aiff *.au *.raw)</translation>
</message>
</context>
<context>
<name>SampleTCOView</name>
<message>
<source>double-click to select sample</source>
<translation>dubbelklicka för att välja ljudfil</translation>
</message>
<message>
<source>Delete (middle mousebutton)</source>
<translation>Ta bort (musens mitt-knapp)</translation>
</message>
<message>
<source>Cut</source>
<translation>Klipp ut</translation>
</message>
<message>
<source>Copy</source>
<translation>Kopiera</translation>
</message>
<message>
<source>Paste</source>
<translation>Klistra in</translation>
</message>
<message>
<source>Mute/unmute (<%1> + middle click)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SampleTrack</name>
<message>
<source>Sample track</source>
<translation>Ljudfils-spår</translation>
</message>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>Panning</source>
<translation>Panorering</translation>
</message>
</context>
<context>
<name>SampleTrackView</name>
<message>
<source>Track volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel volume:</source>
<translation type="unfinished"/>
</message>
<message>
<source>VOL</source>
<translation>VOL</translation>
</message>
<message>
<source>Panning</source>
<translation>Panorering</translation>
</message>
<message>
<source>Panning:</source>
<translation>Panorering:</translation>
</message>
<message>
<source>PAN</source>
<translation>PANORERA</translation>
</message>
</context>
<context>
<name>SetupDialog</name>
<message>
<source>Setup LMMS</source>
<translation>Ställ in LMMS</translation>
</message>
<message>
<source>General settings</source>
<translation type="unfinished"/>
</message>
<message>
<source>BUFFER SIZE</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reset to default-value</source>
<translation>Återställ till standardvärde</translation>
</message>
<message>
<source>MISC</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable tooltips</source>
<translation>Aktivera verktygs-tips</translation>
</message>
<message>
<source>Show restart warning after changing settings</source>
<translation type="unfinished"/>
</message>
<message>
<source>Display volume as dBFS </source>
<translation>Visa volym som dBFS </translation>
</message>
<message>
<source>Compress project files per default</source>
<translation>Komprimera projektfiler som standard</translation>
</message>
<message>
<source>One instrument track window mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>HQ-mode for output audio-device</source>
<translation type="unfinished"/>
</message>
<message>
<source>Compact track buttons</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sync VST plugins to host playback</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable note labels in piano roll</source>
<translation>Visa notnamn i pianorulle</translation>
</message>
<message>
<source>Enable waveform display by default</source>
<translation type="unfinished"/>
</message>
<message>
<source>Keep effects running even without input</source>
<translation type="unfinished"/>
</message>
<message>
<source>Create backup file when saving a project</source>
<translation>Skapa en backup-fil när ett projekt sparas</translation>
</message>
<message>
<source>LANGUAGE</source>
<translation>SPRÅK</translation>
</message>
<message>
<source>Paths</source>
<translation>Sökvägar</translation>
</message>
<message>
<source>LMMS working directory</source>
<translation>LMMS arbetskatalog</translation>
</message>
<message>
<source>VST-plugin directory</source>
<translation>Katalog för VST-plugin</translation>
</message>
<message>
<source>Background artwork</source>
<translation type="unfinished"/>
</message>
<message>
<source>STK rawwave directory</source>
<translation>Katalog för STK rå-vågform</translation>
</message>
<message>
<source>Default Soundfont File</source>
<translation type="unfinished"/>
</message>
<message>
<source>Performance settings</source>
<translation type="unfinished"/>
</message>
<message>
<source>UI effects vs. performance</source>
<translation type="unfinished"/>
</message>
<message>
<source>Smooth scroll in Song Editor</source>
<translation>Mjuk rullning i Sång-editorn</translation>
</message>
<message>
<source>Show playback cursor in AudioFileProcessor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Audio settings</source>
<translation type="unfinished"/>
</message>
<message>
<source>AUDIO INTERFACE</source>
<translation type="unfinished"/>
</message>
<message>
<source>MIDI settings</source>
<translation>MIDI-inställningar</translation>
</message>
<message>
<source>MIDI INTERFACE</source>
<translation type="unfinished"/>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Cancel</source>
<translation>Avbryt</translation>
</message>
<message>
<source>Restart LMMS</source>
<translation>Starta om LMMS</translation>
</message>
<message>
<source>Please note that most changes won't take effect until you restart LMMS!</source>
<translation>Många av ändringarna kommer inte gälla förrän LMMS startats om!</translation>
</message>
<message>
<source>Frames: %1
Latency: %2 ms</source>
<translation type="unfinished"/>
</message>
<message>
<source>Here you can setup the internal buffer-size used by LMMS. Smaller values result in a lower latency but also may cause unusable sound or bad performance, especially on older computers or systems with a non-realtime kernel.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Choose LMMS working directory</source>
<translation>Välj arbetskatalog för LMMS</translation>
</message>
<message>
<source>Choose your VST-plugin directory</source>
<translation>Välj katalog för dina VST-plugin</translation>
</message>
<message>
<source>Choose artwork-theme directory</source>
<translation>Välj katalog för gränssnitts-tema</translation>
</message>
<message>
<source>Choose LADSPA plugin directory</source>
<translation>Välj katalog för LADSPA-plugin</translation>
</message>
<message>
<source>Choose STK rawwave directory</source>
<translation>Välj katalog för STK-råfiler</translation>
</message>
<message>
<source>Choose default SoundFont</source>
<translation>Välj standard-SoundFont</translation>
</message>
<message>
<source>Choose background artwork</source>
<translation>Välj bakgrunds-grafik</translation>
</message>
<message>
<source>Here you can select your preferred audio-interface. Depending on the configuration of your system during compilation time you can choose between ALSA, JACK, OSS and more. Below you see a box which offers controls to setup the selected audio-interface.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Here you can select your preferred MIDI-interface. Depending on the configuration of your system during compilation time you can choose between ALSA, OSS and more. Below you see a box which offers controls to setup the selected MIDI-interface.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reopen last project on start</source>
<translation type="unfinished"/>
</message>
<message>
<source>Directories</source>
<translation>Kataloger</translation>
</message>
<message>
<source>Themes directory</source>
<translation>Katalog för teman</translation>
</message>
<message>
<source>GIG directory</source>
<translation>Katalog för GIG-filer</translation>
</message>
<message>
<source>SF2 directory</source>
<translation>Katalog för SF2-filer</translation>
</message>
<message>
<source>LADSPA plugin directories</source>
<translation>Kataloger för LADSPA-plugin</translation>
</message>
<message>
<source>Auto save</source>
<translation>Autospara</translation>
</message>
<message>
<source>Choose your GIG directory</source>
<translation>Välj din GIG-katalog</translation>
</message>
<message>
<source>Choose your SF2 directory</source>
<translation>Välj din SF2-katalog</translation>
</message>
<message>
<source>minutes</source>
<translation>minuter</translation>
</message>
<message>
<source>minute</source>
<translation>minut</translation>
</message>
<message>
<source>Enable auto-save</source>
<translation type="unfinished"/>
</message>
<message>
<source>Allow auto-save while playing</source>
<translation type="unfinished"/>
</message>
<message>
<source>Disabled</source>
<translation type="unfinished"/>
</message>
<message>
<source>Auto-save interval: %1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Set the time between automatic backup to %1.
Remember to also save your project manually. You can choose to disable saving while playing, something some older systems find difficult.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>Song</name>
<message>
<source>Tempo</source>
<translation>Tempo</translation>
</message>
<message>
<source>Master volume</source>
<translation>Huvudvolym</translation>
</message>
<message>
<source>Master pitch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Project saved</source>
<translation>Projekt sparat</translation>
</message>
<message>
<source>The project %1 is now saved.</source>
<translation>Projektet %1 är nu sparat.</translation>
</message>
<message>
<source>Project NOT saved.</source>
<translation>Projektet är INTE sparat.</translation>
</message>
<message>
<source>The project %1 was not saved!</source>
<translation>Projektet %1 sparades inte!</translation>
</message>
<message>
<source>Import file</source>
<translation>Importera fil</translation>
</message>
<message>
<source>MIDI sequences</source>
<translation>MIDI-sekvenser</translation>
</message>
<message>
<source>Hydrogen projects</source>
<translation type="unfinished"/>
</message>
<message>
<source>All file types</source>
<translation>Alla filtyper</translation>
</message>
<message>
<source>Empty project</source>
<translation>Tomt projekt</translation>
</message>
<message>
<source>This project is empty so exporting makes no sense. Please put some items into Song Editor first!</source>
<translation>Projektet är tomt, export är meningslöst. Skapa något i Sång-editorn innan du exporterar!</translation>
</message>
<message>
<source>Select directory for writing exported tracks...</source>
<translation type="unfinished"/>
</message>
<message>
<source>untitled</source>
<translation>namnlös</translation>
</message>
<message>
<source>Select file for project-export...</source>
<translation type="unfinished"/>
</message>
<message>
<source>The following errors occured while loading: </source>
<translation type="unfinished"/>
</message>
<message>
<source>MIDI File (*.mid)</source>
<translation>MIDI-fil (*.mid)</translation>
</message>
<message>
<source>LMMS Error report</source>
<translation>LMMS Felrapport</translation>
</message>
<message>
<source>Save project</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SongEditor</name>
<message>
<source>Could not open file</source>
<translation>Kunde inte öppna fil</translation>
</message>
<message>
<source>Could not write file</source>
<translation>Kunde inte skriva fil</translation>
</message>
<message>
<source>Could not open file %1. You probably have no permissions to read this file.
Please make sure to have at least read permissions to the file and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Error in file</source>
<translation>Fil-fel</translation>
</message>
<message>
<source>The file %1 seems to contain errors and therefore can't be loaded.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tempo</source>
<translation>Tempo</translation>
</message>
<message>
<source>TEMPO/BPM</source>
<translation>TEMPO/BPM</translation>
</message>
<message>
<source>tempo of song</source>
<translation>Sångtempo</translation>
</message>
<message>
<source>The tempo of a song is specified in beats per minute (BPM). If you want to change the tempo of your song, change this value. Every measure has four beats, so the tempo in BPM specifies, how many measures / 4 should be played within a minute (or how many measures should be played within four minutes).</source>
<translation type="unfinished"/>
</message>
<message>
<source>High quality mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>Master volume</source>
<translation>Huvudvolym</translation>
</message>
<message>
<source>master volume</source>
<translation>huvudvolym</translation>
</message>
<message>
<source>Master pitch</source>
<translation type="unfinished"/>
</message>
<message>
<source>master pitch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Value: %1%</source>
<translation>Värde: %1%</translation>
</message>
<message>
<source>Value: %1 semitones</source>
<translation>Värde: %1 halvtoner</translation>
</message>
<message>
<source>Could not open %1 for writing. You probably are not permitted to write to this file. Please make sure you have write-access to the file and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<source>template</source>
<translation>mall</translation>
</message>
<message>
<source>project</source>
<translation>projekt</translation>
</message>
<message>
<source>Version difference</source>
<translation>Versions-skillnad</translation>
</message>
<message>
<source>This %1 was created with LMMS %2.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SongEditorWindow</name>
<message>
<source>Song-Editor</source>
<translation>Sång-Editor</translation>
</message>
<message>
<source>Play song (Space)</source>
<translation>Spela sång (Mellanslag)</translation>
</message>
<message>
<source>Record samples from Audio-device</source>
<translation type="unfinished"/>
</message>
<message>
<source>Record samples from Audio-device while playing song or BB track</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stop song (Space)</source>
<translation>Sluta spela sång (Mellanslag)</translation>
</message>
<message>
<source>Add beat/bassline</source>
<translation>Lägg till trummor/bas</translation>
</message>
<message>
<source>Add sample-track</source>
<translation>Lägg till ljudfils-spår</translation>
</message>
<message>
<source>Add automation-track</source>
<translation>Lägg till automationsspår</translation>
</message>
<message>
<source>Draw mode</source>
<translation>Ritläge</translation>
</message>
<message>
<source>Edit mode (select and move)</source>
<translation>Redigeringsläge (välj och flytta)</translation>
</message>
<message>
<source>Click here, if you want to play your whole song. Playing will be started at the song-position-marker (green). You can also move it while playing.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here, if you want to stop playing of your song. The song-position-marker will be set to the start of your song.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Track actions</source>
<translation type="unfinished"/>
</message>
<message>
<source>Edit actions</source>
<translation>Redigera åtgärder</translation>
</message>
<message>
<source>Timeline controls</source>
<translation>Tidslinjekontroller</translation>
</message>
<message>
<source>Zoom controls</source>
<translation>Zoomningskontroller</translation>
</message>
</context>
<context>
<name>SpectrumAnalyzerControlDialog</name>
<message>
<source>Linear spectrum</source>
<translation type="unfinished"/>
</message>
<message>
<source>Linear Y axis</source>
<translation>Linjär Y-axel</translation>
</message>
</context>
<context>
<name>SpectrumAnalyzerControls</name>
<message>
<source>Linear spectrum</source>
<translation type="unfinished"/>
</message>
<message>
<source>Linear Y axis</source>
<translation>Linjär Y-axel</translation>
</message>
<message>
<source>Channel mode</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SubWindow</name>
<message>
<source>Close</source>
<translation>Stäng</translation>
</message>
<message>
<source>Maximize</source>
<translation>Maximera</translation>
</message>
<message>
<source>Restore</source>
<translation>Återställ</translation>
</message>
</context>
<context>
<name>TabWidget</name>
<message>
<source>Settings for %1</source>
<translation>Inställningar för %1</translation>
</message>
</context>
<context>
<name>TempoSyncKnob</name>
<message>
<source>Tempo Sync</source>
<translation type="unfinished"/>
</message>
<message>
<source>No Sync</source>
<translation type="unfinished"/>
</message>
<message>
<source>Eight beats</source>
<translation>Åtta slag</translation>
</message>
<message>
<source>Whole note</source>
<translation>Helnot</translation>
</message>
<message>
<source>Half note</source>
<translation>Halvnot</translation>
</message>
<message>
<source>Quarter note</source>
<translation type="unfinished"/>
</message>
<message>
<source>8th note</source>
<translation type="unfinished"/>
</message>
<message>
<source>16th note</source>
<translation type="unfinished"/>
</message>
<message>
<source>32nd note</source>
<translation type="unfinished"/>
</message>
<message>
<source>Custom...</source>
<translation type="unfinished"/>
</message>
<message>
<source>Custom </source>
<translation type="unfinished"/>
</message>
<message>
<source>Synced to Eight Beats</source>
<translation type="unfinished"/>
</message>
<message>
<source>Synced to Whole Note</source>
<translation type="unfinished"/>
</message>
<message>
<source>Synced to Half Note</source>
<translation type="unfinished"/>
</message>
<message>
<source>Synced to Quarter Note</source>
<translation type="unfinished"/>
</message>
<message>
<source>Synced to 8th Note</source>
<translation type="unfinished"/>
</message>
<message>
<source>Synced to 16th Note</source>
<translation type="unfinished"/>
</message>
<message>
<source>Synced to 32nd Note</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TimeDisplayWidget</name>
<message>
<source>click to change time units</source>
<translation>Klicka för att ändra tidsenheter</translation>
</message>
<message>
<source>MIN</source>
<translation>MIN</translation>
</message>
<message>
<source>SEC</source>
<translation>SEK</translation>
</message>
<message>
<source>MSEC</source>
<translation>MSEK</translation>
</message>
<message>
<source>BAR</source>
<translation type="unfinished"/>
</message>
<message>
<source>BEAT</source>
<translation>SLAG</translation>
</message>
<message>
<source>TICK</source>
<translation>TICK</translation>
</message>
</context>
<context>
<name>TimeLineWidget</name>
<message>
<source>Enable/disable auto-scrolling</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable/disable loop-points</source>
<translation type="unfinished"/>
</message>
<message>
<source>After stopping go back to begin</source>
<translation type="unfinished"/>
</message>
<message>
<source>After stopping go back to position at which playing was started</source>
<translation type="unfinished"/>
</message>
<message>
<source>After stopping keep position</source>
<translation type="unfinished"/>
</message>
<message>
<source>Hint</source>
<translation>Ledtråd</translation>
</message>
<message>
<source>Press <%1> to disable magnetic loop points.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Hold <Shift> to move the begin loop point; Press <%1> to disable magnetic loop points.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>Track</name>
<message>
<source>Mute</source>
<translation>Tysta</translation>
</message>
<message>
<source>Solo</source>
<translation>Solo</translation>
</message>
</context>
<context>
<name>TrackContainer</name>
<message>
<source>Couldn't import file</source>
<translation>Kunde inte importera filen</translation>
</message>
<message>
<source>Couldn't find a filter for importing file %1.
You should convert this file into a format supported by LMMS using another software.</source>
<translation>Kunde inte hitta ett filter för att importera filen %1.
Du bör konvertera filen till ett format som stöds av LMMS genom att använda ett annat program.</translation>
</message>
<message>
<source>Couldn't open file</source>
<translation>Kunde inte öppna filen</translation>
</message>
<message>
<source>Couldn't open file %1 for reading.
Please make sure you have read-permission to the file and the directory containing the file and try again!</source>
<translation>Kunde inte öppna filen %1 för läsning.
Se till att du har läsrättigheter för filen och katalogen som innehåller filen och försök igen!</translation>
</message>
<message>
<source>Loading project...</source>
<translation>Läser in projekt...</translation>
</message>
<message>
<source>Cancel</source>
<translation>Avbryt</translation>
</message>
<message>
<source>Please wait...</source>
<translation>Vänligen vänta...</translation>
</message>
<message>
<source>Importing MIDI-file...</source>
<translation>Importerar MIDI-fil...</translation>
</message>
</context>
<context>
<name>TrackContentObject</name>
<message>
<source>Mute</source>
<translation>Tysta</translation>
</message>
</context>
<context>
<name>TrackContentObjectView</name>
<message>
<source>Current position</source>
<translation>Aktuell position</translation>
</message>
<message>
<source>Hint</source>
<translation>Ledtråd</translation>
</message>
<message>
<source>Press <%1> and drag to make a copy.</source>
<translation>Håll nere <%1> och dra för att kopiera.</translation>
</message>
<message>
<source>Current length</source>
<translation>Aktuell längd</translation>
</message>
<message>
<source>Press <%1> for free resizing.</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1:%2 (%3:%4 to %5:%6)</source>
<translation>%1:%2 (%3:%4 till %5:%6)</translation>
</message>
<message>
<source>Delete (middle mousebutton)</source>
<translation>Ta bort (musens mitt-knapp)</translation>
</message>
<message>
<source>Cut</source>
<translation>Klipp ut</translation>
</message>
<message>
<source>Copy</source>
<translation>Kopiera</translation>
</message>
<message>
<source>Paste</source>
<translation>Klistra in</translation>
</message>
<message>
<source>Mute/unmute (<%1> + middle click)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TrackOperationsWidget</name>
<message>
<source>Press <%1> while clicking on move-grip to begin a new drag'n'drop-action.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Actions for this track</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mute</source>
<translation>Tysta</translation>
</message>
<message>
<source>Solo</source>
<translation>Solo</translation>
</message>
<message>
<source>Mute this track</source>
<translation>Tysta detta spår</translation>
</message>
<message>
<source>Clone this track</source>
<translation>Klona detta spår</translation>
</message>
<message>
<source>Remove this track</source>
<translation>Ta bort detta spår</translation>
</message>
<message>
<source>Clear this track</source>
<translation>Rensa detta spår</translation>
</message>
<message>
<source>FX %1: %2</source>
<translation>FX %1: %2</translation>
</message>
<message>
<source>Turn all recording on</source>
<translation type="unfinished"/>
</message>
<message>
<source>Turn all recording off</source>
<translation type="unfinished"/>
</message>
<message>
<source>Assign to new FX Channel</source>
<translation>Koppla till ny FX-kanal</translation>
</message>
</context>
<context>
<name>TripleOscillatorView</name>
<message>
<source>Use phase modulation for modulating oscillator 1 with oscillator 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use amplitude modulation for modulating oscillator 1 with oscillator 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix output of oscillator 1 & 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Synchronize oscillator 1 with oscillator 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use frequency modulation for modulating oscillator 1 with oscillator 2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use phase modulation for modulating oscillator 2 with oscillator 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use amplitude modulation for modulating oscillator 2 with oscillator 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix output of oscillator 2 & 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Synchronize oscillator 2 with oscillator 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use frequency modulation for modulating oscillator 2 with oscillator 3</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 volume:</source>
<translation type="unfinished"/>
</message>
<message>
<source>With this knob you can set the volume of oscillator %1. When setting a value of 0 the oscillator is turned off. Otherwise you can hear the oscillator as loud as you set it here.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 panning:</source>
<translation type="unfinished"/>
</message>
<message>
<source>With this knob you can set the panning of the oscillator %1. A value of -100 means 100% left and a value of 100 moves oscillator-output right.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 coarse detuning:</source>
<translation type="unfinished"/>
</message>
<message>
<source>semitones</source>
<translation>halvtoner</translation>
</message>
<message>
<source>With this knob you can set the coarse detuning of oscillator %1. You can detune the oscillator 24 semitones (2 octaves) up and down. This is useful for creating sounds with a chord.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 fine detuning left:</source>
<translation type="unfinished"/>
</message>
<message>
<source>cents</source>
<translation type="unfinished"/>
</message>
<message>
<source>With this knob you can set the fine detuning of oscillator %1 for the left channel. The fine-detuning is ranged between -100 cents and +100 cents. This is useful for creating "fat" sounds.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 fine detuning right:</source>
<translation type="unfinished"/>
</message>
<message>
<source>With this knob you can set the fine detuning of oscillator %1 for the right channel. The fine-detuning is ranged between -100 cents and +100 cents. This is useful for creating "fat" sounds.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 phase-offset:</source>
<translation type="unfinished"/>
</message>
<message>
<source>degrees</source>
<translation>grader</translation>
</message>
<message>
<source>With this knob you can set the phase-offset of oscillator %1. That means you can move the point within an oscillation where the oscillator begins to oscillate. For example if you have a sine-wave and have a phase-offset of 180 degrees the wave will first go down. It's the same with a square-wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 stereo phase-detuning:</source>
<translation type="unfinished"/>
</message>
<message>
<source>With this knob you can set the stereo phase-detuning of oscillator %1. The stereo phase-detuning specifies the size of the difference between the phase-offset of left and right channel. This is very good for creating wide stereo sounds.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a sine-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a triangle-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a saw-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a square-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a moog-like saw-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use an exponential wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use white-noise for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a user-defined waveform for current oscillator.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>VersionedSaveDialog</name>
<message>
<source>Increment version number</source>
<translation type="unfinished"/>
</message>
<message>
<source>Decrement version number</source>
<translation type="unfinished"/>
</message>
<message>
<source> already exists. Do you want to replace it?</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>VestigeInstrumentView</name>
<message>
<source>Open other VST-plugin</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here, if you want to open another VST-plugin. After clicking on this button, a file-open-dialog appears and you can select your file.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show/hide GUI</source>
<translation>Visa/dölj användargränssnitt</translation>
</message>
<message>
<source>Click here to show or hide the graphical user interface (GUI) of your VST-plugin.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Turn off all notes</source>
<translation type="unfinished"/>
</message>
<message>
<source>Open VST-plugin</source>
<translation>Öppna VST-plugin</translation>
</message>
<message>
<source>DLL-files (*.dll)</source>
<translation>DLL-filer (*.dll)</translation>
</message>
<message>
<source>EXE-files (*.exe)</source>
<translation>EXE-filer (*.exe)</translation>
</message>
<message>
<source>No VST-plugin loaded</source>
<translation type="unfinished"/>
</message>
<message>
<source>Control VST-plugin from LMMS host</source>
<translation>Kontrollera VST-plugin från LMMS-värd</translation>
</message>
<message>
<source>Click here, if you want to control VST-plugin from host.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Open VST-plugin preset</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here, if you want to open another *.fxp, *.fxb VST-plugin preset.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Previous (-)</source>
<translation>Tidigare (-)</translation>
</message>
<message>
<source>Click here, if you want to switch to another VST-plugin preset program.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Save preset</source>
<translation>Spara förinställning</translation>
</message>
<message>
<source>Click here, if you want to save current VST-plugin preset program.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Next (+)</source>
<translation>Nästa (+)</translation>
</message>
<message>
<source>Click here to select presets that are currently loaded in VST.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Preset</source>
<translation>Förinställning</translation>
</message>
<message>
<source>by </source>
<translation>av </translation>
</message>
<message>
<source> - VST plugin control</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>VisualizationWidget</name>
<message>
<source>click to enable/disable visualization of master-output</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click to enable</source>
<translation>Klicka för att aktivera</translation>
</message>
</context>
<context>
<name>VstEffectControlDialog</name>
<message>
<source>Show/hide</source>
<translation>Visa/dölj</translation>
</message>
<message>
<source>Control VST-plugin from LMMS host</source>
<translation>Kontrollera VST-plugin från LMMS-värd</translation>
</message>
<message>
<source>Click here, if you want to control VST-plugin from host.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Open VST-plugin preset</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here, if you want to open another *.fxp, *.fxb VST-plugin preset.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Previous (-)</source>
<translation>Tidigare (-)</translation>
</message>
<message>
<source>Click here, if you want to switch to another VST-plugin preset program.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Next (+)</source>
<translation>Nästa (+)</translation>
</message>
<message>
<source>Click here to select presets that are currently loaded in VST.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Save preset</source>
<translation>Spara förinställning</translation>
</message>
<message>
<source>Click here, if you want to save current VST-plugin preset program.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Effect by: </source>
<translation>Effekt skapad av: </translation>
</message>
<message>
<source>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<br /></source>
<translation>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<br /></translation>
</message>
</context>
<context>
<name>VstPlugin</name>
<message>
<source>Loading plugin</source>
<translation>Laddar plugin</translation>
</message>
<message>
<source>Open Preset</source>
<translation>Öppna förinställning</translation>
</message>
<message>
<source>Vst Plugin Preset (*.fxp *.fxb)</source>
<translation type="unfinished"/>
</message>
<message>
<source>: default</source>
<translation>: standard</translation>
</message>
<message>
<source>"</source>
<translation>"</translation>
</message>
<message>
<source>'</source>
<translation>'</translation>
</message>
<message>
<source>Save Preset</source>
<translation>Spara förinställning</translation>
</message>
<message>
<source>.fxp</source>
<translation>.fxp</translation>
</message>
<message>
<source>.FXP</source>
<translation>.FXP</translation>
</message>
<message>
<source>.FXB</source>
<translation>.FXB</translation>
</message>
<message>
<source>.fxb</source>
<translation>.fxb</translation>
</message>
<message>
<source>Please wait while loading VST plugin...</source>
<translation>Vänta medan VST-plugin läses in...</translation>
</message>
<message>
<source>The VST plugin %1 could not be loaded.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WatsynInstrument</name>
<message>
<source>Volume A1</source>
<translation>Volym A1</translation>
</message>
<message>
<source>Volume A2</source>
<translation>Volym A2</translation>
</message>
<message>
<source>Volume B1</source>
<translation>Volym B1</translation>
</message>
<message>
<source>Volume B2</source>
<translation>Volym B2</translation>
</message>
<message>
<source>Panning A1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Panning A2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Panning B1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Panning B2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Freq. multiplier A1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Freq. multiplier A2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Freq. multiplier B1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Freq. multiplier B2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Left detune A1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Left detune A2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Left detune B1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Left detune B2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Right detune A1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Right detune A2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Right detune B1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Right detune B2</source>
<translation type="unfinished"/>
</message>
<message>
<source>A-B Mix</source>
<translation type="unfinished"/>
</message>
<message>
<source>A-B Mix envelope amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>A-B Mix envelope attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>A-B Mix envelope hold</source>
<translation type="unfinished"/>
</message>
<message>
<source>A-B Mix envelope decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>A1-B2 Crosstalk</source>
<translation type="unfinished"/>
</message>
<message>
<source>A2-A1 modulation</source>
<translation type="unfinished"/>
</message>
<message>
<source>B2-B1 modulation</source>
<translation type="unfinished"/>
</message>
<message>
<source>Selected graph</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WatsynView</name>
<message>
<source>Select oscillator A1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Select oscillator A2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Select oscillator B1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Select oscillator B2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix output of A2 to A1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulate amplitude of A1 with output of A2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Ring-modulate A1 and A2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulate phase of A1 with output of A2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix output of B2 to B1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulate amplitude of B1 with output of B2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Ring-modulate B1 and B2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulate phase of B1 with output of B2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Draw your own waveform here by dragging your mouse on this graph.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Load waveform</source>
<translation>Ladda vågform</translation>
</message>
<message>
<source>Click to load a waveform from a sample file</source>
<translation>Klicka för att ladda in en vågform från en ljudfil</translation>
</message>
<message>
<source>Phase left</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click to shift phase by -15 degrees</source>
<translation type="unfinished"/>
</message>
<message>
<source>Phase right</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click to shift phase by +15 degrees</source>
<translation type="unfinished"/>
</message>
<message>
<source>Normalize</source>
<translation>Normalisera</translation>
</message>
<message>
<source>Click to normalize</source>
<translation>Klicka för normalisering</translation>
</message>
<message>
<source>Invert</source>
<translation>Invertera</translation>
</message>
<message>
<source>Click to invert</source>
<translation>Klicka för invertering</translation>
</message>
<message>
<source>Smooth</source>
<translation>Utjämna</translation>
</message>
<message>
<source>Click to smooth</source>
<translation>Klicka för utjämning</translation>
</message>
<message>
<source>Sine wave</source>
<translation>Sinusvåg</translation>
</message>
<message>
<source>Click for sine wave</source>
<translation>Klicka för sinusvåg</translation>
</message>
<message>
<source>Triangle wave</source>
<translation>Triangelvåg</translation>
</message>
<message>
<source>Click for triangle wave</source>
<translation>Klicka för triangelvåg</translation>
</message>
<message>
<source>Click for saw wave</source>
<translation>Klicka för sågtandvåg</translation>
</message>
<message>
<source>Square wave</source>
<translation>Fyrkantvåg</translation>
</message>
<message>
<source>Click for square wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>Panning</source>
<translation>Panorering</translation>
</message>
<message>
<source>Freq. multiplier</source>
<translation type="unfinished"/>
</message>
<message>
<source>Left detune</source>
<translation type="unfinished"/>
</message>
<message>
<source> cents</source>
<translation type="unfinished"/>
</message>
<message>
<source>Right detune</source>
<translation type="unfinished"/>
</message>
<message>
<source>A-B Mix</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix envelope amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix envelope attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix envelope hold</source>
<translation type="unfinished"/>
</message>
<message>
<source>Mix envelope decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Crosstalk</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ZynAddSubFxInstrument</name>
<message>
<source>Portamento</source>
<translation type="unfinished"/>
</message>
<message>
<source>Filter Frequency</source>
<translation type="unfinished"/>
</message>
<message>
<source>Filter Resonance</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandwidth</source>
<translation>Bandbredd</translation>
</message>
<message>
<source>FM Gain</source>
<translation>FM-Förstärkning</translation>
</message>
<message>
<source>Resonance Center Frequency</source>
<translation type="unfinished"/>
</message>
<message>
<source>Resonance Bandwidth</source>
<translation type="unfinished"/>
</message>
<message>
<source>Forward MIDI Control Change Events</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ZynAddSubFxView</name>
<message>
<source>Show GUI</source>
<translation>Visa användargränssnitt</translation>
</message>
<message>
<source>Click here to show or hide the graphical user interface (GUI) of ZynAddSubFX.</source>
<translation>Klicka här för att visa eller dölja användargränssnittet för ZynAddSubFX.</translation>
</message>
<message>
<source>Portamento:</source>
<translation>Portamento:</translation>
</message>
<message>
<source>PORT</source>
<translation type="unfinished"/>
</message>
<message>
<source>Filter Frequency:</source>
<translation>Filter-frekvens:</translation>
</message>
<message>
<source>FREQ</source>
<translation>FREQ</translation>
</message>
<message>
<source>Filter Resonance:</source>
<translation>Filter-resonans:</translation>
</message>
<message>
<source>RES</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandwidth:</source>
<translation>Bandbredd:</translation>
</message>
<message>
<source>BW</source>
<translation type="unfinished"/>
</message>
<message>
<source>FM Gain:</source>
<translation>FM-Förstärkning:</translation>
</message>
<message>
<source>FM GAIN</source>
<translation type="unfinished"/>
</message>
<message>
<source>Resonance center frequency:</source>
<translation type="unfinished"/>
</message>
<message>
<source>RES CF</source>
<translation type="unfinished"/>
</message>
<message>
<source>Resonance bandwidth:</source>
<translation type="unfinished"/>
</message>
<message>
<source>RES BW</source>
<translation type="unfinished"/>
</message>
<message>
<source>Forward MIDI Control Changes</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>audioFileProcessor</name>
<message>
<source>Amplify</source>
<translation>Amplifiera</translation>
</message>
<message>
<source>Start of sample</source>
<translation>Start på ljudfil</translation>
</message>
<message>
<source>End of sample</source>
<translation>Slut på ljudfil</translation>
</message>
<message>
<source>Reverse sample</source>
<translation>Spela baklänges</translation>
</message>
<message>
<source>Stutter</source>
<translation type="unfinished"/>
</message>
<message>
<source>Loopback point</source>
<translation type="unfinished"/>
</message>
<message>
<source>Loop mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>Interpolation mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>None</source>
<translation type="unfinished"/>
</message>
<message>
<source>Linear</source>
<translation>Linjär</translation>
</message>
<message>
<source>Sinc</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sample not found: %1</source>
<translation>Ljudfil hittades inte: %1</translation>
</message>
</context>
<context>
<name>bitInvader</name>
<message>
<source>Samplelength</source>
<translation>Ljudfilslängd</translation>
</message>
</context>
<context>
<name>bitInvaderView</name>
<message>
<source>Sample Length</source>
<translation>Ljudfilens längd</translation>
</message>
<message>
<source>Sine wave</source>
<translation>Sinusvåg</translation>
</message>
<message>
<source>Triangle wave</source>
<translation>Triangelvåg</translation>
</message>
<message>
<source>Saw wave</source>
<translation>Sågtandsvåg</translation>
</message>
<message>
<source>Square wave</source>
<translation>Fyrkantvåg</translation>
</message>
<message>
<source>White noise wave</source>
<translation>Vitt brus-våg</translation>
</message>
<message>
<source>User defined wave</source>
<translation>Användardefinierad vågform</translation>
</message>
<message>
<source>Smooth</source>
<translation>Utjämna</translation>
</message>
<message>
<source>Click here to smooth waveform.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Interpolation</source>
<translation>Interpolering</translation>
</message>
<message>
<source>Normalize</source>
<translation>Normalisera</translation>
</message>
<message>
<source>Draw your own waveform here by dragging your mouse on this graph.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click for a sine-wave.</source>
<translation>Klicka för sinusvåg.</translation>
</message>
<message>
<source>Click here for a triangle-wave.</source>
<translation>Klicka här för triangelvåg.</translation>
</message>
<message>
<source>Click here for a saw-wave.</source>
<translation>Klicka här för sågtandvåg.</translation>
</message>
<message>
<source>Click here for a square-wave.</source>
<translation>Klicka här för fyrkantvåg.</translation>
</message>
<message>
<source>Click here for white-noise.</source>
<translation>Klicka här för vitt brus.</translation>
</message>
<message>
<source>Click here for a user-defined shape.</source>
<translation>Klicka här för en användardefinierad kurva.</translation>
</message>
</context>
<context>
<name>dynProcControlDialog</name>
<message>
<source>INPUT</source>
<translation>INGÅNG</translation>
</message>
<message>
<source>Input gain:</source>
<translation>Ingångsförstärkning:</translation>
</message>
<message>
<source>OUTPUT</source>
<translation>UTGÅNG</translation>
</message>
<message>
<source>Output gain:</source>
<translation>Utgångsförstärkning:</translation>
</message>
<message>
<source>ATTACK</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak attack time:</source>
<translation type="unfinished"/>
</message>
<message>
<source>RELEASE</source>
<translation type="unfinished"/>
</message>
<message>
<source>Peak release time:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reset waveform</source>
<translation>Återställ vågform</translation>
</message>
<message>
<source>Click here to reset the wavegraph back to default</source>
<translation type="unfinished"/>
</message>
<message>
<source>Smooth waveform</source>
<translation>Mjuk vågform</translation>
</message>
<message>
<source>Click here to apply smoothing to wavegraph</source>
<translation type="unfinished"/>
</message>
<message>
<source>Increase wavegraph amplitude by 1dB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to increase wavegraph amplitude by 1dB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Decrease wavegraph amplitude by 1dB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to decrease wavegraph amplitude by 1dB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stereomode Maximum</source>
<translation type="unfinished"/>
</message>
<message>
<source>Process based on the maximum of both stereo channels</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stereomode Average</source>
<translation type="unfinished"/>
</message>
<message>
<source>Process based on the average of both stereo channels</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stereomode Unlinked</source>
<translation type="unfinished"/>
</message>
<message>
<source>Process each stereo channel independently</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>dynProcControls</name>
<message>
<source>Input gain</source>
<translation>Ingångsförstärkning</translation>
</message>
<message>
<source>Output gain</source>
<translation>Utgångsförstärkning</translation>
</message>
<message>
<source>Attack time</source>
<translation type="unfinished"/>
</message>
<message>
<source>Release time</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stereo mode</source>
<translation>Stereo-läge</translation>
</message>
</context>
<context>
<name>fxLineLcdSpinBox</name>
<message>
<source>Assign to:</source>
<translation>Tilldela till:</translation>
</message>
<message>
<source>New FX Channel</source>
<translation>Ny FX-kanal</translation>
</message>
</context>
<context>
<name>graphModel</name>
<message>
<source>Graph</source>
<translation>Graf</translation>
</message>
</context>
<context>
<name>kickerInstrument</name>
<message>
<source>Start frequency</source>
<translation>Startfrekvens</translation>
</message>
<message>
<source>End frequency</source>
<translation>Slutfrekvens</translation>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
<message>
<source>Length</source>
<translation>Längd</translation>
</message>
<message>
<source>Distortion Start</source>
<translation type="unfinished"/>
</message>
<message>
<source>Distortion End</source>
<translation type="unfinished"/>
</message>
<message>
<source>Envelope Slope</source>
<translation type="unfinished"/>
</message>
<message>
<source>Noise</source>
<translation>Brus</translation>
</message>
<message>
<source>Click</source>
<translation>Klick</translation>
</message>
<message>
<source>Frequency Slope</source>
<translation type="unfinished"/>
</message>
<message>
<source>Start from note</source>
<translation type="unfinished"/>
</message>
<message>
<source>End to note</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>kickerInstrumentView</name>
<message>
<source>Start frequency:</source>
<translation type="unfinished"/>
</message>
<message>
<source>End frequency:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Gain:</source>
<translation>Förstärkning:</translation>
</message>
<message>
<source>Frequency Slope:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Envelope Length:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Envelope Slope:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click:</source>
<translation>Klick:</translation>
</message>
<message>
<source>Noise:</source>
<translation>Brus:</translation>
</message>
<message>
<source>Distortion Start:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Distortion End:</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ladspaBrowserView</name>
<message>
<source>Available Effects</source>
<translation type="unfinished"/>
</message>
<message>
<source>Unavailable Effects</source>
<translation type="unfinished"/>
</message>
<message>
<source>Instruments</source>
<translation>Instrument</translation>
</message>
<message>
<source>Analysis Tools</source>
<translation>Analysverktyg</translation>
</message>
<message>
<source>Don't know</source>
<translation>Vet inte</translation>
</message>
<message>
<source>This dialog displays information on all of the LADSPA plugins LMMS was able to locate. The plugins are divided into five categories based upon an interpretation of the port types and names.
Available Effects are those that can be used by LMMS. In order for LMMS to be able to use an effect, it must, first and foremost, be an effect, which is to say, it has to have both input channels and output channels. LMMS identifies an input channel as an audio rate port containing 'in' in the name. Output channels are identified by the letters 'out'. Furthermore, the effect must have the same number of inputs and outputs and be real time capable.
Unavailable Effects are those that were identified as effects, but either didn't have the same number of input and output channels or weren't real time capable.
Instruments are plugins for which only output channels were identified.
Analysis Tools are plugins for which only input channels were identified.
Don't Knows are plugins for which no input or output channels were identified.
Double clicking any of the plugins will bring up information on the ports.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Type:</source>
<translation>Typ:</translation>
</message>
</context>
<context>
<name>ladspaDescription</name>
<message>
<source>Plugins</source>
<translation>Plugin</translation>
</message>
<message>
<source>Description</source>
<translation>Beskrivning</translation>
</message>
</context>
<context>
<name>ladspaPortDialog</name>
<message>
<source>Ports</source>
<translation>Portar</translation>
</message>
<message>
<source>Name</source>
<translation>Namn</translation>
</message>
<message>
<source>Rate</source>
<translation>Hastighet</translation>
</message>
<message>
<source>Direction</source>
<translation>Riktning</translation>
</message>
<message>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<source>Min < Default < Max</source>
<translation>Min < Standard < Max</translation>
</message>
<message>
<source>Logarithmic</source>
<translation>Logaritmisk</translation>
</message>
<message>
<source>SR Dependent</source>
<translation type="unfinished"/>
</message>
<message>
<source>Audio</source>
<translation>Ljud</translation>
</message>
<message>
<source>Control</source>
<translation>Kontroll</translation>
</message>
<message>
<source>Input</source>
<translation>Ingång</translation>
</message>
<message>
<source>Output</source>
<translation>Utgång</translation>
</message>
<message>
<source>Toggled</source>
<translation type="unfinished"/>
</message>
<message>
<source>Integer</source>
<translation>Heltal</translation>
</message>
<message>
<source>Float</source>
<translation>Flyttal</translation>
</message>
<message>
<source>Yes</source>
<translation>Ja</translation>
</message>
</context>
<context>
<name>lb302Synth</name>
<message>
<source>VCF Cutoff Frequency</source>
<translation type="unfinished"/>
</message>
<message>
<source>VCF Resonance</source>
<translation type="unfinished"/>
</message>
<message>
<source>VCF Envelope Mod</source>
<translation type="unfinished"/>
</message>
<message>
<source>VCF Envelope Decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Distortion</source>
<translation type="unfinished"/>
</message>
<message>
<source>Waveform</source>
<translation>Vågform</translation>
</message>
<message>
<source>Slide Decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Slide</source>
<translation type="unfinished"/>
</message>
<message>
<source>Accent</source>
<translation type="unfinished"/>
</message>
<message>
<source>Dead</source>
<translation type="unfinished"/>
</message>
<message>
<source>24dB/oct Filter</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>lb302SynthView</name>
<message>
<source>Cutoff Freq:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Resonance:</source>
<translation>Resonans:</translation>
</message>
<message>
<source>Env Mod:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Decay:</source>
<translation>Decay:</translation>
</message>
<message>
<source>303-es-que, 24dB/octave, 3 pole filter</source>
<translation type="unfinished"/>
</message>
<message>
<source>Slide Decay:</source>
<translation type="unfinished"/>
</message>
<message>
<source>DIST:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Saw wave</source>
<translation>Sågtandsvåg</translation>
</message>
<message>
<source>Click here for a saw-wave.</source>
<translation>Klicka här för sågtandvåg.</translation>
</message>
<message>
<source>Triangle wave</source>
<translation>Triangelvåg</translation>
</message>
<message>
<source>Click here for a triangle-wave.</source>
<translation>Klicka här för triangelvåg.</translation>
</message>
<message>
<source>Square wave</source>
<translation>Fyrkantvåg</translation>
</message>
<message>
<source>Click here for a square-wave.</source>
<translation>Klicka här för fyrkantvåg.</translation>
</message>
<message>
<source>Rounded square wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for a square-wave with a rounded end.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Moog wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for a moog-like wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sine wave</source>
<translation>Sinusvåg</translation>
</message>
<message>
<source>Click for a sine-wave.</source>
<translation>Klicka för sinusvåg.</translation>
</message>
<message>
<source>White noise wave</source>
<translation>Vitt brus-våg</translation>
</message>
<message>
<source>Click here for an exponential wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for white-noise.</source>
<translation>Klicka här för vitt brus.</translation>
</message>
<message>
<source>Bandlimited saw wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for bandlimited saw wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandlimited square wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for bandlimited square wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandlimited triangle wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for bandlimited triangle wave.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bandlimited moog saw wave</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here for bandlimited moog saw wave.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>malletsInstrument</name>
<message>
<source>Hardness</source>
<translation type="unfinished"/>
</message>
<message>
<source>Position</source>
<translation>Position</translation>
</message>
<message>
<source>Vibrato Gain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vibrato Freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stick Mix</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulator</source>
<translation type="unfinished"/>
</message>
<message>
<source>Crossfade</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO Speed</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO Depth</source>
<translation type="unfinished"/>
</message>
<message>
<source>ADSR</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pressure</source>
<translation type="unfinished"/>
</message>
<message>
<source>Motion</source>
<translation type="unfinished"/>
</message>
<message>
<source>Speed</source>
<translation>Hastighet</translation>
</message>
<message>
<source>Bowed</source>
<translation type="unfinished"/>
</message>
<message>
<source>Spread</source>
<translation type="unfinished"/>
</message>
<message>
<source>Marimba</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vibraphone</source>
<translation type="unfinished"/>
</message>
<message>
<source>Agogo</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wood1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reso</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wood2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Beats</source>
<translation type="unfinished"/>
</message>
<message>
<source>Two Fixed</source>
<translation type="unfinished"/>
</message>
<message>
<source>Clump</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tubular Bells</source>
<translation type="unfinished"/>
</message>
<message>
<source>Uniform Bar</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tuned Bar</source>
<translation type="unfinished"/>
</message>
<message>
<source>Glass</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tibetan Bowl</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>malletsInstrumentView</name>
<message>
<source>Instrument</source>
<translation type="unfinished"/>
</message>
<message>
<source>Spread</source>
<translation type="unfinished"/>
</message>
<message>
<source>Spread:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Hardness</source>
<translation type="unfinished"/>
</message>
<message>
<source>Hardness:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Position</source>
<translation>Position</translation>
</message>
<message>
<source>Position:</source>
<translation>Position:</translation>
</message>
<message>
<source>Vib Gain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vib Gain:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vib Freq</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vib Freq:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stick Mix</source>
<translation type="unfinished"/>
</message>
<message>
<source>Stick Mix:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulator</source>
<translation type="unfinished"/>
</message>
<message>
<source>Modulator:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Crossfade</source>
<translation type="unfinished"/>
</message>
<message>
<source>Crossfade:</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO Speed</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO Speed:</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO Depth</source>
<translation type="unfinished"/>
</message>
<message>
<source>LFO Depth:</source>
<translation type="unfinished"/>
</message>
<message>
<source>ADSR</source>
<translation type="unfinished"/>
</message>
<message>
<source>ADSR:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pressure</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pressure:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Speed</source>
<translation>Hastighet</translation>
</message>
<message>
<source>Speed:</source>
<translation>Hastighet:</translation>
</message>
<message>
<source>Missing files</source>
<translation type="unfinished"/>
</message>
<message>
<source>Your Stk-installation seems to be incomplete. Please make sure the full Stk-package is installed!</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>manageVSTEffectView</name>
<message>
<source> - VST parameter control</source>
<translation type="unfinished"/>
</message>
<message>
<source>VST Sync</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here if you want to synchronize all parameters with VST plugin.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Automated</source>
<translation>Automatiserad</translation>
</message>
<message>
<source>Click here if you want to display automated parameters only.</source>
<translation type="unfinished"/>
</message>
<message>
<source> Close </source>
<translation> Stäng </translation>
</message>
<message>
<source>Close VST effect knob-controller window.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>manageVestigeInstrumentView</name>
<message>
<source> - VST plugin control</source>
<translation type="unfinished"/>
</message>
<message>
<source>VST Sync</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here if you want to synchronize all parameters with VST plugin.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Automated</source>
<translation>Automatiserad</translation>
</message>
<message>
<source>Click here if you want to display automated parameters only.</source>
<translation type="unfinished"/>
</message>
<message>
<source> Close </source>
<translation> Stäng </translation>
</message>
<message>
<source>Close VST plugin knob-controller window.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>opl2instrument</name>
<message>
<source>Patch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Sustain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Release</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Level</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Level Scaling</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Frequency Multiple</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Feedback</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Key Scaling Rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Percussive Envelope</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Tremolo</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Vibrato</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 1 Waveform</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Sustain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Release</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Level</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Level Scaling</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Frequency Multiple</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Key Scaling Rate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Percussive Envelope</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Tremolo</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Vibrato</source>
<translation type="unfinished"/>
</message>
<message>
<source>Op 2 Waveform</source>
<translation type="unfinished"/>
</message>
<message>
<source>FM</source>
<translation>FM</translation>
</message>
<message>
<source>Vibrato Depth</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tremolo Depth</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>opl2instrumentView</name>
<message>
<source>Attack</source>
<translation>Attack</translation>
</message>
<message>
<source>Decay</source>
<translation>Decay</translation>
</message>
<message>
<source>Release</source>
<translation>Release</translation>
</message>
<message>
<source>Frequency multiplier</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>organicInstrument</name>
<message>
<source>Distortion</source>
<translation type="unfinished"/>
</message>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
</context>
<context>
<name>organicInstrumentView</name>
<message>
<source>Distortion:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Volume:</source>
<translation>Volym:</translation>
</message>
<message>
<source>Randomise</source>
<translation>Slumpa</translation>
</message>
<message>
<source>Osc %1 waveform:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 volume:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 panning:</source>
<translation type="unfinished"/>
</message>
<message>
<source>cents</source>
<translation type="unfinished"/>
</message>
<message>
<source>The distortion knob adds distortion to the output of the instrument. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The volume knob controls the volume of the output of the instrument. It is cumulative with the instrument window's volume control. </source>
<translation type="unfinished"/>
</message>
<message>
<source>The randomize button randomizes all knobs except the harmonics,main volume and distortion knobs. </source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 stereo detuning</source>
<translation type="unfinished"/>
</message>
<message>
<source>Osc %1 harmonic:</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FreeBoyInstrument</name>
<message>
<source>Sweep time</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sweep direction</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sweep RtShift amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wave Pattern Duty</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 1 volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Volume sweep direction</source>
<translation type="unfinished"/>
</message>
<message>
<source>Length of each step in sweep</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 3 volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 4 volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Right Output level</source>
<translation type="unfinished"/>
</message>
<message>
<source>Left Output level</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 1 to SO2 (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 to SO2 (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 3 to SO2 (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 4 to SO2 (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 1 to SO1 (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 2 to SO1 (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 3 to SO1 (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel 4 to SO1 (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Treble</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bass</source>
<translation>Bas</translation>
</message>
<message>
<source>Shift Register width</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FreeBoyInstrumentView</name>
<message>
<source>Sweep Time:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sweep Time</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sweep RtShift amount:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sweep RtShift amount</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wave pattern duty:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wave Pattern Duty</source>
<translation type="unfinished"/>
</message>
<message>
<source>Square Channel 1 Volume:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Length of each step in sweep:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Length of each step in sweep</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wave pattern duty</source>
<translation type="unfinished"/>
</message>
<message>
<source>Square Channel 2 Volume:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Square Channel 2 Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wave Channel Volume:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wave Channel Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>Noise Channel Volume:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Noise Channel Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>SO1 Volume (Right):</source>
<translation type="unfinished"/>
</message>
<message>
<source>SO1 Volume (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>SO2 Volume (Left):</source>
<translation type="unfinished"/>
</message>
<message>
<source>SO2 Volume (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Treble:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Treble</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bass:</source>
<translation>Bas:</translation>
</message>
<message>
<source>Bass</source>
<translation>Bas</translation>
</message>
<message>
<source>Sweep Direction</source>
<translation type="unfinished"/>
</message>
<message>
<source>Volume Sweep Direction</source>
<translation type="unfinished"/>
</message>
<message>
<source>Shift Register Width</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel1 to SO1 (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel2 to SO1 (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel3 to SO1 (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel4 to SO1 (Right)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel1 to SO2 (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel2 to SO2 (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel3 to SO2 (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Channel4 to SO2 (Left)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wave Pattern</source>
<translation type="unfinished"/>
</message>
<message>
<source>The amount of increase or decrease in frequency</source>
<translation type="unfinished"/>
</message>
<message>
<source>The rate at which increase or decrease in frequency occurs</source>
<translation type="unfinished"/>
</message>
<message>
<source>The duty cycle is the ratio of the duration (time) that a signal is ON versus the total period of the signal.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Square Channel 1 Volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>The delay between step change</source>
<translation type="unfinished"/>
</message>
<message>
<source>Draw the wave here</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>patchesDialog</name>
<message>
<source>Qsynth: Channel Preset</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bank selector</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bank</source>
<translation>Bank</translation>
</message>
<message>
<source>Program selector</source>
<translation type="unfinished"/>
</message>
<message>
<source>Patch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Name</source>
<translation>Namn</translation>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Cancel</source>
<translation>Avbryt</translation>
</message>
</context>
<context>
<name>pluginBrowser</name>
<message>
<source>no description</source>
<translation>ingen beskrivning</translation>
</message>
<message>
<source>Incomplete monophonic imitation tb303</source>
<translation type="unfinished"/>
</message>
<message>
<source>Plugin for freely manipulating stereo output</source>
<translation type="unfinished"/>
</message>
<message>
<source>Plugin for controlling knobs with sound peaks</source>
<translation type="unfinished"/>
</message>
<message>
<source>Plugin for enhancing stereo separation of a stereo input file</source>
<translation type="unfinished"/>
</message>
<message>
<source>List installed LADSPA plugins</source>
<translation type="unfinished"/>
</message>
<message>
<source>GUS-compatible patch instrument</source>
<translation type="unfinished"/>
</message>
<message>
<source>Additive Synthesizer for organ-like sounds</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tuneful things to bang on</source>
<translation type="unfinished"/>
</message>
<message>
<source>VST-host for using VST(i)-plugins within LMMS</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vibrating string modeler</source>
<translation type="unfinished"/>
</message>
<message>
<source>plugin for using arbitrary LADSPA-effects inside LMMS.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Filter for importing MIDI-files into LMMS</source>
<translation>Filter för att importera MIDI-filer till LMMS</translation>
</message>
<message>
<source>Emulation of the MOS6581 and MOS8580 SID.
This chip was used in the Commodore 64 computer.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Player for SoundFont files</source>
<translation>Spelare för SoundFont-filer</translation>
</message>
<message>
<source>Emulation of GameBoy (TM) APU</source>
<translation>Emulering av GameBoy (TM) APU</translation>
</message>
<message>
<source>Customizable wavetable synthesizer</source>
<translation type="unfinished"/>
</message>
<message>
<source>Embedded ZynAddSubFX</source>
<translation type="unfinished"/>
</message>
<message>
<source>2-operator FM Synth</source>
<translation type="unfinished"/>
</message>
<message>
<source>Filter for importing Hydrogen files into LMMS</source>
<translation>Filter för att importera Hydrogen-filer till LMMS</translation>
</message>
<message>
<source>LMMS port of sfxr</source>
<translation type="unfinished"/>
</message>
<message>
<source>Monstrous 3-oscillator synth with modulation matrix</source>
<translation type="unfinished"/>
</message>
<message>
<source>Three powerful oscillators you can modulate in several ways</source>
<translation type="unfinished"/>
</message>
<message>
<source>A native amplifier plugin</source>
<translation type="unfinished"/>
</message>
<message>
<source>Carla Rack Instrument</source>
<translation type="unfinished"/>
</message>
<message>
<source>4-oscillator modulatable wavetable synth</source>
<translation type="unfinished"/>
</message>
<message>
<source>plugin for waveshaping</source>
<translation type="unfinished"/>
</message>
<message>
<source>Boost your bass the fast and simple way</source>
<translation type="unfinished"/>
</message>
<message>
<source>Versatile drum synthesizer</source>
<translation>Mångsidig trum-synth</translation>
</message>
<message>
<source>Simple sampler with various settings for using samples (e.g. drums) in an instrument-track</source>
<translation type="unfinished"/>
</message>
<message>
<source>plugin for processing dynamics in a flexible way</source>
<translation type="unfinished"/>
</message>
<message>
<source>Carla Patchbay Instrument</source>
<translation type="unfinished"/>
</message>
<message>
<source>plugin for using arbitrary VST effects inside LMMS.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Graphical spectrum analyzer plugin</source>
<translation type="unfinished"/>
</message>
<message>
<source>A NES-like synthesizer</source>
<translation>En NES-lik synthesizer</translation>
</message>
<message>
<source>A native delay plugin</source>
<translation type="unfinished"/>
</message>
<message>
<source>Player for GIG files</source>
<translation>Spelare för GIG-filer</translation>
</message>
<message>
<source>A multitap echo delay plugin</source>
<translation type="unfinished"/>
</message>
<message>
<source>A native flanger plugin</source>
<translation type="unfinished"/>
</message>
<message>
<source>An oversampling bitcrusher</source>
<translation type="unfinished"/>
</message>
<message>
<source>A native eq plugin</source>
<translation type="unfinished"/>
</message>
<message>
<source>A 4-band Crossover Equalizer</source>
<translation type="unfinished"/>
</message>
<message>
<source>A Dual filter plugin</source>
<translation type="unfinished"/>
</message>
<message>
<source>Filter for exporting MIDI-files from LMMS</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>sf2Instrument</name>
<message>
<source>Bank</source>
<translation>Bank</translation>
</message>
<message>
<source>Patch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
<message>
<source>Reverb</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverb Roomsize</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverb Damping</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverb Width</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverb Level</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus Lines</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus Level</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus Speed</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus Depth</source>
<translation type="unfinished"/>
</message>
<message>
<source>A soundfont %1 could not be loaded.</source>
<translation>SoundFont %1 kunde inte läsas in.</translation>
</message>
</context>
<context>
<name>sf2InstrumentView</name>
<message>
<source>Open other SoundFont file</source>
<translation>Öppna en annan SoundFont-fil</translation>
</message>
<message>
<source>Click here to open another SF2 file</source>
<translation>Klicka här för att öppna en annan SF2-fil</translation>
</message>
<message>
<source>Choose the patch</source>
<translation type="unfinished"/>
</message>
<message>
<source>Gain</source>
<translation>Förstärkning</translation>
</message>
<message>
<source>Apply reverb (if supported)</source>
<translation type="unfinished"/>
</message>
<message>
<source>This button enables the reverb effect. This is useful for cool effects, but only works on files that support it.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverb Roomsize:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverb Damping:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverb Width:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reverb Level:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Apply chorus (if supported)</source>
<translation type="unfinished"/>
</message>
<message>
<source>This button enables the chorus effect. This is useful for cool echo effects, but only works on files that support it.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus Lines:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus Level:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus Speed:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Chorus Depth:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Open SoundFont file</source>
<translation type="unfinished"/>
</message>
<message>
<source>SoundFont2 Files (*.sf2)</source>
<translation>SoundFont2-filer (*.sf2)</translation>
</message>
</context>
<context>
<name>sfxrInstrument</name>
<message>
<source>Wave Form</source>
<translation>Vågform</translation>
</message>
</context>
<context>
<name>sidInstrument</name>
<message>
<source>Cutoff</source>
<translation type="unfinished"/>
</message>
<message>
<source>Resonance</source>
<translation>Resonans</translation>
</message>
<message>
<source>Filter type</source>
<translation>Filtertyp</translation>
</message>
<message>
<source>Voice 3 off</source>
<translation type="unfinished"/>
</message>
<message>
<source>Volume</source>
<translation>Volym</translation>
</message>
<message>
<source>Chip model</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>sidInstrumentView</name>
<message>
<source>Volume:</source>
<translation>Volym:</translation>
</message>
<message>
<source>Resonance:</source>
<translation>Resonans:</translation>
</message>
<message>
<source>Cutoff frequency:</source>
<translation type="unfinished"/>
</message>
<message>
<source>High-Pass filter </source>
<translation>Högpassfilter </translation>
</message>
<message>
<source>Band-Pass filter </source>
<translation>Bandpassfilter </translation>
</message>
<message>
<source>Low-Pass filter </source>
<translation>Lågpassfilter </translation>
</message>
<message>
<source>Voice3 Off </source>
<translation type="unfinished"/>
</message>
<message>
<source>MOS6581 SID </source>
<translation>MOS6581 SID </translation>
</message>
<message>
<source>MOS8580 SID </source>
<translation>MOS8580 SID </translation>
</message>
<message>
<source>Attack:</source>
<translation>Attack:</translation>
</message>
<message>
<source>Attack rate determines how rapidly the output of Voice %1 rises from zero to peak amplitude.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Decay:</source>
<translation>Decay:</translation>
</message>
<message>
<source>Decay rate determines how rapidly the output falls from the peak amplitude to the selected Sustain level.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sustain:</source>
<translation>Sustain:</translation>
</message>
<message>
<source>Output of Voice %1 will remain at the selected Sustain amplitude as long as the note is held.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Release:</source>
<translation>Release:</translation>
</message>
<message>
<source>The output of of Voice %1 will fall from Sustain amplitude to zero amplitude at the selected Release rate.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pulse Width:</source>
<translation>Pulsbredd:</translation>
</message>
<message>
<source>The Pulse Width resolution allows the width to be smoothly swept with no discernable stepping. The Pulse waveform on Oscillator %1 must be selected to have any audible effect.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Coarse:</source>
<translation>Grov:</translation>
</message>
<message>
<source>The Coarse detuning allows to detune Voice %1 one octave up or down.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pulse Wave</source>
<translation>Pulsvåg</translation>
</message>
<message>
<source>Triangle Wave</source>
<translation>Triangelvåg</translation>
</message>
<message>
<source>SawTooth</source>
<translation>Sågtand</translation>
</message>
<message>
<source>Noise</source>
<translation>Brus</translation>
</message>
<message>
<source>Sync</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sync synchronizes the fundamental frequency of Oscillator %1 with the fundamental frequency of Oscillator %2 producing "Hard Sync" effects.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Ring-Mod</source>
<translation type="unfinished"/>
</message>
<message>
<source>Ring-mod replaces the Triangle Waveform output of Oscillator %1 with a "Ring Modulated" combination of Oscillators %1 and %2.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Filtered</source>
<translation>Filtrerad</translation>
</message>
<message>
<source>When Filtered is on, Voice %1 will be processed through the Filter. When Filtered is off, Voice %1 appears directly at the output, and the Filter has no effect on it.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Test</source>
<translation>Testa</translation>
</message>
<message>
<source>Test, when set, resets and locks Oscillator %1 at zero until Test is turned off.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>stereoEnhancerControlDialog</name>
<message>
<source>WIDE</source>
<translation type="unfinished"/>
</message>
<message>
<source>Width:</source>
<translation>Bredd:</translation>
</message>
</context>
<context>
<name>stereoEnhancerControls</name>
<message>
<source>Width</source>
<translation>Bredd</translation>
</message>
</context>
<context>
<name>stereoMatrixControlDialog</name>
<message>
<source>Left to Left Vol:</source>
<translation>Vänster till Vänster Vol.:</translation>
</message>
<message>
<source>Left to Right Vol:</source>
<translation>Vänster till Höger Vol.:</translation>
</message>
<message>
<source>Right to Left Vol:</source>
<translation>Höger till Vänster Vol.:</translation>
</message>
<message>
<source>Right to Right Vol:</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>stereoMatrixControls</name>
<message>
<source>Left to Left</source>
<translation>Vänster till vänster</translation>
</message>
<message>
<source>Left to Right</source>
<translation>Vänster till höger</translation>
</message>
<message>
<source>Right to Left</source>
<translation>Höger till vänster</translation>
</message>
<message>
<source>Right to Right</source>
<translation>Höger till höger</translation>
</message>
</context>
<context>
<name>vestigeInstrument</name>
<message>
<source>Loading plugin</source>
<translation>Laddar plugin</translation>
</message>
<message>
<source>Please wait while loading VST-plugin...</source>
<translation>Vänta medans VST-plugin läses in...</translation>
</message>
</context>
<context>
<name>vibed</name>
<message>
<source>String %1 volume</source>
<translation type="unfinished"/>
</message>
<message>
<source>String %1 stiffness</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pick %1 position</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pickup %1 position</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pan %1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Detune %1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Fuzziness %1 </source>
<translation type="unfinished"/>
</message>
<message>
<source>Length %1</source>
<translation>Längd %1</translation>
</message>
<message>
<source>Impulse %1</source>
<translation>Impuls %1</translation>
</message>
<message>
<source>Octave %1</source>
<translation>Oktav %1</translation>
</message>
</context>
<context>
<name>vibedView</name>
<message>
<source>Volume:</source>
<translation>Volym:</translation>
</message>
<message>
<source>The 'V' knob sets the volume of the selected string.</source>
<translation type="unfinished"/>
</message>
<message>
<source>String stiffness:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The 'S' knob sets the stiffness of the selected string. The stiffness of the string affects how long the string will ring out. The lower the setting, the longer the string will ring.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pick position:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The 'P' knob sets the position where the selected string will be 'picked'. The lower the setting the closer the pick is to the bridge.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pickup position:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The 'PU' knob sets the position where the vibrations will be monitored for the selected string. The lower the setting, the closer the pickup is to the bridge.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pan:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The Pan knob determines the location of the selected string in the stereo field.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Detune:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The Detune knob modifies the pitch of the selected string. Settings less than zero will cause the string to sound flat. Settings greater than zero will cause the string to sound sharp.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Fuzziness:</source>
<translation type="unfinished"/>
</message>
<message>
<source>The Slap knob adds a bit of fuzz to the selected string which is most apparent during the attack, though it can also be used to make the string sound more 'metallic'.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Length:</source>
<translation>Längd:</translation>
</message>
<message>
<source>The Length knob sets the length of the selected string. Longer strings will both ring longer and sound brighter, however, they will also eat up more CPU cycles.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Impulse or initial state</source>
<translation type="unfinished"/>
</message>
<message>
<source>The 'Imp' selector determines whether the waveform in the graph is to be treated as an impulse imparted to the string by the pick or the initial state of the string.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Octave</source>
<translation>Oktav</translation>
</message>
<message>
<source>The Octave selector is used to choose which harmonic of the note the string will ring at. For example, '-2' means the string will ring two octaves below the fundamental, 'F' means the string will ring at the fundamental, and '6' means the string will ring six octaves above the fundamental.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Impulse Editor</source>
<translation type="unfinished"/>
</message>
<message>
<source>The waveform editor provides control over the initial state or impulse that is used to start the string vibrating. The buttons to the right of the graph will initialize the waveform to the selected type. The '?' button will load a waveform from a file--only the first 128 samples will be loaded.
The waveform can also be drawn in the graph.
The 'S' button will smooth the waveform.
The 'N' button will normalize the waveform.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Vibed models up to nine independently vibrating strings. The 'String' selector allows you to choose which string is being edited. The 'Imp' selector chooses whether the graph represents an impulse or the initial state of the string. The 'Octave' selector chooses which harmonic the string should vibrate at.
The graph allows you to control the initial state or impulse used to set the string in motion.
The 'V' knob controls the volume. The 'S' knob controls the string's stiffness. The 'P' knob controls the pick position. The 'PU' knob controls the pickup position.
'Pan' and 'Detune' hopefully don't need explanation. The 'Slap' knob adds a bit of fuzz to the sound of the string.
The 'Length' knob controls the length of the string.
The LED in the lower right corner of the waveform editor determines whether the string is active in the current instrument.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable waveform</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to enable/disable waveform.</source>
<translation type="unfinished"/>
</message>
<message>
<source>String</source>
<translation>Sträng</translation>
</message>
<message>
<source>The String selector is used to choose which string the controls are editing. A Vibed instrument can contain up to nine independently vibrating strings. The LED in the lower right corner of the waveform editor indicates whether the selected string is active.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sine wave</source>
<translation>Sinusvåg</translation>
</message>
<message>
<source>Triangle wave</source>
<translation>Triangelvåg</translation>
</message>
<message>
<source>Saw wave</source>
<translation>Sågtandsvåg</translation>
</message>
<message>
<source>Square wave</source>
<translation>Fyrkantvåg</translation>
</message>
<message>
<source>White noise wave</source>
<translation>Vitt brus-våg</translation>
</message>
<message>
<source>User defined wave</source>
<translation>Användardefinierad vågform</translation>
</message>
<message>
<source>Smooth</source>
<translation>Utjämna</translation>
</message>
<message>
<source>Click here to smooth waveform.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Normalize</source>
<translation>Normalisera</translation>
</message>
<message>
<source>Click here to normalize waveform.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a sine-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a triangle-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a saw-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a square-wave for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use white-noise for current oscillator.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use a user-defined waveform for current oscillator.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>voiceObject</name>
<message>
<source>Voice %1 pulse width</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 attack</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 decay</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 sustain</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 release</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 coarse detuning</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 wave shape</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 sync</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 ring modulate</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 filtered</source>
<translation type="unfinished"/>
</message>
<message>
<source>Voice %1 test</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>waveShaperControlDialog</name>
<message>
<source>INPUT</source>
<translation>INGÅNG</translation>
</message>
<message>
<source>Input gain:</source>
<translation>Ingångsförstärkning:</translation>
</message>
<message>
<source>OUTPUT</source>
<translation>UTGÅNG</translation>
</message>
<message>
<source>Output gain:</source>
<translation>Utgångsförstärkning:</translation>
</message>
<message>
<source>Reset waveform</source>
<translation>Återställ vågform</translation>
</message>
<message>
<source>Click here to reset the wavegraph back to default</source>
<translation type="unfinished"/>
</message>
<message>
<source>Smooth waveform</source>
<translation>Mjuk vågform</translation>
</message>
<message>
<source>Click here to apply smoothing to wavegraph</source>
<translation type="unfinished"/>
</message>
<message>
<source>Increase graph amplitude by 1dB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to increase wavegraph amplitude by 1dB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Decrease graph amplitude by 1dB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Click here to decrease wavegraph amplitude by 1dB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Clip input</source>
<translation type="unfinished"/>
</message>
<message>
<source>Clip input signal to 0dB</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>waveShaperControls</name>
<message>
<source>Input gain</source>
<translation>Ingångsförstärkning</translation>
</message>
<message>
<source>Output gain</source>
<translation>Utgångsförstärkning</translation>
</message>
</context>
</TS><|fim▁end|>
| |
<|file_name|>filter_operators.py<|end_file_name|><|fim▁begin|><|fim▁hole|>IN = 'in'
OPERATOR_SEPARATOR = '__'
REVERSE_ORDER = '-'
ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}
def split_to_field_and_filter_type(filter_name):
filter_split = filter_name.split(OPERATOR_SEPARATOR)
filter_type = filter_split[-1] if len(filter_split) > 0 else None
if filter_type in ALL_OPERATORS:
return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type
else:
return filter_name, None
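# Example, using a hypothetical field name: "price__gt" splits into
# ("price", "gt"), while a plain "price" has no recognised operator suffix
# and is returned as ("price", None).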
def split_to_field_and_order_type(field_name_with_operator):
if field_name_with_operator.startswith(REVERSE_ORDER):
return field_name_with_operator[1:], REVERSE_ORDER
else:
return field_name_with_operator, None
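# Example: a leading "-" requests reverse ordering, so "-created" yields
# ("created", REVERSE_ORDER) and "created" yields ("created", None); the
# field name here is only illustrative.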
def transform_to_list(val):
if isinstance(val, (list, tuple)):
return val
else:
return [val]<|fim▁end|>
|
EQUALS = 'equals'
GT = 'gt'
LT = 'lt'
|
<|file_name|>kmpd.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Downloads kernel from http://kernel.ubuntu.com/~kernel-ppa/mainline/
# Requires: python-bs4
# Copyright (c) 2012 Savvas Radevic <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import urlparse
import urllib
import os
import urllib2
import platform
from bs4 import BeautifulSoup
import re
import sys
import subprocess
import tempfile
# We need to use apt.VersionCompare(a,b) to compare debian package versions
import apt_pkg
import argparse
# MODULE INIT
apt_pkg.init()
# PARSE ARGUMENTS
parser = argparse.ArgumentParser(description='Download and optionally install mainline kernel packages from kernel.ubuntu.com.')
parser.add_argument('-d', '--disable-filter', action='store_true',
help='Do not filter out release candidate versions')
parser.add_argument('-p', '--prefer-stable', action='store_true',
help='Prefer latest stable version instead of latest release candidate of the same version (e.g. prefer v3.9-raring instead of v3.9-rc8-raring)')
parser.add_argument('-l', '--latest-ver', action='store_true',
help='Chooses last version of each branch')
parser.add_argument('-u', '--update', action='store_true',
help='Upgrade kernel installed')
parser.add_argument('-y', '--daily', action='store_true',
help='Download daily build kernel (with next patches)')
parser.add_argument('-w', '--lowlatency', action='store_true',
help='Downloads lowlatency kernel')
args = parser.parse_args()
print(args)
url = "http://kernel.ubuntu.com/~kernel-ppa/mainline/"
print("Contacting {0}".format(url))
source = urllib.urlopen(url).read()
#print(source)
soup = BeautifulSoup(source, "html.parser")
kernels = list()
rel = re.sub('-\w*', '', platform.release())
print("Current system kernel release version: {0}".format(rel))
release = re.sub('([0-9])\.([0-9]{1,2})\.([0-9]{1,2})', '', rel)
previous_version = re.split('\.', rel)
previous_href = ""
upgrade = ""
actual_ver = re.split('\.', rel)  # assumed intent: version parts of the running kernel, used by --update
if args.update:
args.latest_ver = True
selk = -1
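# Scan the mainline index: with -y take the daily build; otherwise gather
# "v*" release links, by default every non-rc version newer than the running
# kernel, or with -l/-u only the newest release of each branch.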
for link in soup.find_all('a'):
href = link.get('href')
if href[0:5] == "daily" and args.daily:
kernels.append(str(href)+"current/")
selk = 0
elif not args.disable_filter:
#If filter is not disabled, apply all filters
if not args.latest_ver:
selk = 0
#Original way
if re.search("rc\d", href):
#If the version is a release candidate, bypass
continue
if href[0] == "v":
kver = href[1:-1] #strip first and last characters
vc = apt_pkg.version_compare(kver, rel)
if vc > 0:
# If kernel newer than current one
#print("{0} > {1}".format(kver, rel))
kernels.append(href)
else:
if href[0] == "v":
                version = re.split('\.', href[1:-1])
if not args.update:
selk = 0
if int(version[0]) > int(previous_version[0]):
previous_version[0] = version[0]
previous_version[1] = -1
kernels.append(previous_href)
if int(version[1]) > int(previous_version[1]):
previous_version[1] = version[1]
kernels.append(previous_href)
previous_href = href
else:
if int(version[0]) == int(actual_ver[0]) and int(version[1]) == int(actual_ver[1]):
if int(version[2]) > int(actual_ver[2]):
kernels = [href]
selk = 1
else:
selk = 0
kernels.append(href)
if previous_href != "":<|fim▁hole|>else:
if not args.update:
# SELECT KERNEL
i = 0
for k in kernels:
i += 1
print("{0}. {1}".format(i, k))
selk = -1
while not 0 < selk <= len(kernels):
try:
defaultk = len(kernels)
if args.prefer_stable:
if re.search('-rc\d+-', kernels[-1]):
# If a release candidate is the last item in list
teststable = re.sub("-rc\d+-","-",kernels[-1])
if teststable in kernels:
defaultk = kernels.index(teststable) + 1
sel = raw_input("Please enter an integer [{0}]: ".format(defaultk))
if sel == "":
selk = defaultk
break
selk = int(sel)
except ValueError:
continue
print("You chose: {0}".format(kernels[selk-1]))
else:
print("You chose: {0}".format(kernels[selk-1]))
# SELECT ARCH
i = 0
archs = ("i386", "amd64")
sysarch = platform.machine().replace(
"x86_64", "amd64").replace("i686", "i386")
print("Your system architecture: {0}".format(sysarch))
try:
defaultarch = archs.index(sysarch)+1
except:
defaultarch = 1
for a in archs:
i += 1
print("{0}. {1}".format(i, a))
sela = -1
while not 0 < sela <= len(archs):
try:
sela = raw_input("Please enter an integer [{0}]: ".format(defaultarch))
if sela == "":
sela = defaultarch
break
sela = int(sela)
except ValueError:
continue
print("You chose: {0}".format(archs[sela-1]))
# SELECT PACKAGES
sel1 = -1
while True:
sel1 = raw_input("Would you like to download kernel headers [Y/n]: ")
if sel1 == "":
selkh = True
break
if not sel1 in tuple("yYnN"):
continue
else:
if sel1 in tuple("yY"):
selkh = True
else:
selkh = False
break
sel2 = -1
while True:
sel2 = raw_input("Would you like to download kernel image [Y/n]: ")
if sel2 == "":
selki = True
break
if not sel2 in tuple("yYnN"):
continue
else:
if sel2 in tuple("yY"):
selki = True
else:
selki = False
break
sel3 = -1
while True:
sel3 = raw_input("Would you like to download kernel extras [Y/n]: ")
if sel3 == "":
selke = True
break
if not sel3 in tuple("yYnN"):
continue
else:
if sel3 in tuple("yY"):
selke = True
else:
selke = False
break
print("Kernel headers: {0}, Kernel image: {1}, Kernel extras: {2}".
format(selkh, selki, selke))
# selk = selected kernel
# sela = selected arch
# selkh = kernel headers? T/F
# selki = kernel image? T/F
# selke = kernel extra? T/F
link = "http://kernel.ubuntu.com/~kernel-ppa/mainline/{0}".format(kernels[selk-1])
print("Contacting {0}".format(link))
source = urllib.urlopen(link).read()
soup = BeautifulSoup(source)
files = list()
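# Collect the .deb download links (headers, image, extras) that match the
# chosen architecture and the package selections made above.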
for l in soup.find_all('a'):
href = l.get('href')
rxstr = "linux-headers.*_(?:{0}|all)\.deb".format(archs[sela-1])
if selkh and re.search(rxstr, href):
url = "{0}{1}".format(link, href)
files.append(url)
rxstr = "linux-image.*_{0}\.deb".format(archs[sela-1])
if selki and re.search(rxstr, href):
url = "{0}{1}".format(link, href)
files.append(url)
rxstr = "linux-image-extra.*_{0}\.deb".format(archs[sela-1])
if selke and re.search(rxstr, href):
url = "{0}{1}".format(link, href)
files.append(url)
#Create temp folder
tempfolder = tempfile.mkdtemp()
print("Using temporary folder: {0}".format(tempfolder))
re_lowlatency = re.compile('.*lowlatency.*')
re_generic = re.compile('.*generic.*')
files2 = []
for url in files:
if args.lowlatency:
coincidence = re_lowlatency.match(url)
if coincidence:
files2.append(coincidence.group())
else:
coincidence = re_generic.match(url)
if coincidence:
files2.append(coincidence.group())
files = files2
print files
for url in files:
#Change directory to temp folder
os.chdir(tempfolder)
file_name = url.split('/')[-1]
u = urllib2.urlopen(url)
f = open(file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print("Downloading: {0} Bytes: {1}".format(url, file_size))
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
p = float(file_size_dl) / file_size
status = r"{0} [{1:.2%}]".format(file_size_dl, p)
status = status + chr(8)*(len(status)+1)
sys.stdout.write(status)
f.close()
# INSTALL PACKAGES
sel6 = -1
while True:
sel6 = raw_input("Would you like to install the downloaded packages? [Y/n]: ")
if sel6 == "":
selinst = True
break
if not sel6 in tuple("yYnN"):
continue
else:
if sel6 in tuple("yY"):
selinst = True
else:
selinst = False
break
if selinst:
print("Installing packages... please type in your password if requested")
subprocess.call("sudo dpkg -i {0}/*.deb".format(tempfolder), shell=True)
else:
print("Will not install packages")
raw_input("All done! Press [Enter] key to exit.")<|fim▁end|>
|
kernels.append(previous_href)
if selk == -1:
print "0 UPDATES"
print rel
|
<|file_name|>morestack-address.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
mod rusti {
#[nolink]
#[abi = "rust-intrinsic"]
pub extern "rust-intrinsic" {
pub fn morestack_addr() -> *();
}
}
pub fn main() {
unsafe {
let addr = rusti::morestack_addr();
assert!(addr.is_not_null());
error!("%?", addr);
}
}<|fim▁end|>
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
|
<|file_name|>openbsd_pkg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Patrik Lundin <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: openbsd_pkg
author: "Patrik Lundin (@eest)"
version_added: "1.1"
short_description: Manage packages on OpenBSD.
description:
- Manage packages on OpenBSD using the pkg tools.
requirements: [ "python >= 2.5" ]
options:
name:
required: true
description:
- Name of the package.
state:
required: true
choices: [ present, latest, absent ]
description:
- C(present) will make sure the package is installed.
C(latest) will make sure the latest version of the package is installed.
C(absent) will make sure the specified package is not installed.
build:
required: false
choices: [ yes, no ]
default: no
description:
- Build the package from source instead of downloading and installing
a binary. Requires that the port source tree is already installed.
Automatically builds and installs the 'sqlports' package, if it is
not already installed.
version_added: "2.1"
ports_dir:
required: false
default: /usr/ports
description:
- When used in combination with the 'build' option, allows overriding
the default ports source directory.
version_added: "2.1"
clean:
required: false
choices: [ yes, no ]
default: no
description:
- When updating or removing packages, delete the extra configuration
file(s) in the old packages which are annotated with @extra in
the packaging-list.
version_added: "2.3"
quick:
required: false
choices: [ yes, no ]
default: no
description:
- Replace or delete packages quickly; do not bother with checksums
before removing normal files.
version_added: "2.3"
'''
EXAMPLES = '''
# Make sure nmap is installed
- openbsd_pkg:
name: nmap
state: present
# Make sure nmap is the latest version
- openbsd_pkg:
name: nmap
state: latest
# Make sure nmap is not installed
- openbsd_pkg:
name: nmap
state: absent
# Make sure nmap is installed, build it from source if it is not
- openbsd_pkg:
name: nmap
state: present
build: yes
# Specify a pkg flavour with '--'
- openbsd_pkg:
name: vim--no_x11
state: present
# Specify the default flavour to avoid ambiguity errors
- openbsd_pkg:
name: vim--
state: present
# Specify a package branch (requires at least OpenBSD 6.0)
- openbsd_pkg:
name: python%3.5
state: present
# Update all packages on the system
- openbsd_pkg:
name: '*'
state: latest
# Purge a package and its configuration files
- openbsd_pkg: name=mpd clean=yes state=absent
# Quickly remove a package without checking checksums
- openbsd_pkg: name=qt5 quick=yes state=absent
'''
import os
import platform
import re
import shlex
import sqlite3
from distutils.version import StrictVersion
# Function used for executing commands.
def execute_command(cmd, module):
# Break command line into arguments.
# This makes run_command() use shell=False which we need to not cause shell
# expansion of special characters like '*'.
cmd_args = shlex.split(cmd)
return module.run_command(cmd_args)
# Function used to find out if a package is currently installed.
def get_package_state(names, pkg_spec, module):
info_cmd = 'pkg_info -Iq'
for name in names:
command = "%s inst:%s" % (info_cmd, name)
rc, stdout, stderr = execute_command(command, module)
if stderr:
module.fail_json(msg="failed in get_package_state(): " + stderr)
if stdout:
# If the requested package name is just a stem, like "python", we may
# find multiple packages with that name.
pkg_spec[name]['installed_names'] = [installed_name for installed_name in stdout.splitlines()]
module.debug("get_package_state(): installed_names = %s" % pkg_spec[name]['installed_names'])
pkg_spec[name]['installed_state'] = True
else:
pkg_spec[name]['installed_state'] = False
# Function used to make sure a package is present.
def package_present(names, pkg_spec, module):
build = module.params['build']
for name in names:
# It is possible package_present() has been called from package_latest().
# In that case we do not want to operate on the whole list of names,
# only the leftovers.
if pkg_spec['package_latest_leftovers']:
if name not in pkg_spec['package_latest_leftovers']:
module.debug("package_present(): ignoring '%s' which is not a package_latest() leftover" % name)
continue
else:
module.debug("package_present(): handling package_latest() leftovers, installing '%s'" % name)
if module.check_mode:
install_cmd = 'pkg_add -Imn'
else:
if build is True:
port_dir = "%s/%s" % (module.params['ports_dir'], get_package_source_path(name, pkg_spec, module))
if os.path.isdir(port_dir):
if pkg_spec[name]['flavor']:
flavors = pkg_spec[name]['flavor'].replace('-', ' ')
install_cmd = "cd %s && make clean=depends && FLAVOR=\"%s\" make install && make clean=depends" % (port_dir, flavors)
elif pkg_spec[name]['subpackage']:
install_cmd = "cd %s && make clean=depends && SUBPACKAGE=\"%s\" make install && make clean=depends" % (port_dir,
pkg_spec[name]['subpackage'])
else:
install_cmd = "cd %s && make install && make clean=depends" % (port_dir)
else:
module.fail_json(msg="the port source directory %s does not exist" % (port_dir))
else:
install_cmd = 'pkg_add -Im'
if pkg_spec[name]['installed_state'] is False:
# Attempt to install the package
if build is True and not module.check_mode:
(pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = module.run_command(install_cmd, module, use_unsafe_shell=True)
else:
(pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (install_cmd, name), module)
# The behaviour of pkg_add is a bit different depending on if a
# specific version is supplied or not.
#
# When a specific version is supplied the return code will be 0 when
# a package is found and 1 when it is not. If a version is not
# supplied the tool will exit 0 in both cases.
#
# It is important to note that "version" relates to the
# packages-specs(7) notion of a version. If using the branch syntax
# (like "python%3.5") even though a branch name may look like a
            # version string it is not used as one by pkg_add.
if pkg_spec[name]['version'] or build is True:
# Depend on the return code.
module.debug("package_present(): depending on return code for name '%s'" % name)
if pkg_spec[name]['rc']:
pkg_spec[name]['changed'] = False
else:
# Depend on stderr instead.
module.debug("package_present(): depending on stderr for name '%s'" % name)
if pkg_spec[name]['stderr']:
# There is a corner case where having an empty directory in
# installpath prior to the right location will result in a
# "file:/local/package/directory/ is empty" message on stderr
# while still installing the package, so we need to look for
# for a message like "packagename-1.0: ok" just in case.
match = re.search("\W%s-[^:]+: ok\W" % pkg_spec[name]['stem'], pkg_spec[name]['stdout'])
if match:
# It turns out we were able to install the package.
module.debug("package_present(): we were able to install package for name '%s'" % name)
else:
# We really did fail, fake the return code.
module.debug("package_present(): we really did fail for name '%s'" % name)
pkg_spec[name]['rc'] = 1
pkg_spec[name]['changed'] = False
else:
module.debug("package_present(): stderr was not set for name '%s'" % name)
if pkg_spec[name]['rc'] == 0:
pkg_spec[name]['changed'] = True
else:
pkg_spec[name]['rc'] = 0
pkg_spec[name]['stdout'] = ''
pkg_spec[name]['stderr'] = ''
pkg_spec[name]['changed'] = False
# Function used to make sure a package is the latest available version.
def package_latest(names, pkg_spec, module):
if module.params['build'] is True:
module.fail_json(msg="the combination of build=%s and state=latest is not supported" % module.params['build'])
upgrade_cmd = 'pkg_add -um'
if module.check_mode:
upgrade_cmd += 'n'
if module.params['clean']:
upgrade_cmd += 'c'
if module.params['quick']:
upgrade_cmd += 'q'
for name in names:
if pkg_spec[name]['installed_state'] is True:
# Attempt to upgrade the package.
(pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (upgrade_cmd, name), module)
# Look for output looking something like "nmap-6.01->6.25: ok" to see if
# something changed (or would have changed). Use \W to delimit the match
# from progress meter output.
pkg_spec[name]['changed'] = False
for installed_name in pkg_spec[name]['installed_names']:
module.debug("package_latest(): checking for pre-upgrade package name: %s" % installed_name)
match = re.search("\W%s->.+: ok\W" % installed_name, pkg_spec[name]['stdout'])
if match:
module.debug("package_latest(): pre-upgrade package name match: %s" % installed_name)
pkg_spec[name]['changed'] = True
break
# FIXME: This part is problematic. Based on the issues mentioned (and
# handled) in package_present() it is not safe to blindly trust stderr
# as an indicator that the command failed, and in the case with
# empty installpath directories this will break.
#
# For now keep this safeguard here, but ignore it if we managed to
# parse out a successful update above. This way we will report a
# successful run when we actually modify something but fail
# otherwise.
if pkg_spec[name]['changed'] is not True:
if pkg_spec[name]['stderr']:
pkg_spec[name]['rc'] = 1
else:
# Note packages that need to be handled by package_present
module.debug("package_latest(): package '%s' is not installed, will be handled by package_present()" % name)
pkg_spec['package_latest_leftovers'].append(name)
# If there were any packages that were not installed we call
# package_present() which will handle those.
if pkg_spec['package_latest_leftovers']:
module.debug("package_latest(): calling package_present() to handle leftovers")
package_present(names, pkg_spec, module)
# Function used to make sure a package is not installed.
def package_absent(names, pkg_spec, module):
remove_cmd = 'pkg_delete -I'
if module.check_mode:
remove_cmd += 'n'
if module.params['clean']:
remove_cmd += 'c'
if module.params['quick']:
remove_cmd += 'q'
for name in names:
if pkg_spec[name]['installed_state'] is True:
# Attempt to remove the package.
(pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (remove_cmd, name), module)
if pkg_spec[name]['rc'] == 0:
pkg_spec[name]['changed'] = True
else:
pkg_spec[name]['changed'] = False
else:
pkg_spec[name]['rc'] = 0
pkg_spec[name]['stdout'] = ''
pkg_spec[name]['stderr'] = ''
pkg_spec[name]['changed'] = False
# Function used to parse the package name based on packages-specs(7).
# The general name structure is "stem-version[-flavors]".
#
# Names containing "%" are a special variation not part of the
# packages-specs(7) syntax. See pkg_add(1) on OpenBSD 6.0 or later for a
# description.
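# A few illustrative parses ('vim--no_x11' and 'python%3.5' also appear in
# the EXAMPLES above): 'vim--no_x11' gives stem 'vim' with flavor 'no_x11'
# (versionless style), 'python%3.5' gives stem 'python' on branch '3.5',
# and a versioned name such as 'nmap-6.25' gives stem 'nmap' with
# version '6.25'.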
def parse_package_name(names, pkg_spec, module):
# Initialize empty list of package_latest() leftovers.
pkg_spec['package_latest_leftovers'] = []
for name in names:
module.debug("parse_package_name(): parsing name: %s" % name)
# Do some initial matches so we can base the more advanced regex on that.
version_match = re.search("-[0-9]", name)
versionless_match = re.search("--", name)
# Stop if someone is giving us a name that both has a version and is
# version-less at the same time.
if version_match and versionless_match:
module.fail_json(msg="package name both has a version and is version-less: " + name)
# All information for a given name is kept in the pkg_spec keyed by that name.
pkg_spec[name] = {}
# If name includes a version.
if version_match:
match = re.search("^(?P<stem>[^%]+)-(?P<version>[0-9][^-]*)(?P<flavor_separator>-)?(?P<flavor>[a-z].*)?(%(?P<branch>.+))?$", name)
if match:
pkg_spec[name]['stem'] = match.group('stem')
pkg_spec[name]['version_separator'] = '-'
pkg_spec[name]['version'] = match.group('version')
pkg_spec[name]['flavor_separator'] = match.group('flavor_separator')
pkg_spec[name]['flavor'] = match.group('flavor')
pkg_spec[name]['branch'] = match.group('branch')
pkg_spec[name]['style'] = 'version'
module.debug("version_match: stem: %s, version: %s, flavor_separator: %s, flavor: %s, branch: %s, style: %s" %
(
pkg_spec[name]['stem'],
pkg_spec[name]['version'],
pkg_spec[name]['flavor_separator'],
pkg_spec[name]['flavor'],
pkg_spec[name]['branch'],
pkg_spec[name]['style']
)
)
else:
module.fail_json(msg="unable to parse package name at version_match: " + name)
# If name includes no version but is version-less ("--").
elif versionless_match:
match = re.search("^(?P<stem>[^%]+)--(?P<flavor>[a-z].*)?(%(?P<branch>.+))?$", name)
if match:
pkg_spec[name]['stem'] = match.group('stem')
pkg_spec[name]['version_separator'] = '-'
pkg_spec[name]['version'] = None
pkg_spec[name]['flavor_separator'] = '-'
pkg_spec[name]['flavor'] = match.group('flavor')
pkg_spec[name]['branch'] = match.group('branch')
pkg_spec[name]['style'] = 'versionless'
module.debug("versionless_match: stem: %s, flavor: %s, branch: %s, style: %s" %
(
pkg_spec[name]['stem'],
pkg_spec[name]['flavor'],
pkg_spec[name]['branch'],
pkg_spec[name]['style']
)
)
else:
module.fail_json(msg="unable to parse package name at versionless_match: " + name)
# If name includes no version, and is not version-less, it is all a
# stem, possibly with a branch (%branchname) tacked on at the
# end.
else:
match = re.search("^(?P<stem>[^%]+)(%(?P<branch>.+))?$", name)
if match:
pkg_spec[name]['stem'] = match.group('stem')
pkg_spec[name]['version_separator'] = None
pkg_spec[name]['version'] = None
pkg_spec[name]['flavor_separator'] = None
pkg_spec[name]['flavor'] = None
pkg_spec[name]['branch'] = match.group('branch')
pkg_spec[name]['style'] = 'stem'
module.debug("stem_match: stem: %s, branch: %s, style: %s" %
(
pkg_spec[name]['stem'],
pkg_spec[name]['branch'],
pkg_spec[name]['style']
)
)
else:
module.fail_json(msg="unable to parse package name at else: " + name)
# Verify that the managed host is new enough to support branch syntax.
if pkg_spec[name]['branch']:
branch_release = "6.0"
if StrictVersion(platform.release()) < StrictVersion(branch_release):
module.fail_json(msg="package name using 'branch' syntax requires at least OpenBSD %s: %s" % (branch_release, name))
# Sanity check that there are no trailing dashes in flavor.
# Try to stop strange stuff early so we can be strict later.
if pkg_spec[name]['flavor']:
match = re.search("-$", pkg_spec[name]['flavor'])
if match:
module.fail_json(msg="trailing dash in flavor: " + pkg_spec[name]['flavor'])
# Function used for figuring out the port path.
def get_package_source_path(name, pkg_spec, module):
pkg_spec[name]['subpackage'] = None
if pkg_spec[name]['stem'] == 'sqlports':
return 'databases/sqlports'
else:
# try for an exact match first
sqlports_db_file = '/usr/local/share/sqlports'
if not os.path.isfile(sqlports_db_file):
module.fail_json(msg="sqlports file '%s' is missing" % sqlports_db_file)
conn = sqlite3.connect(sqlports_db_file)
first_part_of_query = 'SELECT fullpkgpath, fullpkgname FROM ports WHERE fullpkgname'
query = first_part_of_query + ' = ?'
module.debug("package_package_source_path(): exact query: %s" % query)
cursor = conn.execute(query, (name,))
results = cursor.fetchall()
# next, try for a fuzzier match
if len(results) < 1:
looking_for = pkg_spec[name]['stem'] + (pkg_spec[name]['version_separator'] or '-') + (pkg_spec[name]['version'] or '%')
query = first_part_of_query + ' LIKE ?'
if pkg_spec[name]['flavor']:
looking_for += pkg_spec[name]['flavor_separator'] + pkg_spec[name]['flavor']
module.debug("package_package_source_path(): fuzzy flavor query: %s" % query)
cursor = conn.execute(query, (looking_for,))
elif pkg_spec[name]['style'] == 'versionless':
query += ' AND fullpkgname NOT LIKE ?'
module.debug("package_package_source_path(): fuzzy versionless query: %s" % query)
cursor = conn.execute(query, (looking_for, "%s-%%" % looking_for,))
else:
module.debug("package_package_source_path(): fuzzy query: %s" % query)
cursor = conn.execute(query, (looking_for,))
results = cursor.fetchall()
# error if we don't find exactly 1 match
conn.close()
if len(results) < 1:
module.fail_json(msg="could not find a port by the name '%s'" % name)
if len(results) > 1:
matches = map(lambda x:x[1], results)
module.fail_json(msg="too many matches, unsure which to build: %s" % ' OR '.join(matches))
# there's exactly 1 match, so figure out the subpackage, if any, then return
fullpkgpath = results[0][0]
parts = fullpkgpath.split(',')
if len(parts) > 1 and parts[1][0] == '-':
pkg_spec[name]['subpackage'] = parts[1]
return parts[0]
# Function used for upgrading all installed packages.
def upgrade_packages(pkg_spec, module):
if module.check_mode:
upgrade_cmd = 'pkg_add -Imnu'
else:
upgrade_cmd = 'pkg_add -Imu'
# Create a minimal pkg_spec entry for '*' to store return values.
pkg_spec['*'] = {}
# Attempt to upgrade all packages.
pkg_spec['*']['rc'], pkg_spec['*']['stdout'], pkg_spec['*']['stderr'] = execute_command("%s" % upgrade_cmd, module)
# Try to find any occurrence of a package changing version like:
# "bzip2-1.0.6->1.0.6p0: ok".
match = re.search("\W\w.+->.+: ok\W", pkg_spec['*']['stdout'])
if match:
pkg_spec['*']['changed'] = True
else:
pkg_spec['*']['changed'] = False
# It seems we can not trust the return value, so depend on the presence of
# stderr to know if something failed.
if pkg_spec['*']['stderr']:
pkg_spec['*']['rc'] = 1
else:
pkg_spec['*']['rc'] = 0
# ===========================================
# Main control flow.
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True, type='list'),
state = dict(required=True, choices=['absent', 'installed', 'latest', 'present', 'removed']),
build = dict(default='no', type='bool'),
ports_dir = dict(default='/usr/ports'),
quick = dict(default='no', type='bool'),<|fim▁hole|> clean = dict(default='no', type='bool')
),
supports_check_mode = True
)
name = module.params['name']
state = module.params['state']
build = module.params['build']
ports_dir = module.params['ports_dir']
rc = 0
stdout = ''
stderr = ''
result = {}
result['name'] = name
result['state'] = state
result['build'] = build
# The data structure used to keep track of package information.
pkg_spec = {}
if build is True:
if not os.path.isdir(ports_dir):
module.fail_json(msg="the ports source directory %s does not exist" % (ports_dir))
# build sqlports if its not installed yet
parse_package_name(['sqlports'], pkg_spec, module)
get_package_state(['sqlports'], pkg_spec, module)
if not pkg_spec['sqlports']['installed_state']:
module.debug("main(): installing 'sqlports' because build=%s" % module.params['build'])
package_present(['sqlports'], pkg_spec, module)
asterisk_name = False
for n in name:
if n == '*':
if len(name) != 1:
module.fail_json(msg="the package name '*' can not be mixed with other names")
asterisk_name = True
if asterisk_name:
if state != 'latest':
module.fail_json(msg="the package name '*' is only valid when using state=latest")
else:
# Perform an upgrade of all installed packages.
upgrade_packages(pkg_spec, module)
else:
# Parse package names and put results in the pkg_spec dictionary.
parse_package_name(name, pkg_spec, module)
# Not sure how the branch syntax is supposed to play together
# with build mode. Disable it for now.
for n in name:
if pkg_spec[n]['branch'] and module.params['build'] is True:
module.fail_json(msg="the combination of 'branch' syntax and build=%s is not supported: %s" % (module.params['build'], n))
# Get state for all package names.
get_package_state(name, pkg_spec, module)
# Perform requested action.
if state in ['installed', 'present']:
package_present(name, pkg_spec, module)
elif state in ['absent', 'removed']:
package_absent(name, pkg_spec, module)
elif state == 'latest':
package_latest(name, pkg_spec, module)
# The combined changed status for all requested packages. If anything
# is changed this is set to True.
combined_changed = False
# We combine all error messages in this comma separated string, for example:
# "msg": "Can't find nmapp\n, Can't find nmappp\n"
combined_error_message = ''
# Loop over all requested package names and check if anything failed or
# changed.
for n in name:
if pkg_spec[n]['rc'] != 0:
if pkg_spec[n]['stderr']:
if combined_error_message:
combined_error_message += ", %s" % pkg_spec[n]['stderr']
else:
combined_error_message = pkg_spec[n]['stderr']
else:
if combined_error_message:
combined_error_message += ", %s" % pkg_spec[n]['stdout']
else:
combined_error_message = pkg_spec[n]['stdout']
if pkg_spec[n]['changed'] is True:
combined_changed = True
# If combined_error_message contains anything at least some part of the
# list of requested package names failed.
if combined_error_message:
module.fail_json(msg=combined_error_message)
result['changed'] = combined_changed
module.exit_json(**result)
# Import module snippets.
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>rpcmining.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "main.h"
#include "db.h"
#include "init.h"
#include "bitcoinrpc.h"
using namespace json_spirit;
using namespace std;
// Return average network hashes per second based on the last 'lookup' blocks,
// or from the last difficulty change if 'lookup' is nonpositive.
// If 'height' is nonnegative, compute the estimate at the time when a given block was found.
Value GetNetworkHashPS(int lookup, int height) {
CBlockIndex *pb = pindexBest;
if (height >= 0 && height < nBestHeight)
pb = FindBlockByHeight(height);
if (pb == NULL || !pb->nHeight)
return 0;
    // If lookup is nonpositive, use the blocks since the last difficulty change.
if (lookup <= 0)
lookup = pb->nHeight % 2016 + 1;
// If lookup is larger than chain, then set it to chain length.
if (lookup > pb->nHeight)
lookup = pb->nHeight;
CBlockIndex *pb0 = pb;
int64 minTime = pb0->GetBlockTime();
int64 maxTime = minTime;
for (int i = 0; i < lookup; i++) {
pb0 = pb0->pprev;
int64 time = pb0->GetBlockTime();
minTime = std::min(time, minTime);
maxTime = std::max(time, maxTime);
}
// In case there's a situation where minTime == maxTime, we don't want a divide by zero exception.
if (minTime == maxTime)
return 0;
uint256 workDiff = pb->nChainWork - pb0->nChainWork;
int64 timeDiff = maxTime - minTime;
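    // The estimate is the chain work accumulated over the sampled window
    // divided by the wall-clock time it spanned, i.e. hashes per second.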
return (boost::int64_t)(workDiff.getdouble() / timeDiff);
}
Value getnetworkhashps(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"getnetworkhashps [blocks] [height]\n"
"Returns the estimated network hashes per second based on the last 120 blocks.\n"
"Pass in [blocks] to override # of blocks, -1 specifies since last difficulty change.\n"
"Pass in [height] to estimate the network speed at the time when a certain block was found.");
return GetNetworkHashPS(params.size() > 0 ? params[0].get_int() : 120, params.size() > 1 ? params[1].get_int() : -1);
}
// Key used by getwork/getblocktemplate miners.
// Allocated in InitRPCMining, free'd in ShutdownRPCMining
static CReserveKey* pMiningKey = NULL;
void InitRPCMining()
{
if (!pwalletMain)
return;
// getwork/getblocktemplate mining rewards paid here:
pMiningKey = new CReserveKey(pwalletMain);
}
void ShutdownRPCMining()
{
if (!pMiningKey)
return;
delete pMiningKey; pMiningKey = NULL;
}
Value getgenerate(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getgenerate\n"
"Returns true or false.");
if (!pMiningKey)
return false;
return GetBoolArg("-gen");
}
Value setgenerate(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"setgenerate <generate> [genproclimit]\n"
"<generate> is true or false to turn generation on or off.\n"
"Generation is limited to [genproclimit] processors, -1 is unlimited.");
bool fGenerate = true;
if (params.size() > 0)
fGenerate = params[0].get_bool();
if (params.size() > 1)
{
int nGenProcLimit = params[1].get_int();
mapArgs["-genproclimit"] = itostr(nGenProcLimit);
if (nGenProcLimit == 0)
fGenerate = false;
}
mapArgs["-gen"] = (fGenerate ? "1" : "0");
assert(pwalletMain != NULL);
GenerateBitcoins(fGenerate, pwalletMain);
return Value::null;
}
Value gethashespersec(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"gethashespersec\n"
"Returns a recent hashes per second performance measurement while generating.");
if (GetTimeMillis() - nHPSTimerStart > 8000)
return (boost::int64_t)0;
return (boost::int64_t)dHashesPerSec;
}
Value getmininginfo(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getmininginfo\n"
"Returns an object containing mining-related information.");
Object obj;
obj.push_back(Pair("blocks", (int)nBestHeight));
obj.push_back(Pair("currentblocksize",(uint64_t)nLastBlockSize));
obj.push_back(Pair("currentblocktx",(uint64_t)nLastBlockTx));
obj.push_back(Pair("difficulty", (double)GetDifficulty()));
obj.push_back(Pair("errors", GetWarnings("statusbar")));
obj.push_back(Pair("generate", GetBoolArg("-gen")));
obj.push_back(Pair("genproclimit", (int)GetArg("-genproclimit", -1)));
obj.push_back(Pair("hashespersec", gethashespersec(params, false)));
obj.push_back(Pair("networkhashps", getnetworkhashps(params, false)));
obj.push_back(Pair("pooledtx", (uint64_t)mempool.size()));
obj.push_back(Pair("testnet", fTestNet));
return obj;
}
Value getworkex(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"getworkex [data, coinbase]\n"
"If [data, coinbase] is not specified, returns extended work data.\n"
);
if (vNodes.empty())
throw JSONRPCError(RPC_CLIENT_NOT_CONNECTED, "Riestercoin is not connected!");
if (IsInitialBlockDownload())
throw JSONRPCError(RPC_CLIENT_IN_INITIAL_DOWNLOAD, "Riestercoin is downloading blocks...");
typedef map<uint256, pair<CBlock*, CScript> > mapNewBlock_t;
static mapNewBlock_t mapNewBlock; // FIXME: thread safety
static vector<CBlockTemplate*> vNewBlockTemplate;
static CReserveKey reservekey(pwalletMain);
if (params.size() == 0)
{
// Update block
static unsigned int nTransactionsUpdatedLast;
static CBlockIndex* pindexPrev;
static int64 nStart;
static CBlockTemplate* pblocktemplate;
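        // Reuse the cached block template: rebuild only when the chain tip has
        // changed, or when new transactions have arrived and the template is
        // more than 60 seconds old.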
if (pindexPrev != pindexBest ||
(nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 60))
{
if (pindexPrev != pindexBest)
{
// Deallocate old blocks since they're obsolete now
mapNewBlock.clear();
BOOST_FOREACH(CBlockTemplate* pblocktemplate, vNewBlockTemplate)
delete pblocktemplate;
vNewBlockTemplate.clear();
}
// Clear pindexPrev so future getworks make a new block, despite any failures from here on
pindexPrev = NULL;
// Store the pindexBest used before CreateNewBlock, to avoid races
nTransactionsUpdatedLast = nTransactionsUpdated;
CBlockIndex* pindexPrevNew = pindexBest;
nStart = GetTime();
// Create new block
pblocktemplate = CreateNewBlockWithKey(*pMiningKey);
if (!pblocktemplate)
throw JSONRPCError(RPC_OUT_OF_MEMORY, "Out of memory");
vNewBlockTemplate.push_back(pblocktemplate);
// Need to update only after we know CreateNewBlock succeeded
pindexPrev = pindexPrevNew;
}
CBlock* pblock = &pblocktemplate->block; // pointer for convenience
// Update nTime
pblock->UpdateTime(pindexPrev);
pblock->nNonce = 0;
// Update nExtraNonce
static unsigned int nExtraNonce = 0;
IncrementExtraNonce(pblock, pindexPrev, nExtraNonce);
// Save
mapNewBlock[pblock->hashMerkleRoot] = make_pair(pblock, pblock->vtx[0].vin[0].scriptSig);
// Pre-build hash buffers
char pmidstate[32];
char pdata[128];
char phash1[64];
FormatHashBuffers(pblock, pmidstate, pdata, phash1);
uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
CTransaction coinbaseTx = pblock->vtx[0];
std::vector<uint256> merkle = pblock->GetMerkleBranch(0);
Object result;
result.push_back(Pair("data", HexStr(BEGIN(pdata), END(pdata))));
result.push_back(Pair("target", HexStr(BEGIN(hashTarget), END(hashTarget))));
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << coinbaseTx;
result.push_back(Pair("coinbase", HexStr(ssTx.begin(), ssTx.end())));
Array merkle_arr;
BOOST_FOREACH(uint256 merkleh, merkle) {
printf("%s\n", merkleh.ToString().c_str());
merkle_arr.push_back(HexStr(BEGIN(merkleh), END(merkleh)));
}
result.push_back(Pair("merkle", merkle_arr));
return result;
}
else
{
// Parse parameters
vector<unsigned char> vchData = ParseHex(params[0].get_str());
vector<unsigned char> coinbase;
if(params.size() == 2)
coinbase = ParseHex(params[1].get_str());
if (vchData.size() != 128)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter");
CBlock* pdata = (CBlock*)&vchData[0];
<|fim▁hole|> // Byte reverse
for (int i = 0; i < 128/4; i++)
((unsigned int*)pdata)[i] = ByteReverse(((unsigned int*)pdata)[i]);
// Get saved block
if (!mapNewBlock.count(pdata->hashMerkleRoot))
return false;
CBlock* pblock = mapNewBlock[pdata->hashMerkleRoot].first;
pblock->nTime = pdata->nTime;
pblock->nNonce = pdata->nNonce;
if(coinbase.size() == 0)
pblock->vtx[0].vin[0].scriptSig = mapNewBlock[pdata->hashMerkleRoot].second;
else
CDataStream(coinbase, SER_NETWORK, PROTOCOL_VERSION) >> pblock->vtx[0];
pblock->hashMerkleRoot = pblock->BuildMerkleTree();
return CheckWork(pblock, *pwalletMain, reservekey);
}
}
Value getwork(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"getwork [data]\n"
"If [data] is not specified, returns formatted hash data to work on:\n"
" \"midstate\" : precomputed hash state after hashing the first half of the data (DEPRECATED)\n" // deprecated
" \"data\" : block data\n"
" \"hash1\" : formatted hash buffer for second hash (DEPRECATED)\n" // deprecated
" \"target\" : little endian hash target\n"
"If [data] is specified, tries to solve the block and returns true if it was successful.");
if (vNodes.empty())
throw JSONRPCError(RPC_CLIENT_NOT_CONNECTED, "Riestercoin is not connected!");
if (IsInitialBlockDownload())
throw JSONRPCError(RPC_CLIENT_IN_INITIAL_DOWNLOAD, "Riestercoin is downloading blocks...");
typedef map<uint256, pair<CBlock*, CScript> > mapNewBlock_t;
static mapNewBlock_t mapNewBlock; // FIXME: thread safety
static vector<CBlockTemplate*> vNewBlockTemplate;
if (params.size() == 0)
{
// Update block
static unsigned int nTransactionsUpdatedLast;
static CBlockIndex* pindexPrev;
static int64 nStart;
static CBlockTemplate* pblocktemplate;
if (pindexPrev != pindexBest ||
(nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 60))
{
if (pindexPrev != pindexBest)
{
// Deallocate old blocks since they're obsolete now
mapNewBlock.clear();
BOOST_FOREACH(CBlockTemplate* pblocktemplate, vNewBlockTemplate)
delete pblocktemplate;
vNewBlockTemplate.clear();
}
// Clear pindexPrev so future getworks make a new block, despite any failures from here on
pindexPrev = NULL;
// Store the pindexBest used before CreateNewBlock, to avoid races
nTransactionsUpdatedLast = nTransactionsUpdated;
CBlockIndex* pindexPrevNew = pindexBest;
nStart = GetTime();
// Create new block
pblocktemplate = CreateNewBlockWithKey(*pMiningKey);
if (!pblocktemplate)
throw JSONRPCError(RPC_OUT_OF_MEMORY, "Out of memory");
vNewBlockTemplate.push_back(pblocktemplate);
// Need to update only after we know CreateNewBlock succeeded
pindexPrev = pindexPrevNew;
}
CBlock* pblock = &pblocktemplate->block; // pointer for convenience
// Update nTime
pblock->UpdateTime(pindexPrev);
pblock->nNonce = 0;
// Update nExtraNonce
static unsigned int nExtraNonce = 0;
IncrementExtraNonce(pblock, pindexPrev, nExtraNonce);
// Save
mapNewBlock[pblock->hashMerkleRoot] = make_pair(pblock, pblock->vtx[0].vin[0].scriptSig);
// Pre-build hash buffers
char pmidstate[32];
char pdata[128];
char phash1[64];
FormatHashBuffers(pblock, pmidstate, pdata, phash1);
uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
Object result;
result.push_back(Pair("midstate", HexStr(BEGIN(pmidstate), END(pmidstate)))); // deprecated
result.push_back(Pair("data", HexStr(BEGIN(pdata), END(pdata))));
result.push_back(Pair("hash1", HexStr(BEGIN(phash1), END(phash1)))); // deprecated
result.push_back(Pair("target", HexStr(BEGIN(hashTarget), END(hashTarget))));
return result;
}
else
{
// Parse parameters
vector<unsigned char> vchData = ParseHex(params[0].get_str());
if (vchData.size() != 128)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter");
CBlock* pdata = (CBlock*)&vchData[0];
// Byte reverse
for (int i = 0; i < 128/4; i++)
((unsigned int*)pdata)[i] = ByteReverse(((unsigned int*)pdata)[i]);
// Get saved block
if (!mapNewBlock.count(pdata->hashMerkleRoot))
return false;
CBlock* pblock = mapNewBlock[pdata->hashMerkleRoot].first;
pblock->nTime = pdata->nTime;
pblock->nNonce = pdata->nNonce;
pblock->vtx[0].vin[0].scriptSig = mapNewBlock[pdata->hashMerkleRoot].second;
pblock->hashMerkleRoot = pblock->BuildMerkleTree();
assert(pwalletMain != NULL);
return CheckWork(pblock, *pwalletMain, *pMiningKey);
}
}
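// Illustrative note, not part of the original source: a typical getwork round
// trip as seen over JSON-RPC. The hex strings and ids below are placeholders,
// not real values.
//
//   request:  {"method": "getwork", "params": [], "id": 1}
//   response: {"result": {"midstate": "...", "data": "<256 hex chars>",
//                         "hash1": "...", "target": "..."}, "id": 1}
//
//   After finding a nonce the miner resubmits the (byte-reversed) 128-byte
//   block header; the call returns true only if the block passes CheckWork:
//   request:  {"method": "getwork", "params": ["<256 hex chars>"], "id": 2}
//   response: {"result": true, "id": 2}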
Value getblocktemplate(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"getblocktemplate [params]\n"
"Returns data needed to construct a block to work on:\n"
" \"version\" : block version\n"
" \"previousblockhash\" : hash of current highest block\n"
" \"transactions\" : contents of non-coinbase transactions that should be included in the next block\n"
" \"coinbaseaux\" : data that should be included in coinbase\n"
" \"coinbasevalue\" : maximum allowable input to coinbase transaction, including the generation award and transaction fees\n"
" \"target\" : hash target\n"
" \"mintime\" : minimum timestamp appropriate for next block\n"
" \"curtime\" : current timestamp\n"
" \"mutable\" : list of ways the block template may be changed\n"
" \"noncerange\" : range of valid nonces\n"
" \"sigoplimit\" : limit of sigops in blocks\n"
" \"sizelimit\" : limit of block size\n"
" \"bits\" : compressed target of next block\n"
" \"height\" : height of the next block\n"
"See https://en.bitcoin.it/wiki/BIP_0022 for full specification.");
std::string strMode = "template";
if (params.size() > 0)
{
const Object& oparam = params[0].get_obj();
const Value& modeval = find_value(oparam, "mode");
if (modeval.type() == str_type)
strMode = modeval.get_str();
else if (modeval.type() == null_type)
{
/* Do nothing */
}
else
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid mode");
}
if (strMode != "template")
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid mode");
if (vNodes.empty())
throw JSONRPCError(RPC_CLIENT_NOT_CONNECTED, "Riestercoin is not connected!");
if (IsInitialBlockDownload())
throw JSONRPCError(RPC_CLIENT_IN_INITIAL_DOWNLOAD, "Riestercoin is downloading blocks...");
// Update block
static unsigned int nTransactionsUpdatedLast;
static CBlockIndex* pindexPrev;
static int64 nStart;
static CBlockTemplate* pblocktemplate;
if (pindexPrev != pindexBest ||
(nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 5))
{
// Clear pindexPrev so future calls make a new block, despite any failures from here on
pindexPrev = NULL;
// Store the pindexBest used before CreateNewBlock, to avoid races
nTransactionsUpdatedLast = nTransactionsUpdated;
CBlockIndex* pindexPrevNew = pindexBest;
nStart = GetTime();
// Create new block
if(pblocktemplate)
{
delete pblocktemplate;
pblocktemplate = NULL;
}
CScript scriptDummy = CScript() << OP_TRUE;
pblocktemplate = CreateNewBlock(scriptDummy);
if (!pblocktemplate)
throw JSONRPCError(RPC_OUT_OF_MEMORY, "Out of memory");
// Need to update only after we know CreateNewBlock succeeded
pindexPrev = pindexPrevNew;
}
CBlock* pblock = &pblocktemplate->block; // pointer for convenience
// Update nTime
pblock->UpdateTime(pindexPrev);
pblock->nNonce = 0;
Array transactions;
map<uint256, int64_t> setTxIndex;
int i = 0;
BOOST_FOREACH (CTransaction& tx, pblock->vtx)
{
uint256 txHash = tx.GetHash();
setTxIndex[txHash] = i++;
if (tx.IsCoinBase())
continue;
Object entry;
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << tx;
entry.push_back(Pair("data", HexStr(ssTx.begin(), ssTx.end())));
entry.push_back(Pair("hash", txHash.GetHex()));
Array deps;
BOOST_FOREACH (const CTxIn &in, tx.vin)
{
if (setTxIndex.count(in.prevout.hash))
deps.push_back(setTxIndex[in.prevout.hash]);
}
entry.push_back(Pair("depends", deps));
int index_in_template = i - 1;
entry.push_back(Pair("fee", pblocktemplate->vTxFees[index_in_template]));
entry.push_back(Pair("sigops", pblocktemplate->vTxSigOps[index_in_template]));
transactions.push_back(entry);
}
Object aux;
aux.push_back(Pair("flags", HexStr(COINBASE_FLAGS.begin(), COINBASE_FLAGS.end())));
uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
static Array aMutable;
if (aMutable.empty())
{
aMutable.push_back("time");
aMutable.push_back("transactions");
aMutable.push_back("prevblock");
}
Object result;
result.push_back(Pair("version", pblock->nVersion));
result.push_back(Pair("previousblockhash", pblock->hashPrevBlock.GetHex()));
result.push_back(Pair("transactions", transactions));
result.push_back(Pair("coinbaseaux", aux));
result.push_back(Pair("coinbasevalue", (int64_t)pblock->vtx[0].vout[0].nValue));
result.push_back(Pair("target", hashTarget.GetHex()));
result.push_back(Pair("mintime", (int64_t)pindexPrev->GetMedianTimePast()+1));
result.push_back(Pair("mutable", aMutable));
result.push_back(Pair("noncerange", "00000000ffffffff"));
result.push_back(Pair("sigoplimit", (int64_t)MAX_BLOCK_SIGOPS));
result.push_back(Pair("sizelimit", (int64_t)MAX_BLOCK_SIZE));
result.push_back(Pair("curtime", (int64_t)pblock->nTime));
result.push_back(Pair("bits", HexBits(pblock->nBits)));
result.push_back(Pair("height", (int64_t)(pindexPrev->nHeight+1)));
return result;
}
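// Illustrative note, not part of the original source: rough shape of the
// getblocktemplate reply assembled above (BIP 22). All values are placeholders.
//
//   {"version": 2, "previousblockhash": "00000000...", "transactions": [...],
//    "coinbaseaux": {"flags": "..."}, "coinbasevalue": 5000000000,
//    "target": "00000000ffff...", "mintime": 1400000000,
//    "mutable": ["time", "transactions", "prevblock"],
//    "noncerange": "00000000ffffffff", "sigoplimit": 20000,
//    "sizelimit": 1000000, "curtime": 1400000100, "bits": "1d00ffff",
//    "height": 123457}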
Value submitblock(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"submitblock <hex data> [optional-params-obj]\n"
"[optional-params-obj] parameter is currently ignored.\n"
"Attempts to submit new block to network.\n"
"See https://en.bitcoin.it/wiki/BIP_0022 for full specification.");
vector<unsigned char> blockData(ParseHex(params[0].get_str()));
CDataStream ssBlock(blockData, SER_NETWORK, PROTOCOL_VERSION);
CBlock pblock;
try {
ssBlock >> pblock;
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Block decode failed");
}
CValidationState state;
bool fAccepted = ProcessBlock(state, NULL, &pblock);
if (!fAccepted)
return "rejected"; // TODO: report validation state
return Value::null;
}<|fim▁end|>
| |
<|file_name|>concordance.py<|end_file_name|><|fim▁begin|># System
"""Concordance Checking."""
import logging
import os
import functools
# Third Party
from sklearn.cluster import MiniBatchKMeans
from scipy.stats import ks_2samp
import numpy as np
import pandas as pd
# First Party
from submission_criteria import common
def has_concordance(P1, P2, P3, c1, c2, c3, threshold=0.12):
"""Checks that the clustered submission data conforms to a concordance threshold
    Parameters:
----------
P1 : ndarray
Sorted validation submission probabilities based on the id
P2 : ndarray
Sorted test submission probabilities based on the id
P3 : ndarray
Sorted live submission probabilities based on the id
c1 : ndarray
Clustered validation from the tournament data
c2 : ndarray
Clustered test from the tournament data
c3 : ndarray
Clustered live from the tournament data
threshold : float, optional, default: 0.12
        The value that the mean KS score must stay under for the submission to have "concordance"
Returns:
--------
concordance : bool
Boolean value of the clustered submission data having concordance
"""
ks = []
for i in set(c1):
ks_score = max(
ks_2samp(P1.reshape(-1)[c1 == i],
P2.reshape(-1)[c2 == i])[0],
ks_2samp(P1.reshape(-1)[c1 == i],
P3.reshape(-1)[c3 == i])[0],
ks_2samp(P3.reshape(-1)[c3 == i],
P2.reshape(-1)[c2 == i])[0])
ks.append(ks_score)
logging.getLogger().info("Noticed score {}".format(np.mean(ks)))
return np.mean(ks) < threshold
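def _example_has_concordance():
    """Illustrative sketch, not part of the original module.

    Shows the call pattern for has_concordance using synthetic probabilities
    and cluster labels; the shapes, seed and cluster count are assumptions
    made only for this example.
    """
    rng = np.random.RandomState(0)
    # Fake sorted submission probabilities for validation / test / live
    P1, P2, P3 = (rng.rand(100, 1) for _ in range(3))
    # Fake cluster assignments (labels 0-4, mirroring the 5 clusters used below)
    c1, c2, c3 = (rng.randint(0, 5, size=100) for _ in range(3))
    # Returns True when the mean pairwise KS statistic stays under the threshold
    return has_concordance(P1, P2, P3, c1, c2, c3)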
def make_clusters(X, X_1, X_2, X_3):
"""Split submission data into 3 clusters using K-Means clustering
Parameters:
-----------
X: ndarray
tournament data for the competition round
X_1: ndarray
sorted validation data ids from tournament data
X_2: ndarray
sorted test ids data from tournament data
X_3: ndarray
sorted live ids data from tournament data
Returns:
--------
c1: nparray
Clustered validation data
c2: nparray
Clustered test data
c3: nparray
        Clustered live data
"""
logging.getLogger().info("New competition, clustering dataset")
kmeans = MiniBatchKMeans(n_clusters=5, random_state=1337)
kmeans.fit(X)
c1, c2, c3 = kmeans.predict(X_1), kmeans.predict(X_2), kmeans.predict(X_3)
logging.getLogger().info("Finished clustering")
return c1, c2, c3
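def _example_make_clusters():
    """Illustrative sketch, not part of the original module.

    Demonstrates the shapes make_clusters expects: X is the matrix the
    K-Means model is fit on, while X_1/X_2/X_3 are the validation, test and
    live feature matrices that get labelled. Sizes here are assumptions.
    """
    rng = np.random.RandomState(0)
    X = rng.rand(300, 10)                          # combined feature matrix
    X_1, X_2, X_3 = X[:100], X[100:200], X[200:]   # validation / test / live
    c1, c2, c3 = make_clusters(X, X_1, X_2, X_3)
    assert len(c1) == len(X_1) and len(c2) == len(X_2) and len(c3) == len(X_3)
    return c1, c2, c3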
@functools.lru_cache(maxsize=2)
def get_ids(filemanager, tournament_number, round_number):
"""Gets the ids from submission data based on the round_number
Parameters:
-----------
filemanager : FileManager
S3 Bucket data access object for querying competition datasets
    tournament_number : int
        The numerical id of the tournament
    round_number : int
        The numerical id of the competition round
Returns:
--------
val : list
List of all ids in the 'validation' dataset
test : list
List of all ids in the 'test' dataset
live : list
List of all ids in the 'live' dataset
"""
extract_dir = filemanager.download_dataset(tournament_number, round_number)
tournament = pd.read_csv(
os.path.join(extract_dir, "numerai_tournament_data.csv"))
val = tournament[tournament["data_type"] == "validation"]
test = tournament[tournament["data_type"] == "test"]
live = tournament[tournament["data_type"] == "live"]
return list(val["id"]), list(test["id"]), list(live["id"])
def get_sorted_split(data, val_ids, test_ids, live_ids):
"""Split the competition data into validation, test, and live data sets in a sorted fashion
Parameters:
-----------
data : DataFrame
Tournament data for the competition round
val_ids : list
List of all validation data ids
test_ids : list
List of all test data ids
live_ids : list
List of all live data ids
Returns:
--------
validation : ndarray
Validation data features sorted by id
test : ndarray
Test data features sorted by id
live : ndarray
Live data features sorted by id
"""
validation = data[data["id"].isin(val_ids)]
test = data[data["id"].isin(test_ids)]
live = data[data["id"].isin(live_ids)]
validation = validation.sort_values("id")
test = test.sort_values("id")
live = live.sort_values("id")
if any(["feature" in c for c in list(validation)]):
f = [c for c in list(validation) if "feature" in c]
else:
f = ["probability"]
validation = validation[f]
test = test[f]
live = live[f]
return validation.as_matrix(), test.as_matrix(), live.as_matrix()
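def _example_get_sorted_split():
    """Illustrative sketch, not part of the original module.

    Builds a tiny DataFrame with the columns get_sorted_split expects
    ("id" plus feature columns); the ids, values and split membership are
    assumptions made only for this example.
    """
    data = pd.DataFrame({
        "id": ["c", "a", "b", "d"],
        "feature1": [0.1, 0.2, 0.3, 0.4],
    })
    val, test, live = get_sorted_split(data, ["a"], ["b", "c"], ["d"])
    # Each split comes back sorted by id with one row per matching id
    assert val.shape == (1, 1) and test.shape == (2, 1) and live.shape == (1, 1)
    return val, test, live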
@functools.lru_cache(maxsize=2)
def get_competition_variables(tournament_number, round_number, filemanager):
"""Return the K-Means Clustered tournament data for the competition round
Parameters:
-----------
    tournament_number : int
        The numerical id of the tournament
    round_number : int
        The numerical id of the competition round
filemanager : FileManager
S3 Bucket data access object for querying competition datasets
Returns:
--------
variables : dictionary
Holds clustered tournament data and the round_number
"""
extract_dir = filemanager.download_dataset(tournament_number, round_number)
training = pd.read_csv(
os.path.join(extract_dir, "numerai_training_data.csv"))
tournament = pd.read_csv(
os.path.join(extract_dir, "numerai_tournament_data.csv"))
val_ids, test_ids, live_ids = get_ids(filemanager, tournament_number,
round_number)
return get_competition_variables_from_df(
round_number, training, tournament, val_ids, test_ids, live_ids)
def get_competition_variables_from_df(
round_number: str, training: pd.DataFrame, tournament: pd.DataFrame,
val_ids: list, test_ids: list, live_ids: list) -> dict:
f = [c for c in list(tournament) if "feature" in c]
# TODO the dropna is a hack workaround for https://github.com/numerai/api-ml/issues/68
X = training[f].dropna().as_matrix()
X = np.append(X, tournament[f].as_matrix(), axis=0)
X_1, X_2, X_3 = get_sorted_split(tournament, val_ids, test_ids, live_ids)
c1, c2, c3 = make_clusters(X, X_1, X_2, X_3)
variables = {
"round_number": round_number,
"cluster_1": c1,
"cluster_2": c2,
"cluster_3": c3,
}
return variables
def get_submission_pieces(submission_id, tournament, round_number, db_manager,
filemanager):
"""Get validation, test, and live ids sorted from submission_id
Parameters:
-----------
submission_id : string
ID of the submission
    tournament : int
        Numerical ID of the tournament
    round_number : int
        Numerical ID of the competition round of the tournament
db_manager : DatabaseManager
DB data access object that has read and write functions to NoSQL DB
filemanager : FileManager
S3 Bucket data access object for querying competition datasets
Returns:
--------
validation : ndarray
Sorted validation ids from submission data
tests : ndarray
Sorted test ids from submission data
live : ndarray
Sorted live ids from submission data
"""
s3_file, _ = common.get_filename(db_manager.postgres_db, submission_id)
data = filemanager.read_csv(s3_file)
val_ids, test_ids, live_ids = get_ids(filemanager, tournament,
round_number)
validation, tests, live = get_sorted_split(data, val_ids, test_ids,
live_ids)
return validation, tests, live
def submission_concordance(submission, db_manager, filemanager):
"""Determine if a submission is concordant and write the result to DB
Parameters:
-----------
submission : dictionary
Submission data that holds the ids of submission and competition round
db_manager : DatabaseManager
DB data access object that has read and write functions to NoSQL DB
filemanager : FileManager
S3 Bucket data access object for querying competition datasets
"""
tournament, round_number, _dataset_path = common.get_round(
db_manager.postgres_db, submission["submission_id"])<|fim▁hole|> c1, c2, c3 = clusters["cluster_1"], clusters["cluster_2"], clusters[
"cluster_3"]
try:
concordance = has_concordance(P1, P2, P3, c1, c2, c3)
except IndexError:
# If we had an indexing error, that is because the round restart, and we need to try getting the new competition variables.
get_competition_variables.cache_clear()
clusters = get_competition_variables(tournament, round_number,
filemanager)
c1, c2, c3 = clusters["cluster_1"], clusters["cluster_2"], clusters[
"cluster_3"]
concordance = has_concordance(P1, P2, P3, c1, c2, c3)
print('writing concordance', submission['submission_id'], concordance)
db_manager.write_concordance(submission['submission_id'], concordance)<|fim▁end|>
|
clusters = get_competition_variables(tournament, round_number, filemanager)
P1, P2, P3 = get_submission_pieces(submission["submission_id"], tournament,
round_number, db_manager, filemanager)
|
<|file_name|>OpenGLLayer.cpp<|end_file_name|><|fim▁begin|>#include <algorithm>
#include <iostream>
#include "RustyFist/DrawMe.h"
#include "RustyFist/TouchSink.h"
#include "OpenGLLayer.h"
using namespace cocos2d;
using namespace std;
<|fim▁hole|>OpenGLLayer::~OpenGLLayer()
{
}
bool OpenGLLayer::init()
{
return Layer::init();
}
cocos2d::Scene* OpenGLLayer::scene(DrawMe* drawMe, TouchSink* ts)
{
auto scene = Scene::create();
OpenGLLayer *layer = OpenGLLayer::create();
layer->setDrawMe(drawMe);
layer->setTouchSink(ts);
scene->addChild(layer);
return scene;
}
void OpenGLLayer::draw(cocos2d::Renderer* renderer, const cocos2d::Mat4& transform, uint32_t flags)
{
if (_drawMe)
_drawMe->draw();
}
void OpenGLLayer::setTouchSink(TouchSink* ts)
{
_ts = ts;
auto mouseEvents = EventListenerMouse::create();
mouseEvents->onMouseDown = [this](Event* e)
{
if(auto me = dynamic_cast<EventMouse*>(e))
{
if(_ts)
_ts->mouse({me->getCursorX(), me->getCursorY()});
}
};
mouseEvents->onMouseUp = [this](Event* e)
{
if(auto me = dynamic_cast<EventMouse*>(e))
{
if(_ts)
_ts->mouse({me->getCursorX(), me->getCursorY()});
}
};
mouseEvents->onMouseMove = [this](Event* e)
{
if(auto me = dynamic_cast<EventMouse*>(e))
{
if(_ts)
_ts->mouse({me->getCursorX(), me->getCursorY()});
}
};
_eventDispatcher->addEventListenerWithSceneGraphPriority(mouseEvents, this);
}
void OpenGLLayer::onTouchesBegan(const std::vector<cocos2d::Touch*>& touches, cocos2d::Event* unused_event)
{
sendTouch(touches);
}
void OpenGLLayer::onTouchesMoved(const std::vector<cocos2d::Touch*>& touches, cocos2d::Event* unused_event)
{
sendTouch(touches);
}
void OpenGLLayer::onTouchesEnded(const std::vector<cocos2d::Touch*>& touches, cocos2d::Event* unused_event)
{
sendTouch(touches);
}
void OpenGLLayer::onTouchesCancelled(const std::vector<cocos2d::Touch*>& touches, cocos2d::Event* unused_event)
{
sendTouch(touches);
}
void OpenGLLayer::sendTouch(const std::vector<cocos2d::Touch*>& touches)
{
std::vector<::Touch> tochs;
std::transform(touches.begin(), touches.end(), back_inserter(tochs), [](cocos2d::Touch* t)
{
return ::Touch{t->getID(), t->getLocation().x, t->getLocation().y};
});
if(_ts)
_ts->touch(tochs);
}<|fim▁end|>
|
OpenGLLayer::OpenGLLayer()
{
}
|
<|file_name|>IndentOnTopLevel.after.py<|end_file_name|><|fim▁begin|>class C:
def foo(self):
x = 1
y = 2
x = 1<|fim▁hole|> pass<|fim▁end|>
|
def foo():
|
<|file_name|>timer.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
Synchronous Timers
This module exposes the functionality to create timers, block the current task,
and create receivers which will receive notifications after a period of time.
*/
use comm::Receiver;
use io::IoResult;
use kinds::Send;
use rt::rtio::{IoFactory, LocalIo, RtioTimer};
/// A synchronous timer object
///
/// Values of this type can be used to put the current task to sleep for a
/// period of time. Handles to this timer can also be created in the form of
/// receivers which will receive notifications over time.
///
/// # Example
///
/// ```
/// # fn main() {}
/// # fn foo() {
/// use std::io::Timer;
///
/// let mut timer = Timer::new().unwrap();
/// timer.sleep(10); // block the task for awhile
///
/// let timeout = timer.oneshot(10);
/// // do some work
/// timeout.recv(); // wait for the timeout to expire
///
/// let periodic = timer.periodic(10);
/// loop {
/// periodic.recv();
/// // this loop is only executed once every 10ms
/// }
/// # }
/// ```
///
/// If only sleeping is necessary, then a convenience api is provided through
/// the `io::timer` module.
///
/// ```
/// # fn main() {}
/// # fn foo() {
/// use std::io::timer;
///
/// // Put this task to sleep for 5 seconds
/// timer::sleep(5000);
/// # }
/// ```
pub struct Timer {
obj: ~RtioTimer:Send,
}
/// Sleep the current task for `msecs` milliseconds.
pub fn sleep(msecs: u64) {
let timer = Timer::new();
let mut timer = timer.ok().expect("timer::sleep: could not create a Timer");
timer.sleep(msecs)
}
impl Timer {
/// Creates a new timer which can be used to put the current task to sleep
/// for a number of milliseconds, or to possibly create channels which will
/// get notified after an amount of time has passed.
pub fn new() -> IoResult<Timer> {
LocalIo::maybe_raise(|io| io.timer_init().map(|t| Timer { obj: t }))
}
/// Blocks the current task for `msecs` milliseconds.
///
/// Note that this function will cause any other receivers for this timer to
/// be invalidated (the other end will be closed).
pub fn sleep(&mut self, msecs: u64) {
self.obj.sleep(msecs);
}
/// Creates a oneshot receiver which will have a notification sent when
/// `msecs` milliseconds has elapsed. This does *not* block the current
/// task, but instead returns immediately.
///
/// Note that this invalidates any previous receiver which has been created
/// by this timer, and that the returned receiver will be invalidated once
/// the timer is destroyed (when it falls out of scope).
pub fn oneshot(&mut self, msecs: u64) -> Receiver<()> {
self.obj.oneshot(msecs)
}
/// Creates a receiver which will have a continuous stream of notifications
/// being sent every `msecs` milliseconds. This does *not* block the
/// current task, but instead returns immediately. The first notification
  /// will not be received immediately, but rather after `msecs` milliseconds
/// have passed.
///
/// Note that this invalidates any previous receiver which has been created
/// by this timer, and that the returned receiver will be invalidated once
/// the timer is destroyed (when it falls out of scope).
pub fn periodic(&mut self, msecs: u64) -> Receiver<()> {
self.obj.period(msecs)
}
}
#[cfg(test)]
mod test {
iotest!(fn test_io_timer_sleep_simple() {
let mut timer = Timer::new().unwrap();
timer.sleep(1);
})
iotest!(fn test_io_timer_sleep_oneshot() {
let mut timer = Timer::new().unwrap();
timer.oneshot(1).recv();
})
iotest!(fn test_io_timer_sleep_oneshot_forget() {
let mut timer = Timer::new().unwrap();
timer.oneshot(100000000000);
})
iotest!(fn oneshot_twice() {
let mut timer = Timer::new().unwrap();
let rx1 = timer.oneshot(10000);
let rx = timer.oneshot(1);
rx.recv();
assert_eq!(rx1.recv_opt(), Err(()));
})
iotest!(fn test_io_timer_oneshot_then_sleep() {
let mut timer = Timer::new().unwrap();
let rx = timer.oneshot(100000000000);
    timer.sleep(1); // this should invalidate rx
assert_eq!(rx.recv_opt(), Err(()));
})
iotest!(fn test_io_timer_sleep_periodic() {
let mut timer = Timer::new().unwrap();
let rx = timer.periodic(1);
rx.recv();
rx.recv();
rx.recv();
})
iotest!(fn test_io_timer_sleep_periodic_forget() {
let mut timer = Timer::new().unwrap();
timer.periodic(100000000000);
})
iotest!(fn test_io_timer_sleep_standalone() {
sleep(1)
})
iotest!(fn oneshot() {
let mut timer = Timer::new().unwrap();
let rx = timer.oneshot(1);
rx.recv();
assert!(rx.recv_opt().is_err());
let rx = timer.oneshot(1);
rx.recv();
assert!(rx.recv_opt().is_err());
})
iotest!(fn override() {
let mut timer = Timer::new().unwrap();
let orx = timer.oneshot(100);
let prx = timer.periodic(100);
timer.sleep(1);
assert_eq!(orx.recv_opt(), Err(()));
assert_eq!(prx.recv_opt(), Err(()));
timer.oneshot(1).recv();
})
iotest!(fn period() {
let mut timer = Timer::new().unwrap();
let rx = timer.periodic(1);
rx.recv();
rx.recv();
let rx2 = timer.periodic(1);
rx2.recv();
rx2.recv();
})
iotest!(fn sleep() {
let mut timer = Timer::new().unwrap();
timer.sleep(1);
timer.sleep(1);
})
iotest!(fn oneshot_fail() {
let mut timer = Timer::new().unwrap();
let _rx = timer.oneshot(1);
fail!();
} #[should_fail])
iotest!(fn period_fail() {
let mut timer = Timer::new().unwrap();
let _rx = timer.periodic(1);
fail!();
} #[should_fail])
iotest!(fn normal_fail() {
let _timer = Timer::new().unwrap();
fail!();
} #[should_fail])
iotest!(fn closing_channel_during_drop_doesnt_kill_everything() {
// see issue #10375
let mut timer = Timer::new().unwrap();
let timer_rx = timer.periodic(1000);
spawn(proc() {
let _ = timer_rx.recv_opt();
});
// when we drop the TimerWatcher we're going to destroy the channel,
// which must wake up the task on the other end
})
iotest!(fn reset_doesnt_switch_tasks() {
// similar test to the one above.
let mut timer = Timer::new().unwrap();
let timer_rx = timer.periodic(1000);
spawn(proc() {
let _ = timer_rx.recv_opt();
});
timer.oneshot(1);
})
iotest!(fn reset_doesnt_switch_tasks2() {
// similar test to the one above.
let mut timer = Timer::new().unwrap();
let timer_rx = timer.periodic(1000);
spawn(proc() {
let _ = timer_rx.recv_opt();
});
timer.sleep(1);
})
iotest!(fn sender_goes_away_oneshot() {
let rx = {
let mut timer = Timer::new().unwrap();
timer.oneshot(1000)<|fim▁hole|> iotest!(fn sender_goes_away_period() {
let rx = {
let mut timer = Timer::new().unwrap();
timer.periodic(1000)
};
assert_eq!(rx.recv_opt(), Err(()));
})
iotest!(fn receiver_goes_away_oneshot() {
let mut timer1 = Timer::new().unwrap();
timer1.oneshot(1);
let mut timer2 = Timer::new().unwrap();
    // while sleeping, the previous timer should fire and not have its
// callback do something terrible.
timer2.sleep(2);
})
iotest!(fn receiver_goes_away_period() {
let mut timer1 = Timer::new().unwrap();
timer1.periodic(1);
let mut timer2 = Timer::new().unwrap();
    // while sleeping, the previous timer should fire and not have its
// callback do something terrible.
timer2.sleep(2);
})
}<|fim▁end|>
|
};
assert_eq!(rx.recv_opt(), Err(()));
})
|
<|file_name|>game.js<|end_file_name|><|fim▁begin|>var score = 0;
var scoreText;
var map_x = 14;
var map_y = 10;
var game = new Phaser.Game(map_x * 16, map_y * 16, Phaser.AUTO, '', { preload: preload, create: create, update: update });
function preload() {
// game.scale.maxWidth = 600;
// game.scale.maxHeight = 600;
game.scale.scaleMode = Phaser.ScaleManager.SHOW_ALL;
game.scale.setScreenSize();
game.load.image('cat', 'assets/cat.png', 16, 16);
game.load.image('ground', 'assets/darkfloor.png', 16, 16);
game.load.image('l_wall', 'assets/l_wall.png', 16, 16);
game.load.image('r_wall', 'assets/r_wall.png', 16, 16);
game.load.image('t_wall', 'assets/t_wall.png', 16, 16);
game.load.image('tr_wall', 'assets/tr_wall_iso.png', 16, 16);
game.load.image('tl_wall', 'assets/tl_wall_iso.png', 16, 16);
game.load.image('bl_wall', 'assets/bl_wall.png', 16, 16);
game.load.image('br_wall', 'assets/br_wall.png', 16, 16);
game.load.image('stone_door', 'assets/door_stone.png', 16, 16);
game.load.image('star', 'assets/star.png');
}
function create() {
game.physics.startSystem(Phaser.Physics.ARCADE);
// the game world
gmap = game.add.tilemap();
gmap.addTilesetImage('ground', 'ground', 16, 16, null, null, 0);
gmap.addTilesetImage('l_wall', 'l_wall', 16, 16, null, null, 1);
gmap.addTilesetImage('r_wall', 'r_wall', 16, 16, null, null, 2);
gmap.addTilesetImage('tr_wall', 'tr_wall', 16, 16, null, null, 3);
gmap.addTilesetImage('tl_wall', 'tl_wall', 16, 16, null, null, 4);
gmap.addTilesetImage('br_wall', 'br_wall', 16, 16, null, null, 5);
gmap.addTilesetImage('bl_wall', 'bl_wall', 16, 16, null, null, 6);
gmap.addTilesetImage('t_wall', 't_wall', 16, 16, null, null, 7);
gmap.addTilesetImage('stone_door', 'stone_door', 16, 16, null, null, 8);
ground_layer = gmap.create('ground_layer', map_x, map_y, 16, 16);
wall_layer = gmap.create('wall_layer', map_x, map_y, 16, 16);
for (var i=0; i<map_x; i++) {
for(var j=0; j<map_y; j++) {
if (i==0 && j==0) {
gmap.putTile(4, i, j, wall_layer);
} else if (i==map_x/2 && j==0) {
gmap.putTile(8, i, j, wall_layer);
} else if (i==map_x-1 && j == map_y-1) {
gmap.putTile(5, i, j, wall_layer);
} else if (i==0 && j == map_y-1) {
gmap.putTile(6, i, j, wall_layer);
} else if (i==map_x-1 && j == 0) {
gmap.putTile(3, i, j, wall_layer);
} else if (i==0) {
gmap.putTile(1, i, j, wall_layer);
} else if(i==map_x-1) {
gmap.putTile(2, i, j, wall_layer);
} else if(j==map_y-1) {
gmap.putTile(7, i, j, wall_layer);
} else if(j==0) {
gmap.putTile(7, i, j, wall_layer);
} else {
gmap.putTile(0, i, j, ground_layer);
}
}
}
wall_layer.resizeWorld();
game.physics.arcade.enable(wall_layer);
gmap.setCollision(wall_layer);
// the player
player = game.add.sprite(32, 32, 'cat');
game.physics.arcade.enable(player);
player.body.collideWorldBounds = true;
// gmap.setCollisionBetween(0, 100, true, wall_layer);
cursors = game.input.keyboard.createCursorKeys();
}
function update() {
game.physics.arcade.collide(player, wall_layer);
player.body.velocity.x = 0;
player.body.velocity.y = 0;
if (cursors.left.isDown) {
player.body.velocity.x = -150;
} else if (cursors.right.isDown) {
player.body.velocity.x = 150;
} else if (cursors.down.isDown) {
player.body.velocity.y = 150;
} else if (cursors.up.isDown) {
player.body.velocity.y = -150;
} else {<|fim▁hole|><|fim▁end|>
|
}
}
|
<|file_name|>arithmetic_ast.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate nom;
use std::fmt;
use std::fmt::{Display, Debug, Formatter};
use std::str;
use std::str::FromStr;
use nom::{IResult, digit, multispace};
pub enum Expr {
Value(i64),
Add(Box<Expr>, Box<Expr>),
Sub(Box<Expr>, Box<Expr>),
Mul(Box<Expr>, Box<Expr>),
Div(Box<Expr>, Box<Expr>),
Paren(Box<Expr>),<|fim▁hole|> Sub,
Mul,
Div,
}
impl Display for Expr {
fn fmt(&self, format: &mut Formatter) -> fmt::Result {
use self::Expr::*;
match *self {
Value(val) => write!(format, "{}", val),
Add(ref left, ref right) => write!(format, "{} + {}", left, right),
Sub(ref left, ref right) => write!(format, "{} - {}", left, right),
Mul(ref left, ref right) => write!(format, "{} * {}", left, right),
Div(ref left, ref right) => write!(format, "{} / {}", left, right),
Paren(ref expr) => write!(format, "({})", expr),
}
}
}
impl Debug for Expr {
fn fmt(&self, format: &mut Formatter) -> fmt::Result {
use self::Expr::*;
match *self {
Value(val) => write!(format, "{}", val),
Add(ref left, ref right) => write!(format, "({:?} + {:?})", left, right),
Sub(ref left, ref right) => write!(format, "({:?} - {:?})", left, right),
Mul(ref left, ref right) => write!(format, "({:?} * {:?})", left, right),
Div(ref left, ref right) => write!(format, "({:?} / {:?})", left, right),
Paren(ref expr) => write!(format, "[{:?}]", expr),
}
}
}
named!(parens< Expr >, delimited!(
delimited!(opt!(multispace), tag!("("), opt!(multispace)),
map!(map!(expr, Box::new), Expr::Paren),
delimited!(opt!(multispace), tag!(")"), opt!(multispace))
)
);
named!(factor< Expr >, alt_complete!(
map!(
map_res!(
map_res!(
delimited!(opt!(multispace), digit, opt!(multispace)),
str::from_utf8
),
FromStr::from_str
),
Expr::Value)
| parens
)
);
fn fold_exprs(initial: Expr, remainder: Vec<(Oper, Expr)>) -> Expr {
remainder.into_iter().fold(initial, |acc, pair| {
let (oper, expr) = pair;
match oper {
Oper::Add => Expr::Add(Box::new(acc), Box::new(expr)),
Oper::Sub => Expr::Sub(Box::new(acc), Box::new(expr)),
Oper::Mul => Expr::Mul(Box::new(acc), Box::new(expr)),
Oper::Div => Expr::Div(Box::new(acc), Box::new(expr)),
}
})
}
named!(term< Expr >, chain!(
initial: factor ~
remainder: many0!(
alt!(
chain!(tag!("*") ~ mul: factor, || { (Oper::Mul, mul) }) |
chain!(tag!("/") ~ div: factor, || { (Oper::Div, div) })
)
),
|| fold_exprs(initial, remainder))
);
named!(expr< Expr >, chain!(
initial: term ~
remainder: many0!(
alt!(
chain!(tag!("+") ~ add: term, || { (Oper::Add, add) }) |
chain!(tag!("-") ~ sub: term, || { (Oper::Sub, sub) })
)
),
|| fold_exprs(initial, remainder))
);
#[test]
fn factor_test() {
assert_eq!(factor(&b" 3 "[..]).map(|x| format!("{:?}", x)),
IResult::Done(&b""[..], String::from("3")));
}
#[test]
fn term_test() {
assert_eq!(term(&b" 3 * 5 "[..]).map(|x| format!("{:?}", x)),
IResult::Done(&b""[..], String::from("(3 * 5)")));
}
#[test]
fn expr_test() {
assert_eq!(expr(&b" 1 + 2 * 3 "[..]).map(|x| format!("{:?}", x)),
IResult::Done(&b""[..], String::from("(1 + (2 * 3))")));
assert_eq!(expr(&b" 1 + 2 * 3 / 4 - 5 "[..]).map(|x| format!("{:?}", x)),
IResult::Done(&b""[..], String::from("((1 + ((2 * 3) / 4)) - 5)")));
assert_eq!(expr(&b" 72 / 2 / 3 "[..]).map(|x| format!("{:?}", x)),
IResult::Done(&b""[..], String::from("((72 / 2) / 3)")));
}
#[test]
fn parens_test() {
assert_eq!(expr(&b" ( 1 + 2 ) * 3 "[..]).map(|x| format!("{:?}", x)),
IResult::Done(&b""[..], String::from("([(1 + 2)] * 3)")));
}<|fim▁end|>
|
}
pub enum Oper {
Add,
|
<|file_name|>validate-changelog.ts<|end_file_name|><|fim▁begin|>#!/usr/bin/env ts-node
import * as Path from 'path'
import * as Fs from 'fs'
import Ajv, { ErrorObject } from 'ajv'
function handleError(error: string) {
console.error(error)
process.exit(-1)
}
function formatErrors(errors: ErrorObject[]): string {
return errors
.map(error => {
const { dataPath, message } = error
const additionalProperties = error.params as any
const additionalProperty = additionalProperties.additionalProperty as string<|fim▁hole|> additionalPropertyText = `, found: '${additionalProperties.additionalProperty}'`
}
// dataPath starts with a leading "."," which is a bit confusing
const element = dataPath.substring(1)
return ` - ${element} - ${message}${additionalPropertyText}`
})
.join('\n')
}
const repositoryRoot = Path.dirname(__dirname)
const changelogPath = Path.join(repositoryRoot, 'changelog.json')
// eslint-disable-next-line no-sync
const changelog = Fs.readFileSync(changelogPath, 'utf8')
let changelogObj = null
try {
changelogObj = JSON.parse(changelog)
} catch {
handleError(
'Unable to parse the contents of changelog.json into a JSON object. Please review the file contents.'
)
}
const schema = {
$schema: 'http://json-schema.org/draft-07/schema#',
type: 'object',
properties: {
releases: {
type: 'object',
patternProperties: {
'^([0-9]+.[0-9]+.[0-9]+)(-beta[0-9]+|-test[0-9]+)?$': {
type: 'array',
items: {
type: 'string',
},
uniqueItems: true,
},
},
additionalProperties: false,
},
},
}
const ajv = new Ajv({ allErrors: true, uniqueItems: true })
const validate = ajv.compile(schema)
const valid = validate(changelogObj)
if (!valid && validate.errors != null) {
handleError(`Errors: \n${formatErrors(validate.errors)}`)
}
console.log('The changelog is totally fine')<|fim▁end|>
|
let additionalPropertyText = ''
if (additionalProperty != null) {
|
<|file_name|>_error.route.ts<|end_file_name|><|fim▁begin|><%#
Copyright 2013-2017 the original author or authors from the JHipster project.
This file is part of the JHipster project, see https://jhipster.github.io/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
import { Routes } from '@angular/router';
import { UserRouteAccessService } from '../../shared';
import { ErrorComponent } from './error.component';<|fim▁hole|>export const errorRoute: Routes = [
{
path: 'error',
component: ErrorComponent,
data: {
authorities: [],
pageTitle: 'error.title'
},
},
{
path: 'accessdenied',
component: ErrorComponent,
data: {
authorities: [],
pageTitle: 'error.title',
error403: true
},
}
];<|fim▁end|>
| |
<|file_name|>tree_test.go<|end_file_name|><|fim▁begin|>// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package repofiles
import (
"testing"
"code.gitea.io/gitea/models"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
)
func TestGetTreeBySHA(t *testing.T) {
models.PrepareTestEnv(t)
ctx := test.MockContext(t, "user2/repo1")
test.LoadRepo(t, ctx, 1)
test.LoadRepoCommit(t, ctx)
test.LoadUser(t, ctx, 2)
test.LoadGitRepo(t, ctx)
sha := ctx.Repo.Repository.DefaultBranch
page := 1
perPage := 10
ctx.SetParams(":id", "1")
ctx.SetParams(":sha", sha)
tree, err := GetTreeBySHA(ctx.Repo.Repository, ctx.Params(":sha"), page, perPage, true)
assert.Nil(t, err)
expectedTree := &api.GitTreeResponse{
SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/trees/65f1bf27bc3bf70f64657658635e66094edbcb4d",
Entries: []api.GitEntry{
{
Path: "README.md",
Mode: "100644",
Type: "blob",
Size: 30,
SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/4b4851ad51df6a7d9f25c979345979eaeb5b349f",<|fim▁hole|> TotalCount: 1,
}
assert.EqualValues(t, expectedTree, tree)
}<|fim▁end|>
|
},
},
Truncated: false,
Page: 1,
|
<|file_name|>Transactions.js<|end_file_name|><|fim▁begin|>/**
* SIP Transactions module.
*/
(function(JsSIP) {
var
C = {
// Transaction states
STATUS_TRYING: 1,
STATUS_PROCEEDING: 2,
STATUS_CALLING: 3,
STATUS_ACCEPTED: 4,
STATUS_COMPLETED: 5,
STATUS_TERMINATED: 6,
STATUS_CONFIRMED: 7,
// Transaction types
NON_INVITE_CLIENT: 'nict',
NON_INVITE_SERVER: 'nist',
INVITE_CLIENT: 'ict',
INVITE_SERVER: 'ist'
};
var NonInviteClientTransaction = function(request_sender, request, transport) {
var via,
via_transport,
events = ['stateChanged'];
this.type = C.NON_INVITE_CLIENT;
this.transport = transport;
this.id = 'z9hG4bK' + Math.floor(Math.random() * 10000000);
this.request_sender = request_sender;
this.request = request;
this.logger = request_sender.ua.getLogger('jssip.transaction.nict', this.id);
if (request_sender.ua.configuration.hack_via_tcp) {
via_transport = 'TCP';
}
else if (request_sender.ua.configuration.hack_via_ws) {
via_transport = 'WS';
}
else {
via_transport = transport.server.scheme;
}
via = 'SIP/2.0/' + via_transport;
via += ' ' + request_sender.ua.configuration.via_host + ';branch=' + this.id;
this.request.setHeader('via', via);
this.request_sender.ua.newTransaction(this);
this.initEvents(events);
};
NonInviteClientTransaction.prototype = new JsSIP.EventEmitter();
NonInviteClientTransaction.prototype.stateChanged = function(state) {
this.state = state;
this.emit('stateChanged', this);
};
NonInviteClientTransaction.prototype.send = function() {
var tr = this;
this.stateChanged(C.STATUS_TRYING);
this.F = window.setTimeout(function() {tr.timer_F();}, JsSIP.Timers.TIMER_F);
if(!this.transport.send(this.request)) {
this.onTransportError();
}
};
NonInviteClientTransaction.prototype.onTransportError = function() {
this.logger.debug('transport error occurred, deleting non-INVITE client transaction ' + this.id);
window.clearTimeout(this.F);
window.clearTimeout(this.K);
this.stateChanged(C.STATUS_TERMINATED);
this.request_sender.ua.destroyTransaction(this);
this.request_sender.onTransportError();
};
NonInviteClientTransaction.prototype.timer_F = function() {
this.logger.debug('Timer F expired for non-INVITE client transaction ' + this.id);
this.stateChanged(C.STATUS_TERMINATED);
this.request_sender.ua.destroyTransaction(this);
this.request_sender.onRequestTimeout();
};
NonInviteClientTransaction.prototype.timer_K = function() {
this.stateChanged(C.STATUS_TERMINATED);
this.request_sender.ua.destroyTransaction(this);
};
NonInviteClientTransaction.prototype.receiveResponse = function(response) {
var
tr = this,
status_code = response.status_code;
if(status_code < 200) {
switch(this.state) {
case C.STATUS_TRYING:
case C.STATUS_PROCEEDING:
this.stateChanged(C.STATUS_PROCEEDING);
this.request_sender.receiveResponse(response);
break;
}
} else {
switch(this.state) {
case C.STATUS_TRYING:
case C.STATUS_PROCEEDING:
this.stateChanged(C.STATUS_COMPLETED);
window.clearTimeout(this.F);
if(status_code === 408) {
this.request_sender.onRequestTimeout();
} else {
this.request_sender.receiveResponse(response);
}
this.K = window.setTimeout(function() {tr.timer_K();}, JsSIP.Timers.TIMER_K);
break;
case C.STATUS_COMPLETED:
break;
}
}
};
var InviteClientTransaction = function(request_sender, request, transport) {
var via,
tr = this,
via_transport,
events = ['stateChanged'];
this.type = C.INVITE_CLIENT;
this.transport = transport;
this.id = 'z9hG4bK' + Math.floor(Math.random() * 10000000);
this.request_sender = request_sender;
this.request = request;
this.logger = request_sender.ua.getLogger('jssip.transaction.ict', this.id);
if (request_sender.ua.configuration.hack_via_tcp) {
via_transport = 'TCP';
}
else if (request_sender.ua.configuration.hack_via_ws) {
via_transport = 'WS';
}
else {
via_transport = transport.server.scheme;
}
via = 'SIP/2.0/' + via_transport;
via += ' ' + request_sender.ua.configuration.via_host + ';branch=' + this.id;
this.request.setHeader('via', via);
this.request_sender.ua.newTransaction(this);
// TODO: Adding here the cancel() method is a hack that must be fixed.
// Add the cancel property to the request.
  // Will be called from the request instance, not the transaction itself.
this.request.cancel = function(reason) {
tr.cancel_request(tr, reason);
};
this.initEvents(events);
};
InviteClientTransaction.prototype = new JsSIP.EventEmitter();
InviteClientTransaction.prototype.stateChanged = function(state) {
this.state = state;
this.emit('stateChanged', this);
};
InviteClientTransaction.prototype.send = function() {
var tr = this;
this.stateChanged(C.STATUS_CALLING);
this.B = window.setTimeout(function() {
tr.timer_B();
}, JsSIP.Timers.TIMER_B);
if(!this.transport.send(this.request)) {
this.onTransportError();
}
};
InviteClientTransaction.prototype.onTransportError = function() {
this.logger.debug('transport error occurred, deleting INVITE client transaction ' + this.id);
window.clearTimeout(this.B);
window.clearTimeout(this.D);
window.clearTimeout(this.M);
this.stateChanged(C.STATUS_TERMINATED);
this.request_sender.ua.destroyTransaction(this);
if (this.state !== C.STATUS_ACCEPTED) {
this.request_sender.onTransportError();
}
};
// RFC 6026 7.2
InviteClientTransaction.prototype.timer_M = function() {
this.logger.debug('Timer M expired for INVITE client transaction ' + this.id);
if(this.state === C.STATUS_ACCEPTED) {
window.clearTimeout(this.B);
this.stateChanged(C.STATUS_TERMINATED);
this.request_sender.ua.destroyTransaction(this);
}
};
// RFC 3261 17.1.1
InviteClientTransaction.prototype.timer_B = function() {
this.logger.debug('Timer B expired for INVITE client transaction ' + this.id);
if(this.state === C.STATUS_CALLING) {
this.stateChanged(C.STATUS_TERMINATED);
this.request_sender.ua.destroyTransaction(this);
this.request_sender.onRequestTimeout();
}
};
InviteClientTransaction.prototype.timer_D = function() {
this.logger.debug('Timer D expired for INVITE client transaction ' + this.id);
window.clearTimeout(this.B);
this.stateChanged(C.STATUS_TERMINATED);
this.request_sender.ua.destroyTransaction(this);
};
InviteClientTransaction.prototype.sendACK = function(response) {
var tr = this;
this.ack = 'ACK ' + this.request.ruri + ' SIP/2.0\r\n';
this.ack += 'Via: ' + this.request.headers.Via.toString() + '\r\n';
if(this.request.headers.Route) {
this.ack += 'Route: ' + this.request.headers.Route.toString() + '\r\n';
}
this.ack += 'To: ' + response.getHeader('to') + '\r\n';
this.ack += 'From: ' + this.request.headers.From.toString() + '\r\n';
this.ack += 'Call-ID: ' + this.request.headers['Call-ID'].toString() + '\r\n';
this.ack += 'CSeq: ' + this.request.headers.CSeq.toString().split(' ')[0];
this.ack += ' ACK\r\n';
this.ack += 'Content-Length: 0\r\n\r\n';
this.D = window.setTimeout(function() {tr.timer_D();}, JsSIP.Timers.TIMER_D);
this.transport.send(this.ack);
};
InviteClientTransaction.prototype.cancel_request = function(tr, reason) {
var request = tr.request;
this.cancel = JsSIP.C.CANCEL + ' ' + request.ruri + ' SIP/2.0\r\n';
this.cancel += 'Via: ' + request.headers.Via.toString() + '\r\n';
if(this.request.headers.Route) {
this.cancel += 'Route: ' + request.headers.Route.toString() + '\r\n';
}
this.cancel += 'To: ' + request.headers.To.toString() + '\r\n';
this.cancel += 'From: ' + request.headers.From.toString() + '\r\n';
this.cancel += 'Call-ID: ' + request.headers['Call-ID'].toString() + '\r\n';
this.cancel += 'CSeq: ' + request.headers.CSeq.toString().split(' ')[0] +
' CANCEL\r\n';
if(reason) {
this.cancel += 'Reason: ' + reason + '\r\n';
}
this.cancel += 'Content-Length: 0\r\n\r\n';
// Send only if a provisional response (>100) has been received.
if(this.state === C.STATUS_PROCEEDING) {
this.transport.send(this.cancel);
}
};
InviteClientTransaction.prototype.receiveResponse = function(response) {
var
tr = this,
status_code = response.status_code;
if(status_code >= 100 && status_code <= 199) {
switch(this.state) {
case C.STATUS_CALLING:
this.stateChanged(C.STATUS_PROCEEDING);
this.request_sender.receiveResponse(response);
break;
case C.STATUS_PROCEEDING:
this.request_sender.receiveResponse(response);
break;
}
} else if(status_code >= 200 && status_code <= 299) {
switch(this.state) {
case C.STATUS_CALLING:
case C.STATUS_PROCEEDING:
this.stateChanged(C.STATUS_ACCEPTED);
this.M = window.setTimeout(function() {
tr.timer_M();
}, JsSIP.Timers.TIMER_M);
this.request_sender.receiveResponse(response);
break;
case C.STATUS_ACCEPTED:
this.request_sender.receiveResponse(response);
break;
}
} else if(status_code >= 300 && status_code <= 699) {
switch(this.state) {
case C.STATUS_CALLING:
case C.STATUS_PROCEEDING:
this.stateChanged(C.STATUS_COMPLETED);
this.sendACK(response);
this.request_sender.receiveResponse(response);
break;
case C.STATUS_COMPLETED:
this.sendACK(response);
break;
}
}
};
var AckClientTransaction = function(request_sender, request, transport) {
var via,
via_transport;
this.transport = transport;
this.id = 'z9hG4bK' + Math.floor(Math.random() * 10000000);
this.request_sender = request_sender;
this.request = request;
this.logger = request_sender.ua.getLogger('jssip.transaction.nict', this.id);
if (request_sender.ua.configuration.hack_via_tcp) {
via_transport = 'TCP';
}
else if (request_sender.ua.configuration.hack_via_ws) {
via_transport = 'WS';
}
else {
via_transport = transport.server.scheme;
}
via = 'SIP/2.0/' + via_transport;<|fim▁hole|>};
AckClientTransaction.prototype = new JsSIP.EventEmitter();
AckClientTransaction.prototype.send = function() {
if(!this.transport.send(this.request)) {
this.onTransportError();
}
};
AckClientTransaction.prototype.onTransportError = function() {
this.logger.debug('transport error occurred, for an ACK client transaction ' + this.id);
this.request_sender.onTransportError();
};
var NonInviteServerTransaction = function(request, ua) {
var events = ['stateChanged'];
this.type = C.NON_INVITE_SERVER;
this.id = request.via_branch;
this.request = request;
this.transport = request.transport;
this.ua = ua;
this.last_response = '';
request.server_transaction = this;
this.logger = ua.getLogger('jssip.transaction.nist', this.id);
this.state = C.STATUS_TRYING;
ua.newTransaction(this);
this.initEvents(events);
};
NonInviteServerTransaction.prototype = new JsSIP.EventEmitter();
NonInviteServerTransaction.prototype.stateChanged = function(state) {
this.state = state;
this.emit('stateChanged', this);
};
NonInviteServerTransaction.prototype.timer_J = function() {
this.logger.debug('Timer J expired for non-INVITE server transaction ' + this.id);
this.stateChanged(C.STATUS_TERMINATED);
this.ua.destroyTransaction(this);
};
NonInviteServerTransaction.prototype.onTransportError = function() {
if (!this.transportError) {
this.transportError = true;
this.logger.debug('transport error occurred, deleting non-INVITE server transaction ' + this.id);
window.clearTimeout(this.J);
this.stateChanged(C.STATUS_TERMINATED);
this.ua.destroyTransaction(this);
}
};
NonInviteServerTransaction.prototype.receiveResponse = function(status_code, response, onSuccess, onFailure) {
var tr = this;
if(status_code === 100) {
/* RFC 4320 4.1
* 'A SIP element MUST NOT
* send any provisional response with a
* Status-Code other than 100 to a non-INVITE request.'
*/
switch(this.state) {
case C.STATUS_TRYING:
this.stateChanged(C.STATUS_PROCEEDING);
if(!this.transport.send(response)) {
this.onTransportError();
}
break;
case C.STATUS_PROCEEDING:
this.last_response = response;
if(!this.transport.send(response)) {
this.onTransportError();
if (onFailure) {
onFailure();
}
} else if (onSuccess) {
onSuccess();
}
break;
}
} else if(status_code >= 200 && status_code <= 699) {
switch(this.state) {
case C.STATUS_TRYING:
case C.STATUS_PROCEEDING:
this.stateChanged(C.STATUS_COMPLETED);
this.last_response = response;
this.J = window.setTimeout(function() {
tr.timer_J();
}, JsSIP.Timers.TIMER_J);
if(!this.transport.send(response)) {
this.onTransportError();
if (onFailure) {
onFailure();
}
} else if (onSuccess) {
onSuccess();
}
break;
case C.STATUS_COMPLETED:
break;
}
}
};
var InviteServerTransaction = function(request, ua) {
var events = ['stateChanged'];
this.type = C.INVITE_SERVER;
this.id = request.via_branch;
this.request = request;
this.transport = request.transport;
this.ua = ua;
this.last_response = '';
request.server_transaction = this;
this.logger = ua.getLogger('jssip.transaction.ist', this.id);
this.state = C.STATUS_PROCEEDING;
ua.newTransaction(this);
this.resendProvisionalTimer = null;
request.reply(100);
this.initEvents(events);
};
InviteServerTransaction.prototype = new JsSIP.EventEmitter();
InviteServerTransaction.prototype.stateChanged = function(state) {
this.state = state;
this.emit('stateChanged', this);
};
InviteServerTransaction.prototype.timer_H = function() {
this.logger.debug('Timer H expired for INVITE server transaction ' + this.id);
if(this.state === C.STATUS_COMPLETED) {
this.logger.log('transactions', 'ACK for INVITE server transaction was never received, call will be terminated');
}
this.stateChanged(C.STATUS_TERMINATED);
this.ua.destroyTransaction(this);
};
InviteServerTransaction.prototype.timer_I = function() {
this.stateChanged(C.STATUS_TERMINATED);
this.ua.destroyTransaction(this);
};
// RFC 6026 7.1
InviteServerTransaction.prototype.timer_L = function() {
this.logger.debug('Timer L expired for INVITE server transaction ' + this.id);
if(this.state === C.STATUS_ACCEPTED) {
this.stateChanged(C.STATUS_TERMINATED);
this.ua.destroyTransaction(this);
}
};
InviteServerTransaction.prototype.onTransportError = function() {
if (!this.transportError) {
this.transportError = true;
this.logger.debug('transport error occurred, deleting INVITE server transaction ' + this.id);
if (this.resendProvisionalTimer !== null) {
window.clearInterval(this.resendProvisionalTimer);
this.resendProvisionalTimer = null;
}
window.clearTimeout(this.L);
window.clearTimeout(this.H);
window.clearTimeout(this.I);
this.stateChanged(C.STATUS_TERMINATED);
this.ua.destroyTransaction(this);
}
};
InviteServerTransaction.prototype.resend_provisional = function() {
if(!this.transport.send(this.last_response)) {
this.onTransportError();
}
};
// INVITE Server Transaction RFC 3261 17.2.1
InviteServerTransaction.prototype.receiveResponse = function(status_code, response, onSuccess, onFailure) {
var tr = this;
if(status_code >= 100 && status_code <= 199) {
switch(this.state) {
case C.STATUS_PROCEEDING:
if(!this.transport.send(response)) {
this.onTransportError();
}
this.last_response = response;
break;
}
}
if(status_code > 100 && status_code <= 199 && this.state === C.STATUS_PROCEEDING) {
// Trigger the resendProvisionalTimer only for the first non 100 provisional response.
if(this.resendProvisionalTimer === null) {
this.resendProvisionalTimer = window.setInterval(function() {
tr.resend_provisional();}, JsSIP.Timers.PROVISIONAL_RESPONSE_INTERVAL);
}
} else if(status_code >= 200 && status_code <= 299) {
switch(this.state) {
case C.STATUS_PROCEEDING:
this.stateChanged(C.STATUS_ACCEPTED);
this.last_response = response;
this.L = window.setTimeout(function() {
tr.timer_L();
}, JsSIP.Timers.TIMER_L);
if (this.resendProvisionalTimer !== null) {
window.clearInterval(this.resendProvisionalTimer);
this.resendProvisionalTimer = null;
}
/* falls through */
case C.STATUS_ACCEPTED:
// Note that this point will be reached for proceeding tr.state also.
if(!this.transport.send(response)) {
this.onTransportError();
if (onFailure) {
onFailure();
}
} else if (onSuccess) {
onSuccess();
}
break;
}
} else if(status_code >= 300 && status_code <= 699) {
switch(this.state) {
case C.STATUS_PROCEEDING:
if (this.resendProvisionalTimer !== null) {
window.clearInterval(this.resendProvisionalTimer);
this.resendProvisionalTimer = null;
}
if(!this.transport.send(response)) {
this.onTransportError();
if (onFailure) {
onFailure();
}
} else {
this.stateChanged(C.STATUS_COMPLETED);
this.H = window.setTimeout(function() {
tr.timer_H();
}, JsSIP.Timers.TIMER_H);
if (onSuccess) {
onSuccess();
}
}
break;
}
}
};
/**
* INVITE:
* _true_ if retransmission
* _false_ new request
*
* ACK:
* _true_ ACK to non2xx response
* _false_ ACK must be passed to TU (accepted state)
* ACK to 2xx response
*
* CANCEL:
* _true_ no matching invite transaction
* _false_ matching invite transaction and no final response sent
*
* OTHER:
* _true_ retransmission
* _false_ new request
*/
var checkTransaction = function(ua, request) {
var tr;
switch(request.method) {
case JsSIP.C.INVITE:
tr = ua.transactions.ist[request.via_branch];
if(tr) {
switch(tr.state) {
case C.STATUS_PROCEEDING:
tr.transport.send(tr.last_response);
break;
// RFC 6026 7.1 Invite retransmission
//received while in C.STATUS_ACCEPTED state. Absorb it.
case C.STATUS_ACCEPTED:
break;
}
return true;
}
break;
case JsSIP.C.ACK:
tr = ua.transactions.ist[request.via_branch];
// RFC 6026 7.1
if(tr) {
if(tr.state === C.STATUS_ACCEPTED) {
return false;
} else if(tr.state === C.STATUS_COMPLETED) {
tr.state = C.STATUS_CONFIRMED;
tr.I = window.setTimeout(function() {tr.timer_I();}, JsSIP.Timers.TIMER_I);
return true;
}
}
// ACK to 2XX Response.
else {
return false;
}
break;
case JsSIP.C.CANCEL:
tr = ua.transactions.ist[request.via_branch];
if(tr) {
request.reply_sl(200);
if(tr.state === C.STATUS_PROCEEDING) {
return false;
} else {
return true;
}
} else {
request.reply_sl(481);
return true;
}
break;
default:
// Non-INVITE Server Transaction RFC 3261 17.2.2
tr = ua.transactions.nist[request.via_branch];
if(tr) {
switch(tr.state) {
case C.STATUS_TRYING:
break;
case C.STATUS_PROCEEDING:
case C.STATUS_COMPLETED:
tr.transport.send(tr.last_response);
break;
}
return true;
}
break;
}
};
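// Illustrative note, not part of the original source: how a transaction user
// might consume checkTransaction(). The ua.receiveRequest() call is an
// assumption used only to sketch the flow.
//
//   if (JsSIP.Transactions.checkTransaction(ua, request)) {
//     // Retransmission or stray request: already absorbed above, nothing to do.
//     return;
//   }
//   // New request (or ACK to a 2xx response): hand it to the transaction user.
//   ua.receiveRequest(request);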
JsSIP.Transactions = {
C: C,
checkTransaction: checkTransaction,
NonInviteClientTransaction: NonInviteClientTransaction,
InviteClientTransaction: InviteClientTransaction,
AckClientTransaction: AckClientTransaction,
NonInviteServerTransaction: NonInviteServerTransaction,
InviteServerTransaction: InviteServerTransaction
};
}(JsSIP));<|fim▁end|>
|
via += ' ' + request_sender.ua.configuration.via_host + ';branch=' + this.id;
this.request.setHeader('via', via);
|
<|file_name|>p3.rs<|end_file_name|><|fim▁begin|>//! Find the k'th element of a list. (easy)
///# example
///```
///use p99::p3::at;
///assert_eq!(at(3, &['a','b','c','d']), Some('c'));
///assert_eq!(at(3, &['a','b',]), None)
///```
pub fn at<T: Copy>(k: usize, list: &[T]) -> Option<T> {
  if k >= 1 && list.len() >= k {<|fim▁hole|>
}
}<|fim▁end|>
|
Some(list[k - 1])
} else {
|
<|file_name|>add_poem.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
Copyright 2015 Stefano Benvenuti <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
<|fim▁hole|>distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import json
import shutil
# helper function for reading a file content
def read_file(filename):
f = None
try:
f = open(filename)
content = json.load(f)
except Exception as e:
print("File \"%s\" cannot be opened or read: %s", filename, e)
sys.exit(1)
finally:
if f is not None:
f.close()
return content
if len(sys.argv) is not 2:
print("""
USAGE: ./add_poem.py JSON_DELTA_FILE_PATH
""")
sys.exit(1)
conf_file = os.path.join("..","poems","poems.json")
# reads old configuration file and new content
content = read_file(conf_file)
new_content = read_file(sys.argv[1])
# merge the values
content.update(new_content)
# write file
shutil.copyfile(conf_file, conf_file + ".bak")
f = None
try:
f = open(conf_file,'w')
json.dump(content, f)
except Exception as e:
print("File \"%s\" cannot be opened or written: %s", filename, e)
sys.exit(1)
finally:
if f is not None:
f.close()<|fim▁end|>
|
Unless required by applicable law or agreed to in writing, software
|
<|file_name|>wall.js<|end_file_name|><|fim▁begin|>var icms = icms || {};
icms.wall = (function ($) {
var self = this;
this.add = function (parent_id) {
var form = $('#wall_add_form');
if (typeof (parent_id) === 'undefined') {
parent_id = 0;
}
$('#wall_widget #wall_add_link').show();
$('#wall_widget #entries_list .links *').removeClass('disabled');
if (parent_id == 0){
$('#wall_widget #wall_add_link').hide();
form.detach().prependTo('#wall_widget #entries_list');
} else {
$('#wall_widget #entries_list #entry_'+parent_id+' > .media-body > .links .reply').addClass('disabled');
form.detach().appendTo('#wall_widget #entries_list #entry_'+parent_id+' > .media-body');
}
form.show();
$('input[name=parent_id]', form).val(parent_id);
$('input[name=id]', form).val('');
$('input[name=action]', form).val('add');
$('input[name=submit]', form).val( LANG_SEND );
icms.forms.wysiwygInit('content').wysiwygInsertText('content', '');
return false;
};
this.submit = function (action) {
var form = $('#wall_add_form form');
var form_data = icms.forms.toJSON( form );
var url = form.attr('action');
if (action) {form_data.action = action;}
$('.buttons > *', form).addClass('disabled');
$('.button-'+form_data.action, form).addClass('is-busy');
$('textarea', form).prop('disabled', true);
$.post(url, form_data, function(result){
if (form_data.action === 'add') { self.result(result);}
if (form_data.action === 'preview') { self.previewResult(result);}
if (form_data.action === 'update') { self.updateResult(result);}
}, "json");
};
this.preview = function () {
this.submit('preview');
};
this.previewResult = function (result) {
if (result.error){
this.error(result.message);
return;
}
var form = $('#wall_add_form');
var preview_box = $('.preview_box', form).html(result.html);
$(preview_box).addClass('shadow').removeClass('d-none');
setTimeout(function (){ $(preview_box).removeClass('shadow'); }, 1000);
this.restoreForm(false);
};
this.more = function(){
var widget = $('#wall_widget');
$('.show_more', widget).hide();
$('.entry', widget).show();
$('.wall_pages', widget).show();
return false;
};
this.replies = function(id, callback){
var e = $('#wall_widget #entry_'+id);
if (!e.data('replies')) { return false; }
var url = $('#wall_urls').data('replies-url');
$('.icms-wall-item__btn_replies', e).addClass('is-busy');
$.post(url, {id: id}, function(result){
$('.icms-wall-item__btn_replies', e).removeClass('is-busy').hide();
if (result.error){
self.error(result.message);
return false;
}
$('.replies', e).html( result.html );
if (typeof(callback)=='function'){
callback();
}
}, "json");
return false;
};
this.append = function(entry){
$('#wall_widget #entries_list .no_entries').remove();
if (entry.parent_id == 0){
$('#wall_widget #entries_list').prepend( entry.html );
return;
}
if (entry.parent_id > 0){
$('#wall_widget #entry_'+entry.parent_id+' .replies').append( entry.html );
return;
}
};
this.result = function(result){
if (result.error){
this.error(result.message);
return;
}
this.append(result);
this.restoreForm();
};
this.updateResult = function(result){
if (result.error){
this.error(result.message);
return;
}
$('#entries_list #entry_'+result.id+'> .media-body > .icms-wall-html').html(result.html);
this.restoreForm();
};
this.edit = function (id){
var form = $('#wall_add_form');
$('#wall_widget #wall_add_link').show();
$('#wall_widget #entries_list .links *').removeClass('disabled');
$('#wall_widget #entries_list #entry_'+id+' > .media-body > .links .edit').addClass('is-busy disabled');
form.detach().insertAfter('#wall_widget #entries_list #entry_'+id+' > .media-body > .links').show();
$('input[name=id]', form).val(id);
$('input[name=action]', form).val('update');
$('input[name=submit]', form).val( LANG_SAVE );
$('textarea', form).prop('disabled', true);
icms.forms.wysiwygInit('content');
var url = $('#wall_urls').data('get-url');
$.post(url, {id: id}, function(result){
$('#wall_widget #entries_list #entry_'+id+' > .media-body > .links .edit').removeClass('is-busy');
if (result.error){
self.error(result.message);
return;
}
self.restoreForm(false);
icms.forms.wysiwygInsertText('content', result.html);
}, 'json');
return false;
};
this.remove = function (id){
var c = $('#entries_list #entry_'+id);
var username = $('> .media-body > h6 .user', c).text();
if (!confirm(LANG_WALL_ENTRY_DELETE_CONFIRM.replace('%s', username))) {
return false;
}
var url = $('#wall_urls').data('delete-url');
$.post(url, {id: id}, function(result){
if (result.error){
self.error(result.message);
return;
}
c.remove();
self.restoreForm();
}, "json");
return false;
};
this.show = function(id, reply_id, go_reply){
var e = $('#entry_'+id);
if (e.length){
$.scrollTo( e, 500, {
offset: {
left:0,
top:-10
},
onAfter: function(){
self.replies(id, function(){
if (reply_id>0){
self.show(reply_id);
}
});
if (go_reply){
self.add(id);
}
}
});
} else {
if (go_reply){
$.scrollTo( $('#wall_widget'), 500, {
offset: {
left:0,
top:-10
},
onAfter: function(){
self.add();
}
});
}
}
return false;
};
<|fim▁hole|> icms.modal.alert(message);
this.restoreForm(false);
};
this.restoreForm = function(clear_text){
if (typeof (clear_text) === 'undefined') {
clear_text = true;
}
var form = $('#wall_add_form');
$('.buttons *', form).removeClass('disabled is-busy');
$('textarea', form).prop('disabled', false);
if (clear_text) {
form.hide();
icms.forms.wysiwygInsertText('content', '');
$('#wall_widget #wall_add_link').show();
$('#wall_widget #entries_list .links *').removeClass('disabled');
$('.preview_box', form).html('').hide();
}
};
return this;
}).call(icms.wall || {},jQuery);<|fim▁end|>
|
this.error = function(message){
|
<|file_name|>GetEmailStage.java<|end_file_name|><|fim▁begin|>package fi.helsinki.cs.okkopa.main.stage;
import fi.helsinki.cs.okkopa.mail.read.EmailRead;
import fi.helsinki.cs.okkopa.main.ExceptionLogger;
import java.io.IOException;
import java.io.InputStream;<|fim▁hole|>import javax.mail.MessagingException;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* Retrieves new emails and processes the attachments one by one by calling the next stage repeatedly.
*/
@Component
public class GetEmailStage extends Stage<Object, InputStream> {
private static final Logger LOGGER = Logger.getLogger(GetEmailStage.class.getName());
private EmailRead emailReader;
private ExceptionLogger exceptionLogger;
/**
*
* @param server
* @param exceptionLogger
*/
@Autowired
public GetEmailStage(EmailRead server, ExceptionLogger exceptionLogger) {
this.exceptionLogger = exceptionLogger;
this.emailReader = server;
}
@Override
public void process(Object in) {
try {
emailReader.connect();
LOGGER.debug("Kirjauduttu sisään.");
loopMessagesAsLongAsThereAreNew();
LOGGER.debug("Ei lisää viestejä.");
} catch (MessagingException | IOException ex) {
exceptionLogger.logException(ex);
} finally {
emailReader.closeQuietly();
}
}
private void processAttachments(Message nextMessage) throws MessagingException, IOException {
List<InputStream> messagesAttachments = emailReader.getMessagesAttachments(nextMessage);
for (InputStream attachment : messagesAttachments) {
LOGGER.debug("Käsitellään liitettä.");
processNextStages(attachment);
IOUtils.closeQuietly(attachment);
}
}
private void loopMessagesAsLongAsThereAreNew() throws MessagingException, IOException {
Message nextMessage = emailReader.getNextMessage();
while (nextMessage != null) {
LOGGER.debug("Sähköposti haettu.");
processAttachments(nextMessage);
emailReader.cleanUpMessage(nextMessage);
nextMessage = emailReader.getNextMessage();
}
}
}<|fim▁end|>
|
import java.util.List;
import javax.mail.Message;
|
<|file_name|>Calc_Angle.py<|end_file_name|><|fim▁begin|>"""
MUSE -- A Multi-algorithm-collaborative Universal Structure-prediction Environment
Copyright (C) 2010-2017 by Zhong-Li Liu
This program is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software Foundation
version 2 of the License.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
E-mail: [email protected]
"""
import numpy as np
import math
def calc_angle(l1, l2):
v10 = l1
v12 = l2
v10 /= np.linalg.norm(v10)
v12 /= np.linalg.norm(v12)
angle = np.vdot(v10, v12)<|fim▁hole|> return angle
def calc_3atoms_angle(pos1, pos2, pos3):
v10 = pos2 - pos1
v12 = pos3 - pos1
v10 /= np.linalg.norm(v10)
v12 /= np.linalg.norm(v12)
angle = np.vdot(v10, v12)
angle = np.arccos(angle)*180/math.pi
return angle
def dihedral_angle(cell_a, cell_b, cell_c):
a = cell_a
b = cell_b
c = cell_c
axb = np.cross(a, b)
axb /= np.linalg.norm(axb)
bxc = np.cross(b, c)
bxc /= np.linalg.norm(bxc)
angle = np.vdot(axb, bxc)
dangle = 180-np.arccos(angle)*180/math.pi
# print dangle
return dangle
if __name__ == '__main__':
cell = np.array([[2.7085009435849550, -2.7085009435849550, -0.0000000000000000],
[-2.7085009435849550, 0.0000000000000000, -2.7085009435849550],
[2.7085009435849550, 2.7085009435849550, -0.0000000000000000]])
cell_a = cell[0]
cell_b = cell[1]
cell_c = cell[2]
dihedral_angle(cell_a, cell_b, cell_c)
dihedral_angle(cell_b, cell_c, cell_a)
dihedral_angle(cell_c, cell_a, cell_b)<|fim▁end|>
|
angle = np.arccos(angle)*180/math.pi
|
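A quick numerical check of the angle formula used in Calc_Angle.py (arccos of the dot product of normalised vectors, converted to degrees). The helper below re-implements it standalone so the snippet runs without importing the module.
```
import math
import numpy as np

def angle_between(v1, v2):
    # Normalise copies of the inputs, then take arccos of the dot product.
    v1 = np.asarray(v1, dtype=float) / np.linalg.norm(v1)
    v2 = np.asarray(v2, dtype=float) / np.linalg.norm(v2)
    return np.arccos(np.vdot(v1, v2)) * 180 / math.pi

print(angle_between([1.0, 0.0, 0.0], [0.0, 1.0, 0.0]))  # 90.0
print(angle_between([1.0, 0.0, 0.0], [1.0, 1.0, 0.0]))  # ~45.0
```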
<|file_name|>match-implicit-copy-unique.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>#![allow(unknown_features)]
#![feature(box_syntax)]
struct Pair { a: Box<isize>, b: Box<isize> }
pub fn main() {
let mut x: Box<_> = box Pair {a: box 10, b: box 20};
let x_internal = &mut *x;
match *x_internal {
Pair {a: ref mut a, b: ref mut _b} => {
assert!(**a == 10); *a = box 30; assert!(**a == 30);
}
}
}<|fim▁end|>
|
// pretty-expanded FIXME #23616
|
<|file_name|>sysctl.py<|end_file_name|><|fim▁begin|>"""
Linux kernel system control from Python.<|fim▁hole|><|fim▁end|>
|
"""
|
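The body of sysctl.py is not included in this excerpt. As a hedged illustration only, one common way such a module exposes kernel parameters is by mapping dotted names onto files under /proc/sys; the function names below are assumptions, not the module's actual API.
```
import os

PROC_SYS = "/proc/sys"

def read_sysctl(name):
    # "kernel.hostname" -> /proc/sys/kernel/hostname
    path = os.path.join(PROC_SYS, *name.split("."))
    with open(path) as f:
        return f.read().strip()

def write_sysctl(name, value):
    # Writing normally requires root privileges.
    path = os.path.join(PROC_SYS, *name.split("."))
    with open(path, "w") as f:
        f.write(str(value))

if __name__ == "__main__":
    print(read_sysctl("kernel.hostname"))
```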
<|file_name|>network-task.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Expose 'NetworkTask'
*/
module.exports = NetworkTask;
/**
* Module dependencies
*/
var networkObject = require('./network-object');
var readLine = require('readline');
var childProcess = require('child_process');
/**
* Constants
*/
var NETWORK_TOPIC = 'monitor/network';
/**
* Constructor
* Initialize a new NetworkTask
*/
function NetworkTask(info){
this.noInstance = null;
this.generalInfo = info;
}
/**
* Class Methods
*/
NetworkTask.prototype.runAndParse = function(callback){
if(this.generalInfo){
//run the command, parse the command, return a result
console.log('running network command');
//make sure this is a new instance everytime
this.noInstance = new networkObject(this.generalInfo.thingId);
//lets run ifconfig to get mac address and ip
if(this.generalInfo.os === 'darwin'){
var res = getNetworkInterfacesMac();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else if(this.generalInfo.os === 'linux'){
var res = getNetworkInterfacesLinux();
if(res){
this.noInstance.nInterface=res.iname;
this.noInstance.ipAddress=res.ip;
this.noInstance.macAddress=res.mac;
}
}
else{
console.log('not implemented');
}
//create the child process to execute $ iftop -t -s 2 -P -N
//must run as ROOT on ubuntu side
//add the interface - from the active network
var commandLine = childProcess.spawn('iftop', ['-t','-s','3','-P','-N','-b','-B','-i',this.noInstance.nInterface]);
var noPass = this.noInstance;
var lineReader = readLine.createInterface(commandLine.stdout, commandLine.stdin);
lineReader.on('line', function(line){
noPass.read(line);
});
commandLine.on('close', function(code, signal){
//console.log('read ' + noPass.counter + ' lines');
callback(NETWORK_TOPIC, noPass);
});
}
else{
//skipping execution
console.log('skipping network task due to missing general information');
}
}
/**
* Helper Methods
*/
// get all available network interfaces for mac
// return an object with {iname, ip, mac, status}
function getNetworkInterfacesMac(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('flags=')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split(':');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0];
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet ') >=0){
var ipSplit = temp.split(' ');
if(ipSplit.length >= 4){
currInterface.ip=ipSplit[1].trim();
}
}
if(temp.indexOf('ether')>=0){
var macSplit = temp.split(' ');
if(macSplit.length >= 2){
currInterface.mac=macSplit[1].trim();
}
}
//we'll use a different algo on mac osx since
//it actually returns the current
if(temp.indexOf('status')>=0){
var statusSplit = temp.split(':');
if(statusSplit.length >= 2){
currInterface.status=statusSplit[1].trim();
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];
if(tRes){
//we still have a possibility of seeing 2 interfaces available
if(tRes.status==='active' && tRes.ip && tRes.mac){
result=tRes;
return result;
}
}
}
}
return result;
}
function getNetworkInterfacesLinux(){
var result={};
var availableInterfaces=[];
var returnObject = childProcess.spawnSync('ifconfig', ['-a']);
if(returnObject.stdout){
var displayStr = returnObject.stdout.toString().trim().toLowerCase();
if(displayStr){
var ifSplit = displayStr.split('\n');
if(ifSplit){
//declare a point array
var currInterface={};
for(var i=0; i<ifSplit.length; i++){
var temp = ifSplit[i].reduceWhiteSpace().trim();
//search for the first line of each
if(temp.indexOf('link encap')>=0){
if(currInterface.iname){
//lets save this interface
availableInterfaces.push(currInterface);
}
//this is the first line
var interfaceSplit = temp.split('link encap:');
if(interfaceSplit.length == 2){
//lets get the name
var iName = interfaceSplit[0].trim();
var macAddr='';
//lets get the macaddr
var macSplit = interfaceSplit[1].trim().split(' ');
if(macSplit.length==3){
macAddr = macSplit[2];
}
//create a new interface and point current to this one
var tempInterface = {};
tempInterface.iname=iName;
if(macAddr){
tempInterface.mac=macAddr;
}
currInterface = tempInterface;
}
}
else{
//this is a regular line
//search for ether - which contains the mac address
//search for inet which should contain the ip address
//search for status, which indicates status
//space is important here to diffrentiate between inet6
if(temp.indexOf('inet addr:') >=0){
var ipBlockSplit = temp.split(' ');
if(ipBlockSplit.length >= 2){
//take the second entry
var ipSplit=ipBlockSplit[1].split(':');
if(ipSplit.length >= 2){
currInterface.ip=ipSplit[1].trim();
//if both ip and mac exist
if(currInterface.mac){
currInterface.status='active';
}
}
}
}
}
}
//lets save the last interface
if(currInterface.iname){
availableInterfaces.push(currInterface);
}
}
}
}
//currently only returns the first active link - if there are multiple
//interfaces active, we will probably need to handle multiple
if(availableInterfaces.length > 0){
for(var j=0; j<availableInterfaces.length; j++){
var tRes = availableInterfaces[j];<|fim▁hole|> if(tRes.status==='active'){
result=tRes;
}
}
}
}
return result;
}<|fim▁end|>
|
if(tRes){
|
<|file_name|>KernelProxy.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.management.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Hashtable;
import java.util.List;
import java.util.NoSuchElementException;
import javax.management.MBeanServerConnection;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import org.neo4j.jmx.ManagementInterface;
/**
* Does not have any public methods - since the public interface of
* {@link org.neo4j.management.Neo4jManager} should be defined completely in
* that class.
*
* Does not have any (direct or transitive) dependencies on any part of the jmx
* component - since this class is used in
* {@link org.neo4j.management.impl.jconsole.Neo4jPlugin the JConsole plugin},
* and the jmx component is not on the class path in JConsole.
*
* @author Tobias Ivarsson <[email protected]>
*/
public abstract class KernelProxy
{
static final String KERNEL_BEAN_TYPE = "org.neo4j.jmx.Kernel";
protected static final String KERNEL_BEAN_NAME = "Kernel";
static final String MBEAN_QUERY = "MBeanQuery";
protected final MBeanServerConnection server;
protected final ObjectName kernel;
protected KernelProxy( MBeanServerConnection server, ObjectName kernel )
{
String className = null;
try
{
className = server.getMBeanInfo( kernel ).getClassName();
}
catch ( Exception e )
{
// fall through
}
if ( !KERNEL_BEAN_TYPE.equals( className ) )
{
throw new IllegalArgumentException(
"The specified ObjectName does not represent a Neo4j Kernel bean in the specified MBean server." );
}
this.server = server;
this.kernel = kernel;
}
protected List<Object> allBeans()
{
List<Object> beans = new ArrayList<Object>();
Iterable<ObjectInstance> mbeans;
try
{
mbeans = server.queryMBeans( mbeanQuery(), null );
}
catch ( IOException handled )
{
return beans;
}
for ( ObjectInstance instance : mbeans )
{
String className = instance.getClassName();
Class<?> beanType = null;
try
{
if ( className != null ) beanType = Class.forName( className );
}
catch ( Exception ignored )
{
// fall through
}
catch ( LinkageError ignored )
{
// fall through
}
if ( beanType != null )
{
try
{
beans.add( BeanProxy.load( server, beanType, instance.getObjectName() ) );
}
catch ( Exception ignored )
{
// fall through
}
}
}
return beans;
}
private ObjectName assertExists( ObjectName name )
{
try
{
if ( !server.queryNames( name, null ).isEmpty() )
{
return name;
}
}
catch ( IOException handled )
{
// fall through
}
throw new NoSuchElementException( "No MBeans matching " + name );
}
protected <T> T getBean( Class<T> beanInterface )
{
return BeanProxy.load( server, beanInterface, createObjectName( beanInterface ) );
}
protected <T> Collection<T> getBeans( Class<T> beanInterface )
{
return BeanProxy.loadAll( server, beanInterface, createObjectNameQuery( beanInterface ) );
}
private ObjectName createObjectNameQuery( Class<?> beanInterface )
{
return createObjectNameQuery( mbeanQuery(), beanInterface );
}
private ObjectName createObjectName( Class<?> beanInterface )
{
return assertExists( createObjectName( mbeanQuery(), beanInterface ) );
}
protected ObjectName createObjectName( String beanName )
{
return assertExists( createObjectName( mbeanQuery(), beanName, false ) );
}
protected ObjectName mbeanQuery()
{
try
{
return (ObjectName) server.getAttribute( kernel, MBEAN_QUERY );
}
catch ( Exception cause )
{
throw new IllegalStateException( "Could not get MBean query.", cause );
}
}
protected static ObjectName createObjectName( String kernelIdentifier, Class<?> beanInterface )
{
return createObjectName( kernelIdentifier, beanName( beanInterface ) );
}
protected static ObjectName createObjectName( String kernelIdentifier, String beanName, String... extraNaming )
{
Hashtable<String, String> properties = new Hashtable<String, String>();
properties.put( "instance", "kernel#" + kernelIdentifier );
return createObjectName( "org.neo4j", properties, beanName, false, extraNaming );
}
static ObjectName createObjectNameQuery( String kernelIdentifier, String beanName, String... extraNaming )
{
Hashtable<String, String> properties = new Hashtable<String, String>();
properties.put( "instance", "kernel#" + kernelIdentifier );
return createObjectName( "org.neo4j", properties, beanName, true, extraNaming );
}
static ObjectName createObjectName( ObjectName query, Class<?> beanInterface )
{
return createObjectName( query, beanName( beanInterface ), false );
}
static ObjectName createObjectNameQuery( ObjectName query, Class<?> beanInterface )
{
return createObjectName( query, beanName( beanInterface ), true );
}
private static ObjectName createObjectName( ObjectName query, String beanName, boolean isQuery )
{
Hashtable<String, String> properties = new Hashtable<String, String>(query.getKeyPropertyList());
return createObjectName( query.getDomain(), properties, beanName, isQuery );
}
static String beanName( Class<?> beanInterface )
{
if ( beanInterface.isInterface() )
{
ManagementInterface management = beanInterface.getAnnotation( ManagementInterface.class );
if ( management != null )
{
return management.name();
}
}
        throw new IllegalArgumentException( beanInterface + " is not a Neo4j Management Bean interface" );
}
<|fim▁hole|> boolean query, String... extraNaming )
{
properties.put( "name", beanName );
for ( int i = 0; i < extraNaming.length; i++ )
{
properties.put( "name" + i, extraNaming[i] );
}
ObjectName result;
try
{
result = new ObjectName( domain, properties );
if ( query ) result = ObjectName.getInstance( result.toString() + ",*" );
}
catch ( MalformedObjectNameException e )
{
return null;
}
return result;
}
}<|fim▁end|>
|
private static ObjectName createObjectName( String domain, Hashtable<String, String> properties, String beanName,
|
<|file_name|>regexs.rs<|end_file_name|><|fim▁begin|>extern crate regex;
use self::regex::Regex;
pub fn demo_regex() {<|fim▁hole|> let haystack = r#"regex = "0.2.5""#;
let captures = semver.captures(haystack)
.ok_or("semver regex should have matched").unwrap();
println!("regex captures: {:?}", captures);
}<|fim▁end|>
|
let semver = Regex::new(r"(\d+)\.(\d+)\.(\d+)(-[-.[:alnum:]]*)?").unwrap();
|
<|file_name|>errors.py<|end_file_name|><|fim▁begin|>"""
This module implements Exception classes<|fim▁hole|>class Error(StandardError):
"""
Base class for all driver error exceptions
"""
def __init__(self, err_id = None, err_msg = None):
self._err_id = err_id or -1
self._err_msg = err_msg
#def __str__(self):
# return '%d: %s' % (self._err_id, self._err_msg)
class Warning(StandardError):
"""
Warnings exception
"""
pass
class InterfaceError(Error):
"""
Exception for interface errors
"""
pass
class DatabaseError(Error):
"""
Exception for database errors
"""
pass
class InternalError(DatabaseError):
"""
Exception for internal errors
"""
pass
class OperationalError(DatabaseError):
"""
Exception for database operations errors
"""
pass
class ProgrammingError(DatabaseError):
"""
Exception for programming errors
"""
pass
class IntegrityError(DatabaseError):
"""
Exception for data relational integrity errors
"""
pass
class DataError(DatabaseError):
"""
Exception for data errors
"""
pass
class NotSupportedError(DatabaseError):
"""
Exception for unsupported database operations
"""
pass<|fim▁end|>
|
http://www.python.org/dev/peps/pep-0249/
"""
|
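A small usage sketch for the PEP 249 hierarchy above, assuming the module is importable as errors and runs under Python 2 (it subclasses StandardError). The connect() helper and its argument are hypothetical; the point is that callers can catch either a specific subclass or the shared Error base.
```
from errors import Error, OperationalError

def connect(dsn):
    # A driver would raise the most specific subclass that applies.
    raise OperationalError(err_id=1040, err_msg="Too many connections")

try:
    connect("db://example")
except OperationalError as e:
    print("operational problem: %s" % e._err_msg)
except Error:
    # Any other driver-defined failure is still caught via the base class.
    print("generic driver error")
```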
<|file_name|>Tests.py<|end_file_name|><|fim▁begin|># BridgeDB by Nick Mathewson.
# Copyright (c) 2007-2009, The Tor Project, Inc.
# See LICENSE for licensing information
from __future__ import print_function
import doctest
import os
import random
import sqlite3
import tempfile
import unittest
import warnings
import time
from datetime import datetime
import bridgedb.Bridges
import bridgedb.Main
import bridgedb.Dist
import bridgedb.Time
import bridgedb.Storage
import re
import ipaddr
from bridgedb.Filters import filterBridgesByIP4
from bridgedb.Filters import filterBridgesByIP6
from bridgedb.Filters import filterBridgesByOnlyIP4
from bridgedb.Filters import filterBridgesByOnlyIP6
from bridgedb.Filters import filterBridgesByTransport
from bridgedb.Filters import filterBridgesByNotBlockedIn
from bridgedb.Stability import BridgeHistory
from bridgedb.parse import addr
from bridgedb.parse import networkstatus
from math import log
def suppressWarnings():
warnings.filterwarnings('ignore', '.*tmpnam.*')
def randomIP():
if random.choice(xrange(2)):
return randomIP4()
return randomIP6()
def randomIP4():
return ipaddr.IPv4Address(random.getrandbits(32))
def randomIP4String():
return randomIP4().compressed
def randomIP6():
return ipaddr.IPv6Address(random.getrandbits(128))
def randomIP6String():
return bracketIP6(randomIP6().compressed)
def randomIPString():
if random.choice(xrange(2)):
return randomIP4String()
return randomIP6String()
def bracketIP6(ip):
"""Put brackets around an IPv6 address, just as tor does."""
return "[%s]" % ip
def random16IP():
upper = "123.123." # same 16
lower = ".".join([str(random.randrange(1,256)) for _ in xrange(2)])
return upper+lower
def randomPort():
return random.randint(1,65535)
def randomPortSpec():
"""
returns a random list of ports
"""
ports = []
for i in range(0,24):
ports.append(random.randint(1,65535))
ports.sort(reverse=True)
portspec = ""
for i in range(0,16):
portspec += "%d," % random.choice(ports)
portspec = portspec.rstrip(',') #remove trailing ,
return portspec
def randomCountry():
countries = ['us', 'nl', 'de', 'cz', 'sk', 'as', 'si', 'it']
#XXX: load from geoip
return random.choice(countries)
def randomCountrySpec():
countries = ['us', 'nl', 'de', 'cz', 'sk', 'as', 'si', 'it']
#XXX: load from geoip
spec = ""
choices = []
for i in xrange(10):
choices.append(random.choice(countries))
choices = set(choices) #dedupe
choices = list(choices)
spec += ",".join(choices)
return spec
def fakeBridge(orport=8080, running=True, stable=True, or_addresses=False,
transports=False):
nn = "bridge-%s"%random.randrange(0,1000000)
ip = ipaddr.IPAddress(randomIP4())
fp = "".join([random.choice("0123456789ABCDEF") for _ in xrange(40)])
b = bridgedb.Bridges.Bridge(nn,ip,orport,fingerprint=fp)
b.setStatus(running, stable)
oraddrs = []
if or_addresses:
for i in xrange(8):
# Only add or_addresses if they are valid. Otherwise, the test
# will randomly fail if an invalid address is chosen:
address = randomIP4String()
portlist = addr.PortList(randomPortSpec())
if addr.isValidIP(address):
oraddrs.append((address, portlist,))
for address, portlist in oraddrs:
networkstatus.parseALine("{0}:{1}".format(address, portlist))
try:
portlist.add(b.or_addresses[address])
except KeyError:
pass
finally:
b.or_addresses[address] = portlist
if transports:
for i in xrange(0,8):
b.transports.append(bridgedb.Bridges.PluggableTransport(b,
random.choice(["obfs", "obfs2", "pt1"]),
randomIP(), randomPort()))
return b
def fakeBridge6(orport=8080, running=True, stable=True, or_addresses=False,
transports=False):
nn = "bridge-%s"%random.randrange(0,1000000)
ip = ipaddr.IPAddress(randomIP6())
fp = "".join([random.choice("0123456789ABCDEF") for _ in xrange(40)])
b = bridgedb.Bridges.Bridge(nn,ip,orport,fingerprint=fp)
b.setStatus(running, stable)
oraddrs = []
if or_addresses:
for i in xrange(8):
# Only add or_addresses if they are valid. Otherwise, the test
# will randomly fail if an invalid address is chosen:
address = randomIP6()
portlist = addr.PortList(randomPortSpec())
if addr.isValidIP(address):
address = bracketIP6(address)
oraddrs.append((address, portlist,))
for address, portlist in oraddrs:
networkstatus.parseALine("{0}:{1}".format(address, portlist))
try:
portlist.add(b.or_addresses[address])
except KeyError:
pass
finally:
b.or_addresses[address] = portlist
try:
portlist.add(b.or_addresses[address])
except KeyError:
pass
finally:
b.or_addresses[address] = portlist
if transports:
for i in xrange(0,8):
b.transports.append(bridgedb.Bridges.PluggableTransport(b,
random.choice(["obfs", "obfs2", "pt1"]),
randomIP(), randomPort()))
return b
def fake16Bridge(orport=8080, running=True, stable=True):
nn = "bridge-%s"%random.randrange(0,1000000)
ip = random16IP()
fp = "".join([random.choice("0123456789ABCDEF") for _ in xrange(40)])
b = bridgedb.Bridges.Bridge(nn,ip,orport,fingerprint=fp)
b.setStatus(running, stable)
return b
simpleDesc = "router Unnamed %s %s 0 9030\n"\
"opt fingerprint DEAD BEEF F00F DEAD BEEF F00F DEAD BEEF F00F DEAD\n"\
"opt @purpose bridge\n"
orAddress = "or-address %s:%s\n"
def gettimestamp():
ts = time.strftime("%Y-%m-%d %H:%M:%S")
return "opt published %s\n" % ts
class RhymesWith255Category:
def contains(self, ip):
return ip.endswith(".255")
class EmailBridgeDistTests(unittest.TestCase):
def setUp(self):
self.fd, self.fname = tempfile.mkstemp()
self.db = bridgedb.Storage.Database(self.fname)
bridgedb.Storage.setGlobalDB(self.db)
self.cur = self.db._conn.cursor()
def tearDown(self):
self.db.close()
os.close(self.fd)
os.unlink(self.fname)
def testEmailRateLimit(self):
db = self.db
EMAIL_DOMAIN_MAP = {'example.com':'example.com'}
d = bridgedb.Dist.EmailBasedDistributor(
"Foo",
{'example.com': 'example.com',
'dkim.example.com': 'dkim.example.com'},
{'example.com': [], 'dkim.example.com': ['dkim']})
for _ in xrange(256):
d.insert(fakeBridge())
d.getBridgesForEmail('[email protected]', 1, 3)
self.assertRaises(bridgedb.Dist.TooSoonEmail,
d.getBridgesForEmail, '[email protected]', 1, 3)
self.assertRaises(bridgedb.Dist.IgnoreEmail,
d.getBridgesForEmail, '[email protected]', 1, 3)
def testUnsupportedDomain(self):
db = self.db
self.assertRaises(bridgedb.Dist.UnsupportedDomain,
bridgedb.Dist.normalizeEmail, '[email protected]',
{'example.com':'example.com'},
{'example.com':[]})
class IPBridgeDistTests(unittest.TestCase):
def dumbAreaMapper(self, ip):
return ip
def testBasicDist(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(256):
d.insert(fakeBridge())
n = d.getBridgesForIP("1.2.3.4", "x", 2)
n2 = d.getBridgesForIP("1.2.3.4", "x", 2)
self.assertEquals(n, n2)
def testDistWithCategories(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo",
[RhymesWith255Category()])
assert len(d.categories) == 1
for _ in xrange(256):
d.insert(fakeBridge())
for _ in xrange(256):
# Make sure that the categories do not overlap
f = lambda: ".".join([str(random.randrange(1,255)) for _ in xrange(4)])
g = lambda: ".".join([str(random.randrange(1,255)) for _ in xrange(3)] + ['255'])
n = d.getBridgesForIP(g(), "x", 10)
n2 = d.getBridgesForIP(f(), "x", 10)
assert(len(n) > 0)
assert(len(n2) > 0)
for b in n:
assert (b not in n2)
for b in n2:
assert (b not in n)
#XXX: #6175 breaks this test!
#def testDistWithPortRestrictions(self):
# param = bridgedb.Bridges.BridgeRingParameters(needPorts=[(443, 1)])
# d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Baz",
# answerParameters=param)
# for _ in xrange(32):
# d.insert(fakeBridge(443))
# for _ in range(256):
# d.insert(fakeBridge())
# for _ in xrange(32):
# i = randomIP()
# n = d.getBridgesForIP(i, "x", 5)
# count = 0
# fps = {}
# for b in n:
# fps[b.getID()] = 1
# if b.orport == 443:
# count += 1
# self.assertEquals(len(fps), len(n))
# self.assertEquals(len(fps), 5)
# self.assertTrue(count >= 1)
#def testDistWithFilter16(self):
# d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
# for _ in xrange(256):
# d.insert(fake16Bridge())
# n = d.getBridgesForIP("1.2.3.4", "x", 10)
# slash16s = dict()
# for bridge in n:
# m = re.match(r'(\d+\.\d+)\.\d+\.\d+', bridge.ip)
# upper16 = m.group(1)
# self.assertTrue(upper16 not in slash16s)
# slash16s[upper16] = True
def testDistWithFilterIP6(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for i in xrange(500):
bridges = d.getBridgesForIP(randomIP4String(),
"faketimestamp",
bridgeFilterRules=[filterBridgesByIP6])
bridge = random.choice(bridges)
bridge_line = bridge.getConfigLine(addressClass=ipaddr.IPv6Address)
address, portlist = networkstatus.parseALine(bridge_line)
assert type(address) is ipaddr.IPv6Address
assert filterBridgesByIP6(random.choice(bridges))
def testDistWithFilterIP4(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for i in xrange(500):
bridges = d.getBridgesForIP(randomIP4String(),
"faketimestamp",
bridgeFilterRules=[filterBridgesByIP4])
bridge = random.choice(bridges)
bridge_line = bridge.getConfigLine(addressClass=ipaddr.IPv4Address)
address, portlist = networkstatus.parseALine(bridge_line)
assert type(address) is ipaddr.IPv4Address
assert filterBridgesByIP4(random.choice(bridges))
def testDistWithFilterBoth(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for i in xrange(50):
bridges = d.getBridgesForIP(randomIP4String(),
"faketimestamp", 1,
bridgeFilterRules=[
filterBridgesByIP4,
filterBridgesByIP6])
if bridges:
t = bridges.pop()
assert filterBridgesByIP4(t)
assert filterBridgesByIP6(t)
address, portlist = networkstatus.parseALine(
t.getConfigLine(addressClass=ipaddr.IPv4Address))
assert type(address) is ipaddr.IPv4Address
address, portlist = networkstatus.parseALine(
t.getConfigLine(addressClass=ipaddr.IPv6Address))
assert type(address) is ipaddr.IPv6Address
def testDistWithFilterAll(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for i in xrange(5):
b = d.getBridgesForIP(randomIP4String(), "x", 1, bridgeFilterRules=[
filterBridgesByOnlyIP4, filterBridgesByOnlyIP6])
assert len(b) == 0
def testDistWithFilterBlockedCountries(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for b in d.splitter.bridges:
# china blocks all :-(
for pt in b.transports:
key = "%s:%s" % (pt.address, pt.port)
b.blockingCountries[key] = set(['cn'])
for address, portlist in b.or_addresses.items():
for port in portlist:
key = "%s:%s" % (address, port)
b.blockingCountries[key] = set(['cn'])
key = "%s:%s" % (b.ip, b.orport)
b.blockingCountries[key] = set(['cn'])
for i in xrange(5):
b = d.getBridgesForIP(randomIP4String(), "x", 1, bridgeFilterRules=[
filterBridgesByNotBlockedIn("cn")])
assert len(b) == 0
b = d.getBridgesForIP(randomIP4String(), "x", 1, bridgeFilterRules=[
filterBridgesByNotBlockedIn("us")])
assert len(b) > 0
def testDistWithFilterBlockedCountriesAdvanced(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True, transports=True))
d.insert(fakeBridge(or_addresses=True, transports=True))
for b in d.splitter.bridges:
# china blocks some transports
for pt in b.transports:
if random.choice(xrange(2)) > 0:
key = "%s:%s" % (pt.address, pt.port)
b.blockingCountries[key] = set(['cn'])
for address, portlist in b.or_addresses.items():
# china blocks some transports
for port in portlist:
if random.choice(xrange(2)) > 0:
key = "%s:%s" % (address, port)
b.blockingCountries[key] = set(['cn'])
key = "%s:%s" % (b.ip, b.orport)
b.blockingCountries[key] = set(['cn'])
# we probably will get at least one bridge back!
# it's pretty unlikely to lose a coin flip 250 times in a row
for i in xrange(5):
b = d.getBridgesForIP(randomIPString(), "x", 1,
bridgeFilterRules=[
filterBridgesByNotBlockedIn("cn", methodname='obfs2'),
filterBridgesByTransport('obfs2'),
])
try: assert len(b) > 0
except AssertionError:
print("epic fail")
b = d.getBridgesForIP(randomIPString(), "x", 1, bridgeFilterRules=[
filterBridgesByNotBlockedIn("us")])
assert len(b) > 0
class DictStorageTests(unittest.TestCase):
def setUp(self):
self.fd, self.fname = tempfile.mkstemp()
self.conn = sqlite3.Connection(self.fname)
def tearDown(self):
self.conn.close()
os.close(self.fd)
os.unlink(self.fname)
def testSimpleDict(self):
self.conn.execute("CREATE TABLE A ( X PRIMARY KEY, Y )")
d = bridgedb.Storage.SqliteDict(self.conn, self.conn.cursor(),
"A", (), (), "X", "Y")
self.basictests(d)
def testComplexDict(self):
self.conn.execute("CREATE TABLE B ( X, Y, Z, "
"CONSTRAINT B_PK PRIMARY KEY (X,Y) )")
d = bridgedb.Storage.SqliteDict(self.conn, self.conn.cursor(),
"B", ("X",), ("x1",), "Y", "Z")
d2 = bridgedb.Storage.SqliteDict(self.conn, self.conn.cursor(),
"B", ("X",), ("x2",), "Y", "Z")
self.basictests(d)
self.basictests(d2)
def basictests(self, d):
d["hello"] = "goodbye"
d["hola"] = "adios"
self.assertEquals(d["hola"], "adios")
d["hola"] = "hasta luego"
self.assertEquals(d["hola"], "hasta luego")
self.assertEquals(sorted(d.keys()), [u"hello", u"hola"])
self.assertRaises(KeyError, d.__getitem__, "buongiorno")
self.assertEquals(d.get("buongiorno", "ciao"), "ciao")
self.conn.commit()
d["buongiorno"] = "ciao"
del d['hola']
self.assertRaises(KeyError, d.__getitem__, "hola")
self.conn.rollback()
self.assertEquals(d["hola"], "hasta luego")
self.assertEquals(d.setdefault("hola","bye"), "hasta luego")
self.assertEquals(d.setdefault("yo","bye"), "bye")
self.assertEquals(d['yo'], "bye")
class SQLStorageTests(unittest.TestCase):
def setUp(self):
self.fd, self.fname = tempfile.mkstemp()
self.db = bridgedb.Storage.Database(self.fname)
self.cur = self.db._conn.cursor()
def tearDown(self):
self.db.close()
os.close(self.fd)
os.unlink(self.fname)
def assertCloseTo(self, a, b, delta=60):
self.assertTrue(abs(a-b) <= delta)
def testBridgeStorage(self):
db = self.db
B = bridgedb.Bridges.Bridge
t = time.time()
cur = self.cur
k1 = "aaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbb"
k2 = "abababababababababababababababababababab"
k3 = "cccccccccccccccccccccccccccccccccccccccc"
b1 = B("serv1", "1.2.3.4", 999, fingerprint=k1)
b1_v2 = B("serv1", "1.2.3.5", 9099, fingerprint=k1)
b2 = B("serv2", "2.3.4.5", 9990, fingerprint=k2)
b3 = B("serv3", "2.3.4.6", 9008, fingerprint=k3)
validRings = ["ring1", "ring2", "ring3"]
r = db.insertBridgeAndGetRing(b1, "ring1", t, validRings)
self.assertEquals(r, "ring1")
r = db.insertBridgeAndGetRing(b1, "ring10", t+500, validRings)
self.assertEquals(r, "ring1")
cur.execute("SELECT distributor, address, or_port, first_seen, "
"last_seen FROM Bridges WHERE hex_key = ?", (k1,))
v = cur.fetchone()
self.assertEquals(v,
("ring1", "1.2.3.4", 999,
bridgedb.Storage.timeToStr(t),
bridgedb.Storage.timeToStr(t+500)))
r = db.insertBridgeAndGetRing(b1_v2, "ring99", t+800, validRings)
self.assertEquals(r, "ring1")
cur.execute("SELECT distributor, address, or_port, first_seen, "
"last_seen FROM Bridges WHERE hex_key = ?", (k1,))
v = cur.fetchone()
self.assertEquals(v,
("ring1", "1.2.3.5", 9099,
bridgedb.Storage.timeToStr(t),
bridgedb.Storage.timeToStr(t+800)))
db.insertBridgeAndGetRing(b2, "ring2", t, validRings)
db.insertBridgeAndGetRing(b3, "ring3", t, validRings)
cur.execute("SELECT COUNT(distributor) FROM Bridges")
v = cur.fetchone()
self.assertEquals(v, (3,))
r = db.getEmailTime("[email protected]")
self.assertEquals(r, None)
db.setEmailTime("[email protected]", t)
db.setEmailTime("[email protected]", t+1000)
r = db.getEmailTime("[email protected]")
self.assertCloseTo(r, t)
r = db.getEmailTime("[email protected]")
self.assertCloseTo(r, t+1000)
r = db.getEmailTime("[email protected]")
self.assertEquals(r, None)
db.cleanEmailedBridges(t+200)
db.setEmailTime("[email protected]", t+5000)
r = db.getEmailTime("[email protected]")
self.assertEquals(r, None)
r = db.getEmailTime("[email protected]")
self.assertCloseTo(r, t+5000)
cur.execute("SELECT * FROM EmailedBridges")
self.assertEquals(len(cur.fetchall()), 1)
db.addBridgeBlock(b2.fingerprint, 'us')
self.assertEquals(db.isBlocked(b2.fingerprint, 'us'), True)
db.delBridgeBlock(b2.fingerprint, 'us')
self.assertEquals(db.isBlocked(b2.fingerprint, 'us'), False)
db.addBridgeBlock(b2.fingerprint, 'uk')
db.addBridgeBlock(b3.fingerprint, 'uk')
self.assertEquals(set([b2.fingerprint, b3.fingerprint]),
set(db.getBlockedBridges('uk')))
db.addBridgeBlock(b2.fingerprint, 'cn')
db.addBridgeBlock(b2.fingerprint, 'de')
db.addBridgeBlock(b2.fingerprint, 'jp')
db.addBridgeBlock(b2.fingerprint, 'se')
db.addBridgeBlock(b2.fingerprint, 'kr')
self.assertEquals(set(db.getBlockingCountries(b2.fingerprint)),
set(['uk', 'cn', 'de', 'jp', 'se', 'kr']))
self.assertEquals(db.getWarnedEmail("[email protected]"), False)
db.setWarnedEmail("[email protected]")
self.assertEquals(db.getWarnedEmail("[email protected]"), True)
db.setWarnedEmail("[email protected]", False)
self.assertEquals(db.getWarnedEmail("[email protected]"), False)
db.setWarnedEmail("[email protected]")
self.assertEquals(db.getWarnedEmail("[email protected]"), True)
db.cleanWarnedEmails(t+200)
self.assertEquals(db.getWarnedEmail("[email protected]"), False)
class ParseDescFileTests(unittest.TestCase):
def testSimpleDesc(self):
test = ""
for i in range(100):
test+= "".join(simpleDesc % (randomIP(), randomPort()))
test+=gettimestamp()
test+="router-signature\n"
bs = [b for b in bridgedb.Bridges.parseDescFile(test.split('\n'))]
self.assertEquals(len(bs), 100)
for b in bs:
b.assertOK()
def testSingleOrAddress(self):
test = ""
for i in range(100):
test+= simpleDesc % (randomIP(), randomPort())
test+= orAddress % (randomIP(),randomPort())
test+=gettimestamp()
test+= "router-signature\n"
bs = [b for b in bridgedb.Bridges.parseDescFile(test.split('\n'))]
self.assertEquals(len(bs), 100)
for b in bs:
b.assertOK()
def testMultipleOrAddress(self):
test = ""
for i in range(100):
test+= simpleDesc % (randomIPString(), randomPort())
for i in xrange(8):
test+= orAddress % (randomIPString(),randomPortSpec())
test+=gettimestamp()
test+= "router-signature\n"
bs = [b for b in bridgedb.Bridges.parseDescFile(test.split('\n'))]
self.assertEquals(len(bs), 100)
for b in bs:
b.assertOK()
def testConvolutedOrAddress(self):
test = ""
for i in range(100):
test+= simpleDesc % (randomIPString(), randomPort())
for i in xrange(8):
test+= orAddress % (randomIPString(),randomPortSpec())
test+=gettimestamp()
test+= "router-signature\n"
bs = [b for b in bridgedb.Bridges.parseDescFile(test.split('\n'))]
self.assertEquals(len(bs), 100)
for b in bs:
b.assertOK()
def testParseCountryBlockFile(self):
simpleBlock = "%s:%s %s\n"
countries = ['us', 'nl', 'de', 'cz', 'sk', 'as', 'si', 'it']
test = str()
for i in range(100):
test += simpleBlock % (randomIPString(), randomPort(),
randomCountrySpec())
test+=gettimestamp()
for a,p,c in bridgedb.Bridges.parseCountryBlockFile(test.split('\n')):
assert type(a) in (ipaddr.IPv6Address, ipaddr.IPv4Address)
assert isinstance(p, addr.PortList)
assert isinstance(c, list)
assert len(c) > 0
for y in c:
assert y in countries
#print "address: %s" % a
#print "portlist: %s" % p
#print "countries: %s" % c
class BridgeStabilityTests(unittest.TestCase):
def setUp(self):
self.fd, self.fname = tempfile.mkstemp()
self.db = bridgedb.Storage.Database(self.fname)
bridgedb.Storage.setGlobalDB(self.db)
self.cur = self.db._conn.cursor()
def tearDown(self):
self.db.close()
os.close(self.fd)
os.unlink(self.fname)
def testAddOrUpdateSingleBridgeHistory(self):
db = self.db
b = fakeBridge()
timestamp = time.time()
bhe = bridgedb.Stability.addOrUpdateBridgeHistory(b, timestamp)
assert isinstance(bhe, BridgeHistory)
assert isinstance(db.getBridgeHistory(b.fingerprint), BridgeHistory)
assert len([y for y in db.getAllBridgeHistory()]) == 1
def testDeletingSingleBridgeHistory(self):
db = self.db
b = fakeBridge()
timestamp = time.time()
bhe = bridgedb.Stability.addOrUpdateBridgeHistory(b, timestamp)
assert isinstance(bhe, BridgeHistory)
assert isinstance(db.getBridgeHistory(b.fingerprint), BridgeHistory)
db.delBridgeHistory(b.fingerprint)
assert db.getBridgeHistory(b.fingerprint) is None
assert len([y for y in db.getAllBridgeHistory()]) == 0
def testTOSA(self):
db = self.db
b = random.choice([fakeBridge,fakeBridge6])()
def timestampSeries(x):
for i in xrange(61):
yield (i+1)*60*30 + x # 30 minute intervals
now = time.time()
time_on_address = long(60*30*60) # 30 hours
downtime = 60*60*random.randint(0,4) # random hours of downtime
for t in timestampSeries(now):
bridgedb.Stability.addOrUpdateBridgeHistory(b,t)
assert db.getBridgeHistory(b.fingerprint).tosa == time_on_address
b.orport += 1
for t in timestampSeries(now + time_on_address + downtime):
bhe = bridgedb.Stability.addOrUpdateBridgeHistory(b,t)
assert db.getBridgeHistory(b.fingerprint).tosa == time_on_address + downtime
def testLastSeenWithDifferentAddressAndPort(self):
db = self.db
for i in xrange(10):
num_desc = 30
time_start = time.time()
ts = [ 60*30*(i+1) + time_start for i in xrange(num_desc) ]
b = random.choice([fakeBridge(), fakeBridge6()])
[ bridgedb.Stability.addOrUpdateBridgeHistory(b, t) for t in ts ]
# change the port
b.orport = b.orport+1
last_seen = ts[-1]
ts = [ 60*30*(i+1) + last_seen for i in xrange(num_desc) ]
[ bridgedb.Stability.addOrUpdateBridgeHistory(b, t) for t in ts ]
b = db.getBridgeHistory(b.fingerprint)
assert b.tosa == ts[-1] - last_seen
assert (long(last_seen*1000) == b.lastSeenWithDifferentAddressAndPort)
assert (long(ts[-1]*1000) == b.lastSeenWithThisAddressAndPort)
def testFamiliar(self):
# create some bridges
# XXX: slow
num_bridges = 10
num_desc = 4*48 # 30m intervals, 48 per day
time_start = time.time()
bridges = [ fakeBridge() for x in xrange(num_bridges) ]
t = time.time()
ts = [ (i+1)*60*30+t for i in xrange(num_bridges) ]
for b in bridges:
time_series = [ 60*30*(i+1) + time_start for i in xrange(num_desc) ]
[ bridgedb.Stability.addOrUpdateBridgeHistory(b, i) for i in time_series ]
assert None not in bridges
# +1 to avoid rounding errors
assert bridges[-(num_bridges/8 + 1)].familiar == True
def testDiscountAndPruneBridgeHistory(self):
""" Test pruning of old Bridge History """
if os.environ.get('TRAVIS'):
self.skipTest("Hangs on Travis-CI.")
db = self.db
# make a bunch of bridges
num_bridges = 20
time_start = time.time()
bridges = [random.choice([fakeBridge, fakeBridge6])()
for i in xrange(num_bridges)]
# run some of the bridges for the full time series
running = bridges[:num_bridges/2]
# and some that are not
expired = bridges[num_bridges/2:]<|fim▁hole|>
# Solving:
# 1 discount event per 12 hours, 24 descriptors 30m apart
num_successful = random.randint(2,60)
# figure out how many intervals it will take for weightedUptime to
# decay to < 1
num_desc = int(30*log(1/float(num_successful*30*60))/(-0.05))
timeseries = [ 60*30*(i+1) + time_start for i in xrange(num_desc) ]
for i in timeseries:
for b in running:
bridgedb.Stability.addOrUpdateBridgeHistory(b, i)
if num_successful > 0:
for b in expired:
bridgedb.Stability.addOrUpdateBridgeHistory(b, i)
num_successful -= 1
# now we expect to see the bridge has been removed from history
for bridge in expired:
b = db.getBridgeHistory(bridge.fingerprint)
assert b is None
# and make sure none of the others have
for bridge in running:
b = db.getBridgeHistory(bridge.fingerprint)
assert b is not None
def testSuite():
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for klass in [ IPBridgeDistTests, DictStorageTests, SQLStorageTests,
EmailBridgeDistTests, ParseDescFileTests, BridgeStabilityTests ]:
suite.addTest(loader.loadTestsFromTestCase(klass))
for module in [ bridgedb.Bridges,
bridgedb.Main,
bridgedb.Dist,
bridgedb.Time ]:
suite.addTest(doctest.DocTestSuite(module))
return suite
def main():
suppressWarnings()
unittest.TextTestRunner(verbosity=1).run(testSuite())<|fim▁end|>
|
for b in running: assert b not in expired
|
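For reference, the prune test above sizes its time series with num_desc = int(30*log(1/float(num_successful*30*60))/(-0.05)). The snippet below only evaluates that expression to show the horizon it implies: the number of 30-minute descriptors after which an initial weighted uptime of num_successful*30*60 seconds, decayed by a factor of exp(-0.05) every 30 intervals, drops to roughly one second. That reading is inferred from the test's own comments, not from BridgeDB documentation.
```
from math import exp, log

def decay_horizon(num_successful):
    # Same expression as in testDiscountAndPruneBridgeHistory.
    weighted_uptime = num_successful * 30 * 60  # seconds credited initially
    return int(30 * log(1.0 / weighted_uptime) / (-0.05))

for n in (2, 30, 60):
    horizon = decay_horizon(n)
    remaining = n * 30 * 60 * exp(-0.05 * horizon / 30.0)
    print(n, horizon, round(remaining, 3))  # remaining has decayed to ~1
```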
<|file_name|>cfgconstants.go<|end_file_name|><|fim▁begin|>package securenet
const (
// PSPName ..
PSPName string = "securenet"
// CfgDeveloperID ..
CfgDeveloperID string = "developer_id"
// CfgPublicKey ..
CfgPublicKey string = "public_key"
// CfgSecureKey ..
CfgSecureKey string = "secure_key"
// CfgSecureNetID ..
CfgSecureNetID string = "secure_net_id"
// CfgAppVersion ..<|fim▁hole|> CfgHTTPProxy string = "http_proxy"
)<|fim▁end|>
|
CfgAppVersion string = "app_version"
// CfgAPIEndpoint ..
CfgAPIEndpoint string = "api_endpoint"
// CfgHTTPProxy ..
|
<|file_name|>mutex.pass.cpp<|end_file_name|><|fim▁begin|>//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// <mutex>
// template <class Mutex> class unique_lock;
// explicit unique_lock(mutex_type& m);
#include <mutex>
#include <thread>
#include <cstdlib>
#include <cassert>
std::mutex m;
typedef std::chrono::system_clock Clock;
typedef Clock::time_point time_point;
typedef Clock::duration duration;
typedef std::chrono::milliseconds ms;
typedef std::chrono::nanoseconds ns;
void f()
{<|fim▁hole|> std::unique_lock<std::mutex> ul(m);
t1 = Clock::now();
}
ns d = t1 - t0 - ms(250);
assert(d < ms(50)); // within 50ms
}
int main()
{
m.lock();
std::thread t(f);
std::this_thread::sleep_for(ms(250));
m.unlock();
t.join();
}<|fim▁end|>
|
time_point t0 = Clock::now();
time_point t1;
{
|
<|file_name|>_agent_pools_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._agent_pools_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_available_agent_pool_versions_request, build_get_request, build_get_upgrade_profile_request, build_list_request, build_upgrade_node_image_version_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AgentPoolsOperations:
"""AgentPoolsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.containerservice.v2022_01_02_preview.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> AsyncIterable["_models.AgentPoolListResult"]:
"""Gets a list of agent pools in the specified managed cluster.
Gets a list of agent pools in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AgentPoolListResult or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPoolListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("AgentPoolListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> "_models.AgentPool":
"""Gets the specified managed cluster agent pool.
Gets the specified managed cluster agent pool.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPool, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
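    # Hedged usage sketch, not part of the generated client: with the async
    # ContainerServiceClient from azure.mgmt.containerservice.aio, this operation
    # group is typically reached through client.agent_pools, e.g.
    #
    #   from azure.identity.aio import DefaultAzureCredential
    #   from azure.mgmt.containerservice.aio import ContainerServiceClient
    #
    #   async with ContainerServiceClient(DefaultAzureCredential(), "<subscription-id>") as client:
    #       pool = await client.agent_pools.get("my-rg", "my-cluster", "nodepool1")
    #       print(pool.provisioning_state)
    #
    # The resource group, cluster and agent pool names above are placeholders.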
async def _create_or_update_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
parameters: "_models.AgentPool",
**kwargs: Any
) -> "_models.AgentPool":
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))<|fim▁hole|>
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'AgentPool')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AgentPool', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
parameters: "_models.AgentPool",
**kwargs: Any
) -> AsyncLROPoller["_models.AgentPool"]:
"""Creates or updates an agent pool in the specified managed cluster.
Creates or updates an agent pool in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:param parameters: The agent pool to create or update.
:type parameters: ~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPool
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AgentPool or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
parameters=parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
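# The initial call above returns the raw pipeline response unchanged (cls passes it through);
# the final AgentPool is deserialized here once the long-running operation completes.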
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
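# Select the polling strategy: True uses the default ARM poller, False skips polling,
# and any other value is treated as a caller-supplied polling method.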
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace_async
async def begin_delete(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes an agent pool in the specified managed cluster.
Deletes an agent pool in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace_async
async def get_upgrade_profile(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> "_models.AgentPoolUpgradeProfile":
"""Gets the upgrade profile for an agent pool.
Gets the upgrade profile for an agent pool.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPoolUpgradeProfile, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPoolUpgradeProfile
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolUpgradeProfile"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_upgrade_profile_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self.get_upgrade_profile.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPoolUpgradeProfile', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_upgrade_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeProfiles/default'} # type: ignore
@distributed_trace_async
async def get_available_agent_pool_versions(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> "_models.AgentPoolAvailableVersions":
"""Gets a list of supported Kubernetes versions for the specified agent pool.
See `supported Kubernetes versions
<https://docs.microsoft.com/azure/aks/supported-kubernetes-versions>`_ for more details about
the version lifecycle.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPoolAvailableVersions, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPoolAvailableVersions
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolAvailableVersions"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_available_agent_pool_versions_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.get_available_agent_pool_versions.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPoolAvailableVersions', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_available_agent_pool_versions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/availableAgentPoolVersions'} # type: ignore
async def _upgrade_node_image_version_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> Optional["_models.AgentPool"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.AgentPool"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_upgrade_node_image_version_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self._upgrade_node_image_version_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
response_headers = {}
if response.status_code == 202:
response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
_upgrade_node_image_version_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeNodeImageVersion'} # type: ignore
@distributed_trace_async
async def begin_upgrade_node_image_version(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.AgentPool"]:
"""Upgrades the node image version of an agent pool to the latest.
Upgrading the node image version of an agent pool applies the newest OS and runtime updates to
the nodes. AKS provides one new image per week with the latest updates. For more details on
node image versions, see: https://docs.microsoft.com/azure/aks/node-image-upgrade.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AgentPool or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._upgrade_node_image_version_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response_headers = {}
response = pipeline_response.http_response
response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_upgrade_node_image_version.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeNodeImageVersion'} # type: ignore<|fim▁end|>
| |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals
import datetime
import pytz
import six
ISO8601_DATE_FORMAT = '%Y-%m-%d'
ISO8601_DATETIME_FORMAT = ISO8601_DATE_FORMAT + 'T' + '%H:%M:%S'
def parse_iso8601(value):
"""
Parses a datetime as a UTC ISO8601 date
"""
if not value:
return None
if 'T' in value: # has time<|fim▁hole|> if '.' in value: # has microseconds. Some values from RapidPro don't include this.
_format += '.%f'
if 'Z' in value: # has zero offset marker
_format += 'Z'
else:
_format = ISO8601_DATE_FORMAT
return datetime.datetime.strptime(value, _format).replace(tzinfo=pytz.utc)
def format_iso8601(value):
"""
Formats a datetime as a UTC ISO8601 date
"""
_format = ISO8601_DATETIME_FORMAT + '.%f'
return six.text_type(value.astimezone(pytz.UTC).strftime(_format))<|fim▁end|>
|
_format = ISO8601_DATETIME_FORMAT
|
<|file_name|>dogecall_test.go<|end_file_name|><|fim▁begin|>package main
import (
"testing"
)
func TestCheckPhoneNumber(t *testing.T) {
numbers := []string{
"07700900390",
"+447700900497",
"202-555-0188",<|fim▁hole|> }
for _, number := range numbers {
check := checkNumber(number)
if check != true {
t.Errorf("CheckNumber(\"%s\") == %t, want %t", number, check, true)
}
}
}<|fim▁end|>
|
"+1-202-555-0188",
|
<|file_name|>information.js<|end_file_name|><|fim▁begin|>/* http://fiidmi.fi/documentation/customer_order_history */
module.exports = {
"bonus": {
"type": "object",
"properties": {
"session_id": { "type": "string", "minLength": 2, "maxLength": 50 },
"restaurant_id": { "type": "string", "minLength": 1, "maxLength": 50 }
},
"required": ["session_id", "restaurant_id"]
},
"po_credit": {
"type": "object",
"properties": {<|fim▁hole|> },
"required": ["session_id", "order_id"]
}
};<|fim▁end|>
|
"session_id": { "type": "string", "minLength": 2, "maxLength": 50 }
|
<|file_name|>strategy_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package csidriver
import (
"testing"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/validation/field"
genericapirequest "k8s.io/apiserver/pkg/endpoints/request"
utilfeature "k8s.io/apiserver/pkg/util/feature"
featuregatetesting "k8s.io/component-base/featuregate/testing"
"k8s.io/kubernetes/pkg/apis/storage"
"k8s.io/kubernetes/pkg/features"
)
func getValidCSIDriver(name string) *storage.CSIDriver {
enabled := true
return &storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: name,
},
Spec: storage.CSIDriverSpec{
AttachRequired: &enabled,
PodInfoOnMount: &enabled,
StorageCapacity: &enabled,
},
}
}
func TestCSIDriverStrategy(t *testing.T) {
ctx := genericapirequest.WithRequestInfo(genericapirequest.NewContext(), &genericapirequest.RequestInfo{
APIGroup: "storage.k8s.io",
APIVersion: "v1",
Resource: "csidrivers",
})
if Strategy.NamespaceScoped() {
t.Errorf("CSIDriver must not be namespace scoped")
}
if Strategy.AllowCreateOnUpdate() {
t.Errorf("CSIDriver should not allow create on update")
}
csiDriver := getValidCSIDriver("valid-csidriver")
Strategy.PrepareForCreate(ctx, csiDriver)
errs := Strategy.Validate(ctx, csiDriver)
if len(errs) != 0 {
t.Errorf("unexpected error validating %v", errs)
}
// Update of spec is disallowed
newCSIDriver := csiDriver.DeepCopy()
attachNotRequired := false
newCSIDriver.Spec.AttachRequired = &attachNotRequired
Strategy.PrepareForUpdate(ctx, newCSIDriver, csiDriver)
errs = Strategy.ValidateUpdate(ctx, newCSIDriver, csiDriver)
if len(errs) == 0 {
t.Errorf("Expected a validation error")
}
}
func TestCSIDriverPrepareForCreate(t *testing.T) {
ctx := genericapirequest.WithRequestInfo(genericapirequest.NewContext(), &genericapirequest.RequestInfo{
APIGroup: "storage.k8s.io",
APIVersion: "v1",
Resource: "csidrivers",
})
attachRequired := true
podInfoOnMount := true
storageCapacity := true
tests := []struct {
name string
withCapacity bool
withInline bool
}{
{
name: "inline enabled",
withInline: true,
},
{
name: "inline disabled",
withInline: false,
},
{
name: "capacity enabled",
withCapacity: true,
},
{
name: "capacity disabled",
withCapacity: false,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.CSIStorageCapacity, test.withCapacity)()
defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.CSIInlineVolume, test.withInline)()
csiDriver := &storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &attachRequired,
PodInfoOnMount: &podInfoOnMount,
StorageCapacity: &storageCapacity,
VolumeLifecycleModes: []storage.VolumeLifecycleMode{
storage.VolumeLifecyclePersistent,
},
},
}
Strategy.PrepareForCreate(ctx, csiDriver)
errs := Strategy.Validate(ctx, csiDriver)
if len(errs) != 0 {
t.Errorf("unexpected validating errors: %v", errs)
}
if test.withCapacity {
if csiDriver.Spec.StorageCapacity == nil || *csiDriver.Spec.StorageCapacity != storageCapacity {
t.Errorf("StorageCapacity modified: %v", csiDriver.Spec.StorageCapacity)
}
} else {
if csiDriver.Spec.StorageCapacity != nil {
t.Errorf("StorageCapacity not stripped: %v", csiDriver.Spec.StorageCapacity)
}
}
if test.withInline {
if len(csiDriver.Spec.VolumeLifecycleModes) != 1 {
t.Errorf("VolumeLifecycleModes modified: %v", csiDriver.Spec)
}
} else {
if len(csiDriver.Spec.VolumeLifecycleModes) != 0 {
t.Errorf("VolumeLifecycleModes not stripped: %v", csiDriver.Spec)
}
}
})
}
}
func TestCSIDriverPrepareForUpdate(t *testing.T) {
ctx := genericapirequest.WithRequestInfo(genericapirequest.NewContext(), &genericapirequest.RequestInfo{
APIGroup: "storage.k8s.io",
APIVersion: "v1",
Resource: "csidrivers",
})
attachRequired := true
podInfoOnMount := true
driverWithoutModes := &storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &attachRequired,
PodInfoOnMount: &podInfoOnMount,
},
}
driverWithPersistent := &storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &attachRequired,
PodInfoOnMount: &podInfoOnMount,
VolumeLifecycleModes: []storage.VolumeLifecycleMode{
storage.VolumeLifecyclePersistent,
},
},
}
driverWithEphemeral := &storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &attachRequired,
PodInfoOnMount: &podInfoOnMount,
VolumeLifecycleModes: []storage.VolumeLifecycleMode{
storage.VolumeLifecycleEphemeral,
},
},
}
enabled := true
disabled := false
driverWithoutCapacity := &storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
}
driverWithCapacityEnabled := &storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
StorageCapacity: &enabled,
},
}
driverWithCapacityDisabled := &storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
StorageCapacity: &disabled,
},
}
var resultEmpty []storage.VolumeLifecycleMode
resultPersistent := []storage.VolumeLifecycleMode{storage.VolumeLifecyclePersistent}
resultEphemeral := []storage.VolumeLifecycleMode{storage.VolumeLifecycleEphemeral}
tests := []struct {
name string
old, update *storage.CSIDriver
withCapacity, withoutCapacity *bool
withInline, withoutInline []storage.VolumeLifecycleMode
}{
{
name: "before: no capacity, update: no capacity",
old: driverWithoutCapacity,
update: driverWithoutCapacity,
withCapacity: nil,
withoutCapacity: nil,
},
{
name: "before: no capacity, update: enabled",
old: driverWithoutCapacity,
update: driverWithCapacityEnabled,
withCapacity: &enabled,
withoutCapacity: nil,
},
{
name: "before: capacity enabled, update: disabled",
old: driverWithCapacityEnabled,
update: driverWithCapacityDisabled,
withCapacity: &disabled,
withoutCapacity: &disabled,
},
{
name: "before: capacity enabled, update: no capacity",
old: driverWithCapacityEnabled,
update: driverWithoutCapacity,
withCapacity: nil,
withoutCapacity: nil,
},
{
name: "before: no mode, update: no mode",
old: driverWithoutModes,
update: driverWithoutModes,
withInline: resultEmpty,
withoutInline: resultEmpty,
},
{
name: "before: no mode, update: persistent",
old: driverWithoutModes,
update: driverWithPersistent,
withInline: resultPersistent,
withoutInline: resultEmpty,
},
{
name: "before: persistent, update: ephemeral",
old: driverWithPersistent,
update: driverWithEphemeral,
withInline: resultEphemeral,
withoutInline: resultEphemeral,
},
{
name: "before: persistent, update: no mode",
old: driverWithPersistent,
update: driverWithoutModes,
withInline: resultEmpty,
withoutInline: resultEmpty,
},
}
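// runAll exercises every test case with the CSIStorageCapacity and CSIInlineVolume feature gates set as requested.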
runAll := func(t *testing.T, withCapacity, withInline bool) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.CSIStorageCapacity, withCapacity)()
defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.CSIInlineVolume, withInline)()
csiDriver := test.update.DeepCopy()
Strategy.PrepareForUpdate(ctx, csiDriver, test.old)
if withCapacity {
require.Equal(t, test.withCapacity, csiDriver.Spec.StorageCapacity)
} else {
require.Equal(t, test.withoutCapacity, csiDriver.Spec.StorageCapacity)
}
if withInline {
require.Equal(t, test.withInline, csiDriver.Spec.VolumeLifecycleModes)
} else {
require.Equal(t, test.withoutInline, csiDriver.Spec.VolumeLifecycleModes)
}
})
}
}
t.Run("with capacity", func(t *testing.T) {
runAll(t, true, false)
})
t.Run("without capacity", func(t *testing.T) {
runAll(t, false, false)
})
t.Run("with inline volumes", func(t *testing.T) {
runAll(t, false, true)
})
t.Run("without inline volumes", func(t *testing.T) {
runAll(t, false, false)
})
}
func TestCSIDriverValidation(t *testing.T) {
enabled := true
disabled := false
tests := []struct {
name string
csiDriver *storage.CSIDriver
expectError bool
}{
{
"valid csidriver",
getValidCSIDriver("foo"),
false,
},
{
"true for all flags",
&storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &enabled,
PodInfoOnMount: &enabled,
StorageCapacity: &enabled,
},
},
false,
},
{
"false for all flags",
&storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &disabled,
PodInfoOnMount: &disabled,
StorageCapacity: &disabled,
},
},
false,
},
{
"invalid driver name",
&storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "*foo#",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &enabled,
PodInfoOnMount: &enabled,
StorageCapacity: &enabled,
},
},
true,
},
{
"invalid volume mode",
&storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &enabled,
PodInfoOnMount: &enabled,
StorageCapacity: &enabled,
VolumeLifecycleModes: []storage.VolumeLifecycleMode{
storage.VolumeLifecycleMode("no-such-mode"),
},
},
},
true,
},
{
"persistent volume mode",
&storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},<|fim▁hole|> StorageCapacity: &enabled,
VolumeLifecycleModes: []storage.VolumeLifecycleMode{
storage.VolumeLifecyclePersistent,
},
},
},
false,
},
{
"ephemeral volume mode",
&storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &enabled,
PodInfoOnMount: &enabled,
StorageCapacity: &enabled,
VolumeLifecycleModes: []storage.VolumeLifecycleMode{
storage.VolumeLifecycleEphemeral,
},
},
},
false,
},
{
"both volume modes",
&storage.CSIDriver{
ObjectMeta: metav1.ObjectMeta{
Name: "foo",
},
Spec: storage.CSIDriverSpec{
AttachRequired: &enabled,
PodInfoOnMount: &enabled,
StorageCapacity: &enabled,
VolumeLifecycleModes: []storage.VolumeLifecycleMode{
storage.VolumeLifecyclePersistent,
storage.VolumeLifecycleEphemeral,
},
},
},
false,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
testValidation := func(csiDriver *storage.CSIDriver, apiVersion string) field.ErrorList {
ctx := genericapirequest.WithRequestInfo(genericapirequest.NewContext(), &genericapirequest.RequestInfo{
APIGroup: "storage.k8s.io",
APIVersion: "v1",
Resource: "csidrivers",
})
return Strategy.Validate(ctx, csiDriver)
}
err := testValidation(test.csiDriver, "v1")
if len(err) > 0 && !test.expectError {
t.Errorf("Validation of v1 object failed: %+v", err)
}
if len(err) == 0 && test.expectError {
t.Errorf("Validation of v1 object unexpectedly succeeded")
}
})
}
}<|fim▁end|>
|
Spec: storage.CSIDriverSpec{
AttachRequired: &enabled,
PodInfoOnMount: &enabled,
|
<|file_name|>JsonSimpleOrderedTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util.json;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class JsonSimpleOrderedTest {
@Test
public void testOrdered() throws Exception {
InputStream is = new FileInputStream("src/test/resources/bean.json");
String json = loadText(is);
JsonObject output = Jsoner.deserialize(json, new JsonObject());
assertNotNull(output);
// should preserve order
Map<?, ?> map = output.getMap("component");
assertTrue(map instanceof LinkedHashMap);
Iterator<?> it = map.keySet().iterator();
assertEquals("kind", it.next());
assertEquals("scheme", it.next());
assertEquals("syntax", it.next());
assertEquals("title", it.next());
assertEquals("description", it.next());
assertEquals("label", it.next());
assertEquals("deprecated", it.next());
assertEquals("deprecationNote", it.next());
assertEquals("async", it.next());
assertEquals("consumerOnly", it.next());
assertEquals("producerOnly", it.next());
assertEquals("lenientProperties", it.next());
assertEquals("javaType", it.next());
assertEquals("firstVersion", it.next());
assertEquals("groupId", it.next());
assertEquals("artifactId", it.next());
assertEquals("version", it.next());
assertFalse(it.hasNext());
}<|fim▁hole|> public static String loadText(InputStream in) throws IOException {
StringBuilder builder = new StringBuilder();
InputStreamReader isr = new InputStreamReader(in);
try {
BufferedReader reader = new BufferedReader(isr);
while (true) {
String line = reader.readLine();
if (line == null) {
line = builder.toString();
return line;
}
builder.append(line);
builder.append("\n");
}
} finally {
isr.close();
in.close();
}
}
}<|fim▁end|>
| |
<|file_name|>pages.module.js<|end_file_name|><|fim▁begin|>/**
* Created by zura on 9/27/2016.
*/
(function () {
'use strict';
angular
.module('app.pages', [
'app.pages.auth.login'
])
.config(Config);
/** @ngInject */
function Config(){
<|fim▁hole|><|fim▁end|>
|
}
})();
|
<|file_name|>starter.py<|end_file_name|><|fim▁begin|>"""
RESTx: Sane, simple and effective data publishing and integration.
Copyright (C) 2010 MuleSoft Inc. http://www.mulesoft.com
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
"""
Simple starter for stand-alone RESTx server.
"""
import os
import sys
import time
import getopt
# RESTx imports
import restx.settings as settings<|fim▁hole|>
from restx.core import RequestDispatcher
from restx.platform_specifics import *
from org.mulesoft.restx import Settings
from org.mulesoft.restx.util import Url
from org.mulesoft.restx.component.api import *
def print_help():
print \
"""
RESTx server (c) 2010 MuleSoft
Usage: jython starter.py [options]
Options:
-h, --help
Print this help screen.
-P, --port <num>
Port on which the server listens for requests.
-p, --pidfile <filename>
If specified, the PID of the server is stored in <filename>.
-l, --logfile <filename>
If specified, the filename for the logfile. If not specified,
output will go to the console.
-r, --rootdir <dirname>
Root directory of the RESTx install
"""
if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], "hl:P:p:r:", ["help", "logfile=", "port=", "pidfile=", "rootdir="])
except getopt.GetoptError, err:
# print help information and exit:
print str(err) # will print something like "option -a not recognized"
print_help()
sys.exit(1)
port = settings.LISTEN_PORT
for o, a in opts:
if o in ("-p", "--pidfile"):
# Writing our process ID
pid = os.getpid()
f = open(a, "w")
f.write(str(pid))
f.close()
elif o in ("-h", "--help"):
print_help()
sys.exit(0)
elif o in ("-P", "--port"):
port = int(a)
elif o in ("-r", "--rootdir"):
rootdir = str(a)
settings.set_root_dir(rootdir)
elif o in ("-l", "--logfile"):
logger.set_logfile(a)
my_server = HttpServer(port, RequestDispatcher())<|fim▁end|>
|
import restx.logger as logger
|
<|file_name|>YoutubeBatch.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: Walter Purcaro
"""
from urlparse import urljoin
import re
from module.common.json_layer import json_loads
from module.plugins.Crypter import Crypter
from module.utils import save_join
API_KEY = "AIzaSyCKnWLNlkX-L4oD1aEzqqhRw1zczeD6_k0"<|fim▁hole|>class YoutubeBatch(Crypter):
__name__ = "YoutubeBatch"
__type__ = "crypter"
__pattern__ = r"https?://(?:www\.)?(m\.)?youtube\.com/(?P<TYPE>user|playlist|view_play_list)(/|.*?[?&](?:list|p)=)(?P<ID>[\w-]+)"
__version__ = "1.00"
__description__ = """Youtube.com channel & playlist decrypter"""
__config__ = [("likes", "bool", "Grab user (channel) liked videos", "False"),
("favorites", "bool", "Grab user (channel) favorite videos", "False"),
("uploads", "bool", "Grab channel unplaylisted videos", "True")]
__author_name__ = ("Walter Purcaro")
__author_mail__ = ("[email protected]")
def api_response(self, ref, req):
req.update({"key": API_KEY})
url = urljoin("https://www.googleapis.com/youtube/v3/", ref)
page = self.load(url, get=req)
return json_loads(page)
def getChannel(self, user):
channels = self.api_response("channels", {"part": "id,snippet,contentDetails", "forUsername": user, "maxResults": "50"})
if channels["items"]:
channel = channels["items"][0]
return {"id": channel["id"],
"title": channel["snippet"]["title"],
"relatedPlaylists": channel["contentDetails"]["relatedPlaylists"],
"user": user} # One lone channel for user?
def getPlaylist(self, p_id):
playlists = self.api_response("playlists", {"part": "snippet", "id": p_id})
if playlists["items"]:
playlist = playlists["items"][0]
return {"id": p_id,
"title": playlist["snippet"]["title"],
"channelId": playlist["snippet"]["channelId"],
"channelTitle": playlist["snippet"]["channelTitle"]}
def _getPlaylists(self, id, token=None):
req = {"part": "id", "maxResults": "50", "channelId": id}
if token:
req.update({"pageToken": token})
playlists = self.api_response("playlists", req)
for playlist in playlists["items"]:
yield playlist["id"]
if "nextPageToken" in playlists:
for item in self._getPlaylists(id, playlists["nextPageToken"]):
yield item
def getPlaylists(self, ch_id):
return map(self.getPlaylist, self._getPlaylists(ch_id))
def _getVideosId(self, id, token=None):
req = {"part": "contentDetails", "maxResults": "50", "playlistId": id}
if token:
req.update({"pageToken": token})
playlist = self.api_response("playlistItems", req)
for item in playlist["items"]:
yield item["contentDetails"]["videoId"]
if "nextPageToken" in playlist:
for item in self._getVideosId(id, playlist["nextPageToken"]):
yield item
def getVideosId(self, p_id):
return list(self._getVideosId(p_id))
def decrypt(self, pyfile):
match = re.match(self.__pattern__, pyfile.url)
m_id = match.group("ID")
m_type = match.group("TYPE")
if m_type == "user":
self.logDebug("Url recognized as Channel")
user = m_id
channel = self.getChannel(user)
if channel:
playlists = self.getPlaylists(channel["id"])
self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), channel["title"]))
relatedplaylist = {p_name: self.getPlaylist(p_id) for p_name, p_id in channel["relatedPlaylists"].iteritems()}
self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())
relatedplaylist["uploads"]["title"] = "Unplaylisted videos"
relatedplaylist["uploads"]["checkDups"] = True #: checkDups flag
for p_name, p_data in relatedplaylist.iteritems():
if self.getConfig(p_name):
p_data["title"] += " of " + user
playlists.append(p_data)
else:
playlists = []
else:
self.logDebug("Url recognized as Playlist")
playlists = [self.getPlaylist(m_id)]
if not playlists:
self.fail("No playlist available")
addedvideos = []
urlize = lambda x: "https://www.youtube.com/watch?v=" + x
for p in playlists:
p_name = p["title"]
p_videos = self.getVideosId(p["id"])
p_folder = save_join(self.config['general']['download_folder'], p["channelTitle"], p_name)
self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
if not p_videos:
continue
elif "checkDups" in p:
p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
self.logDebug("%s video\s available on playlist \"%s\" after duplicates cleanup" % (len(p_urls), p_name))
else:
p_urls = map(urlize, p_videos)
self.packages.append((p_name, p_urls, p_folder)) #: folder is NOT recognized by pyload 0.4.9!
addedvideos.extend(p_videos)<|fim▁end|>
| |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate nemo;
#[macro_use]
extern crate clap;
extern crate bounded_spsc_queue as queue;
use std::io::{stdin, stdout, Write};
use std::cell::RefCell;
use std::sync::{Arc, Mutex};
use std::io;
use std::io::prelude::*;
use std::fs::File;
use std::thread;
use clap::{Arg, App};
<|fim▁hole|> .about("The nemo interpreter")
.arg(Arg::with_name("INPUT")
.help("Sets the input file to interpret"))
.arg(Arg::with_name("REPL")
.short("r")
.long("repl")
.help("Starts the REPL")
.conflicts_with("INPUT"))
.get_matches();
if matches.is_present("REPL") || matches.value_of("INPUT").is_none() {
repl();
} else {
run_progam_in_file(matches.value_of("INPUT").unwrap());
}
}
fn repl() {
let env = nemo::interpreter::initial_enviroment();
let stdin = stdin();
let mut stdout = stdout();
let (repl_producer, consumer) = queue::make(1);
let (repl_producer, consumer) = (Arc::new(Mutex::new(repl_producer)), Arc::new(Mutex::new(consumer)));
let (producer, repl_consumer) = queue::make(1);
let (producer, repl_consumer) = (Arc::new(Mutex::new(producer)), Arc::new(Mutex::new(repl_consumer)));
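// Background threads keep the interpreter's pipes moving: one feeds FinishedPipe markers into the
// input queue, the other drains whatever the evaluated code writes to the output queue.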
let p = repl_producer.clone();
thread::spawn(move|| {
loop {
let lock = p.lock().unwrap();
match lock.try_push(nemo::interpreter::Value::FinishedPipe) {
Some(_) => {},
None => thread::sleep_ms(200),
}
}
});
let c = repl_consumer.clone();
thread::spawn(move|| {
loop {
let lock = c.lock().unwrap();
match lock.try_pop() {
Some(_) => {},
None => thread::sleep_ms(200),
}
}
});
println!("><> nemo v{} <><", crate_version!());
println!("Use Ctrl-C to exit.");
loop {
print!("> ");
stdout.flush().unwrap();
let mut input = String::new();
stdin.read_line(&mut input).unwrap();
if let Ok(nemo::ast::Top::Definition(def)) = nemo::parser::parse_Definition(&input) {
nemo::interpreter::define_function(def, env.clone());
} else if let Ok(nemo::ast::Top::Use(module_path)) = nemo::parser::parse_Use(&input) {
let mut file = File::open(&module_path).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
let module_env = nemo::interpreter::initial_enviroment();
match nemo::interpreter::load_module_into_env(&contents, module_env.clone(), ".") {
Ok(_) => {},
Err(e) => println!("Syntax error in module {:?}: {:?}", module_path, e),
};
let name = ::std::path::Path::new(&module_path).file_stem().unwrap().to_str().unwrap().to_owned();
let lock = env.lock().unwrap();
lock.borrow_mut().set(name, Some(nemo::interpreter::Value::Module(module_env)));
} else {
let expr = match nemo::parser::parse_Expr(&input) {
Ok(expr) => expr,
Err(e) => {
println!("Error: {:?}", e);
continue;
}
};
match nemo::interpreter::eval(&expr, env.clone(), consumer.clone(), producer.clone()) {
Ok(res) | Err(nemo::interpreter::Error::EarlyReturn(res)) => println!("{:?}", res),
Err(e) => println!("Error: {:?}", e),
};
}
}
}
fn run_progam_in_file(path: &str) {
let mut file = File::open(path).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
let env = nemo::interpreter::initial_enviroment();
// Set up pipes
let (repl_producer, consumer) = queue::make(1);
let (repl_producer, consumer) = (Arc::new(Mutex::new(repl_producer)), Arc::new(Mutex::new(consumer)));
let (producer, repl_consumer) = queue::make(1);
let (producer, repl_consumer) = (Arc::new(Mutex::new(producer)), Arc::new(Mutex::new(repl_consumer)));
let p = repl_producer.clone();
thread::spawn(move|| {
loop {
let lock = p.lock().unwrap();
lock.push(nemo::interpreter::Value::FinishedPipe);
}
});
let c = repl_consumer.clone();
thread::spawn(move|| {
loop {
let lock = c.lock().unwrap();
lock.pop();
}
});
match nemo::interpreter::load_module_into_env(&contents, env.clone(), ::std::path::Path::new(path).parent().unwrap().to_str().unwrap()) {
Ok(_) => {},
Err(e) => println!("Syntax Error: {:?}", e),
};
let nemo_main = nemo::parser::parse_Expr("main()").unwrap();
match nemo::interpreter::eval(&nemo_main, env, consumer, producer) {
Ok(_) => {},
Err(e) => println!("Runtime Error: {:?}", e),
};
}<|fim▁end|>
|
fn main() {
let matches = App::new("nemo")
.version(crate_version!())
.author("Matthew S. <[email protected]>")
|
<|file_name|>imgt2fasta.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# Copyright 2014 Uri Laserson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import optparse<|fim▁hole|>parser = optparse.OptionParser()
(options, args) = parser.parse_args()
if len(args) == 2:
inhandle = open(args[0],'r')
outhandle = open(args[1],'w')
elif len(args) == 1:
inhandle = open(args[0],'r')
outhandle = sys.stdout
elif len(args) == 0:
inhandle = sys.stdin
outhandle = sys.stdout
else:
raise Exception, "Wrong number of arguments."
for chain in vdj.parse_imgt(inhandle):
# print >>outhandle, chain.format('fasta') # causes chain.description output instead of chain.id
print >>outhandle, ">%s\n%s" % (chain.id,chain.seq)<|fim▁end|>
|
import vdj
|
<|file_name|>panel.js<|end_file_name|><|fim▁begin|>define('controllers/panel',['require','jquery','backbone','utils/metrics','utils/browser','utils/video-player','utils/pubsub','controllers/panel-display'],function(require) {
var $ = require('jquery'),
Backbone = require('backbone'),
Metrics = require('utils/metrics'),
Browser = require('utils/browser'),
VideoPlayer = require('utils/video-player'),
PubSub = require('utils/pubsub'),
PanelDisplay = require('controllers/panel-display')
;
var PanelView = Backbone.View.extend({
events: {
},
panelOn: false,
minWidth: 320,
minHeight: 180,
leftLimit: 100,
topLimit: 100,
offset: 10,
border: 5,<|fim▁hole|>
initialize: function() {
Browser.checkMobileTabletDevice();
this.logger = new eventsCore.util.Logger('PanelView');
this.panel1 = new PanelDisplay({el: '#panel1'}).render();
this.panel2 = new PanelDisplay({el: '#panel2'}).render();
//this.logger.info('initialize - panel1:%o panel2:%o', this.panel1, this.panel2);
this.on('route:change', this.checkPage);
this.listenTo(PubSub, 'video:playPanel', this.onPlayPanel);
this.listenTo(PubSub, 'video:exitPanel', this.onExitPanel);
this.listenTo(PubSub, 'video:resetHero', this.onResetHero);
this.listenTo(PubSub, 'video:resetVod', this.onResetVod);
this.listenTo(VideoPlayer, 'player:play', this.onPlayEvent);
this.listenTo(VideoPlayer, 'player:panelOpen', this.onPanelOpenEvent);
this.listenTo(VideoPlayer, 'player:panelClosed', this.onPanelClosedEvent);
},
render: function() {
return this;
},
/**
* checks state/position of each panel and updates initial mute status
* executes on play and route change
*/
checkVolume: function() {
this.logger.info('checkVolume - panel1:%o panel2:%o', this.panel1.state(), this.panel2.state());
if (this.panel1.state() != '' && this.panel2.state() == '') {
this.panel1.mute(false);
this.panel2.mute(true);
}
else if (this.panel2.state() != '' && this.panel1.state() == '') {
this.panel2.mute(false);
this.panel1.mute(true);
}
else if (this.panel1.state() == 'floatVideo' && this.panel2.state() == 'heroVideo') {
this.panel2.mute(false);
this.panel1.mute(true);
}
else if (this.panel1.state() == 'heroVideo' && this.panel2.state() == 'floatVideo') {
this.panel1.mute(false);
this.panel2.mute(true);
}
},
/**
* close any open hero panel
* - used for mobile internal ref to a different watch live channel
*/
onResetHero: function() {
if(this.panel1.state() === 'heroVideo') {
this.panel1.onPanelExit();
} else if(this.panel2.state() === 'heroVideo') {
this.panel2.onPanelExit();
}
},
/**
* close any open vod floated panel
*/
onResetVod: function() {
if(this.panel1.state() === 'floatVideo') {
this.panel1.onPanelExit();
} else if(this.panel2.state() === 'floatVideo') {
this.panel2.onPanelExit();
}
},
/**
* play video in a panel, check for channel existing in panel first and use existing if playing
* @param data - panel video data
* @param channel - this live video data
* @param options - options to be passed to video player, consisting of:
* floated - optional boolean indicating if should start in float mode
* vod - indicates if playing a vod
*/
onPlayPanel: function(data, channel, options) {
options = _.extend({ floated: false, vod: false }, options);
this.logger.info('onPlayPanel - panel1:%o chan1:%o panel2:%o chan2:%o data:%o', this.panel1.state(), this.panel1.channelId(), this.panel2.state(), this.panel2.channelId(), data);
//if panel1 floating and opening the same channel, close to hero (to force back to hero when return to live channel page)
// do not close if float is true, call is trying to open same channel in already open float panel
if (this.panel1.channelId() == data[0].id) {
// if panel has no state, reset it to play channel
if(this.panel1.state() === '') {
this.panel1.playPanel(data, channel, options);
} else if (this.panel1.state() == 'floatVideo' && !options.floated)
this.panel1.panelClose(data, false);
else
this.logger.warn('onPlayPanel - ignoring call, attempted to open same channel already active');
}
//if panel2 floating and opening the same channel, close to hero (to force back to hero when return to live channel page)
// do not close if float is true, call i trying to open same channel in already open float panel
else if (this.panel2.channelId() == data[0].id){
// if panel has no state, reset it to play channel
if(this.panel2.state() === '') {
this.panel2.playPanel(data, channel, options);
} else if (this.panel2.state() == 'floatVideo' && !options.floated)
this.panel2.panelClose(data, false);
else
this.logger.warn('onPlayPanel - ignoring call, attempted to open same channel to floating panel');
}
//if panel1 in hero use it, (if not playing this channel)
else if ((this.panel1.state() == 'heroVideo' || this.panel1.state() == '') && this.panel1.channelId() != data[0].id) {
this.panel1.playPanel(data, channel, options);
}
//else use panel2 (if not playing this channel)
else if (this.panel2.channelId() != data[0].id){
this.panel2.playPanel(data, channel, options);
}
},
/**
* exit video playing in panel, whichever panel is open
*/
onExitPanel: function() {
this.logger.info('onExitPanel - panel1:%o chan1:%o panel2:%o chan2:%o', this.panel1.state(), this.panel1.channelId(), this.panel2.state(), this.panel2.channelId());
// close whichever one is floated
if(this.panel1.state() === 'floatVideo') {
this.panel1.onPanelExit();
} else if(this.panel2.state() === 'floatVideo') {
this.panel2.onPanelExit();
}
},
/**
* on play, initiates check for setting initial mute
* @param data - event data with panel id
*/
onPlayEvent: function(data) {
//this.logger.info('onPlayEvent - data:%o', data.id);
if (data.id == 'panel1' || data.id == 'panel2')
this.checkVolume();
},
/**
* handle panel open event from video player
* triggers panel to transition to float state
* @param data - event data with panel id
*/
onPanelOpenEvent: function(data) {
this.logger.info('onPanelOpenEvent - panel1:%o panel2:%o id:%o', this.panel1.state(), this.panel2.state(), data.id);
if(data.id == 'panel1') {
if (this.panel2.state() == 'floatVideo') {
this.panel2.panelClose(null, false);
}
this.panel1.panelOpen();
}
else if(data.id == 'panel2') {
if (this.panel1.state() == 'floatVideo') {
this.panel1.panelClose(null, false);
}
this.panel2.panelOpen();
}
},
/**
* handle panel close event from video player
* triggers panel to return to hero state
* @param data - event data with panel id
*/
onPanelClosedEvent: function(data) {
this.logger.info('onPanelClosedEvent - panel1:%o panel2:%o id:%o', this.panel1.state(), this.panel2.state(), data.id);
if(data.id == 'panel1') {
this.panel1.panelClose(data, true);
if (this.panel2.state() == 'heroVideo') {
this.panel2.panelClose(data, false);
this.checkVolume();
}
}
else if(data.id == 'panel2') {
this.panel2.panelClose(data, true);
if (this.panel1.state() == 'heroVideo') {
this.panel1.panelClose(data, false);
this.checkVolume();
}
}
},
/**
* initiate page check for closing hero on route change
* also check volume on route change
*/
checkPage: function(){
var route = Backbone.history.getFragment();
this.panel1.checkPage(route);
this.panel2.checkPage(route);
this.checkVolume();
}
});
return PanelView;
})
;<|fim▁end|>
| |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
fn main() {<|fim▁hole|>}<|fim▁end|>
|
println!("cargo:rustc-flags=-l thunk32");
|
<|file_name|>hsts.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use net::hsts::{HstsEntry, HstsList};
use net_traits::IncludeSubdomains;
use std::collections::HashMap;
#[test]
fn test_hsts_entry_is_not_expired_when_it_has_no_timestamp() {
let entry = HstsEntry {
host: "mozilla.org".to_owned(),
include_subdomains: false,
max_age: Some(20),
timestamp: None,
};
assert!(!entry.is_expired());
}
#[test]
fn test_hsts_entry_is_not_expired_when_it_has_no_max_age() {
let entry = HstsEntry {
host: "mozilla.org".to_owned(),
include_subdomains: false,
max_age: None,
timestamp: Some(time::get_time().sec as u64),
};
assert!(!entry.is_expired());
}
#[test]
fn test_hsts_entry_is_expired_when_it_has_reached_its_max_age() {
let entry = HstsEntry {
host: "mozilla.org".to_owned(),
include_subdomains: false,
max_age: Some(10),
timestamp: Some(time::get_time().sec as u64 - 20u64),
};
assert!(entry.is_expired());
}
#[test]
fn test_hsts_entry_cant_be_created_with_ipv6_address_as_host() {
let entry = HstsEntry::new(
"2001:0db8:0000:0000:0000:ff00:0042:8329".to_owned(),
IncludeSubdomains::NotIncluded,
None,
);
assert!(entry.is_none(), "able to create HstsEntry with IPv6 host");
}
#[test]
fn test_hsts_entry_cant_be_created_with_ipv4_address_as_host() {
let entry = HstsEntry::new("4.4.4.4".to_owned(), IncludeSubdomains::NotIncluded, None);
assert!(entry.is_none(), "able to create HstsEntry with IPv4 host");
}
#[test]
fn test_base_domain_in_entries_map() {
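// Entries for subdomains are bucketed under their base domain, so both *.mozilla.org hosts share the "mozilla.org" key.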
let entries_map = HashMap::new();
let mut list = HstsList {
entries_map: entries_map,
};
list.push(
HstsEntry::new(
"servo.mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
);
list.push(
HstsEntry::new(
"firefox.mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
);
list.push(
HstsEntry::new(
"bugzilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
);
assert_eq!(list.entries_map.len(), 2);
assert_eq!(list.entries_map.get("mozilla.org").unwrap().len(), 2);
}
#[test]
fn test_push_entry_with_0_max_age_evicts_entry_from_list() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![
HstsEntry::new(
"mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
Some(500000u64),
)
.unwrap(),
],
);
let mut list = HstsList {
entries_map: entries_map,
};
list.push(
HstsEntry::new(
"mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
Some(0),
)
.unwrap(),
);
assert_eq!(list.is_host_secure("mozilla.org"), false)
}
#[test]
fn test_push_entry_to_hsts_list_should_not_add_subdomains_whose_superdomain_is_already_matched() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![HstsEntry::new("mozilla.org".to_owned(), IncludeSubdomains::Included, None).unwrap()],
);
let mut list = HstsList {
entries_map: entries_map,
};
list.push(
HstsEntry::new(
"servo.mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
);
assert_eq!(list.entries_map.get("mozilla.org").unwrap().len(), 1)
}
#[test]
fn test_push_entry_to_hsts_list_should_update_existing_domain_entrys_include_subdomains() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![HstsEntry::new("mozilla.org".to_owned(), IncludeSubdomains::Included, None).unwrap()],
);
let mut list = HstsList {
entries_map: entries_map,
};
assert!(list.is_host_secure("servo.mozilla.org"));
list.push(
HstsEntry::new(
"mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
);
assert!(!list.is_host_secure("servo.mozilla.org"))
}
#[test]
fn test_push_entry_to_hsts_list_should_not_create_duplicate_entry() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![
HstsEntry::new(
"mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
],
);
let mut list = HstsList {
entries_map: entries_map,
};
list.push(
HstsEntry::new(
"mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
);
assert_eq!(list.entries_map.get("mozilla.org").unwrap().len(), 1)
}
#[test]
fn test_push_multiple_entries_to_hsts_list_should_add_them_all() {
let mut list = HstsList {
entries_map: HashMap::new(),
};
assert!(!list.is_host_secure("mozilla.org"));
assert!(!list.is_host_secure("bugzilla.org"));
list.push(HstsEntry::new("mozilla.org".to_owned(), IncludeSubdomains::Included, None).unwrap());
list.push(
HstsEntry::new("bugzilla.org".to_owned(), IncludeSubdomains::Included, None).unwrap(),
);
assert!(list.is_host_secure("mozilla.org"));
assert!(list.is_host_secure("bugzilla.org"));
}
#[test]
fn test_push_entry_to_hsts_list_should_add_an_entry() {
let mut list = HstsList {
entries_map: HashMap::new(),
};
assert!(!list.is_host_secure("mozilla.org"));
list.push(HstsEntry::new("mozilla.org".to_owned(), IncludeSubdomains::Included, None).unwrap());<|fim▁hole|>
assert!(list.is_host_secure("mozilla.org"));
}
#[test]
fn test_parse_hsts_preload_should_return_none_when_json_invalid() {
let mock_preload_content = "derp";
assert!(
HstsList::from_preload(mock_preload_content).is_none(),
"invalid preload list should not have parsed"
)
}
#[test]
fn test_parse_hsts_preload_should_return_none_when_json_contains_no_entries_map_key() {
let mock_preload_content = "{\"nothing\": \"to see here\"}";
assert!(
HstsList::from_preload(mock_preload_content).is_none(),
"invalid preload list should not have parsed"
)
}
#[test]
fn test_parse_hsts_preload_should_decode_host_and_includes_subdomains() {
let mock_preload_content = "{\
\"entries\": [\
{\"host\": \"mozilla.org\",\
\"include_subdomains\": false}\
]\
}";
let hsts_list = HstsList::from_preload(mock_preload_content);
let entries_map = hsts_list.unwrap().entries_map;
assert_eq!(
entries_map.get("mozilla.org").unwrap()[0].host,
"mozilla.org"
);
assert!(!entries_map.get("mozilla.org").unwrap()[0].include_subdomains);
}
#[test]
fn test_hsts_list_with_no_entries_map_does_not_is_host_secure() {
let hsts_list = HstsList {
entries_map: HashMap::new(),
};
assert!(!hsts_list.is_host_secure("mozilla.org"));
}
#[test]
fn test_hsts_list_with_exact_domain_entry_is_is_host_secure() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![
HstsEntry::new(
"mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
],
);
let hsts_list = HstsList {
entries_map: entries_map,
};
assert!(hsts_list.is_host_secure("mozilla.org"));
}
#[test]
fn test_hsts_list_with_subdomain_when_include_subdomains_is_true_is_is_host_secure() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![HstsEntry::new("mozilla.org".to_owned(), IncludeSubdomains::Included, None).unwrap()],
);
let hsts_list = HstsList {
entries_map: entries_map,
};
assert!(hsts_list.is_host_secure("servo.mozilla.org"));
}
#[test]
fn test_hsts_list_with_subdomain_when_include_subdomains_is_false_is_not_is_host_secure() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![
HstsEntry::new(
"mozilla.org".to_owned(),
IncludeSubdomains::NotIncluded,
None,
)
.unwrap(),
],
);
let hsts_list = HstsList {
entries_map: entries_map,
};
assert!(!hsts_list.is_host_secure("servo.mozilla.org"));
}
#[test]
fn test_hsts_list_with_subdomain_when_host_is_not_a_subdomain_is_not_is_host_secure() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![HstsEntry::new("mozilla.org".to_owned(), IncludeSubdomains::Included, None).unwrap()],
);
let hsts_list = HstsList {
entries_map: entries_map,
};
assert!(!hsts_list.is_host_secure("servo-mozilla.org"));
}
#[test]
fn test_hsts_list_with_subdomain_when_host_is_exact_match_is_is_host_secure() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![HstsEntry::new("mozilla.org".to_owned(), IncludeSubdomains::Included, None).unwrap()],
);
let hsts_list = HstsList {
entries_map: entries_map,
};
assert!(hsts_list.is_host_secure("mozilla.org"));
}
#[test]
fn test_hsts_list_with_expired_entry_is_not_is_host_secure() {
let mut entries_map = HashMap::new();
entries_map.insert(
"mozilla.org".to_owned(),
vec![HstsEntry {
host: "mozilla.org".to_owned(),
include_subdomains: false,
max_age: Some(20),
timestamp: Some(time::get_time().sec as u64 - 100u64),
}],
);
let hsts_list = HstsList {
entries_map: entries_map,
};
assert!(!hsts_list.is_host_secure("mozilla.org"));
}
#[test]
fn test_preload_hsts_domains_well_formed() {
let hsts_list = HstsList::from_servo_preload();
assert!(!hsts_list.entries_map.is_empty());
}<|fim▁end|>
| |
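The three expiry tests above pin down the rule an is_expired() implementation has to satisfy: an entry can only expire when it carries both a max_age and a timestamp, and at least max_age seconds have passed since that timestamp. A minimal, self-contained sketch of that rule follows; the Entry struct and the SystemTime-based clock are illustrative stand-ins, not Servo's actual types.

use std::time::{SystemTime, UNIX_EPOCH};

// Illustrative stand-in for the entry exercised by the tests above.
struct Entry {
    max_age: Option<u64>,   // seconds the entry stays valid
    timestamp: Option<u64>, // unix seconds when the entry was recorded
}

impl Entry {
    // Expired only when both fields are present and max_age seconds have passed.
    fn is_expired(&self) -> bool {
        match (self.max_age, self.timestamp) {
            (Some(max_age), Some(timestamp)) => {
                let now = SystemTime::now()
                    .duration_since(UNIX_EPOCH)
                    .map(|d| d.as_secs())
                    .unwrap_or(0);
                now.saturating_sub(timestamp) >= max_age
            }
            // A missing max_age or timestamp means the entry never expires,
            // matching the first two tests in this file.
            _ => false,
        }
    }
}

fn main() {
    let fresh = Entry { max_age: Some(20), timestamp: None };
    let stale = Entry { max_age: Some(10), timestamp: Some(0) };
    assert!(!fresh.is_expired());
    assert!(stale.is_expired());
}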
<|file_name|>fontconfig.py<|end_file_name|><|fim▁begin|>from _external import *<|fim▁hole|><|fim▁end|>
|
fontconfig = LibWithHeaderChecker('fontconfig', 'fontconfig/fontconfig.h', 'c')
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export default function isNull(x) {
return x === null;
}
|
<|file_name|>noise_image.rs<|end_file_name|><|fim▁begin|>use crate::utils::color_gradient::Color;
#[cfg(feature = "image")]
use std::{self, path::Path};
const RASTER_MAX_WIDTH: u16 = 32_767;
const RASTER_MAX_HEIGHT: u16 = 32_767;
pub struct NoiseImage {
size: (usize, usize),
border_color: Color,
map: Vec<Color>,
}
impl NoiseImage {
pub fn new(width: usize, height: usize) -> Self {
Self::initialize().set_size(width, height)
}
pub fn set_size(self, width: usize, height: usize) -> Self {
// Check for invalid width or height.
assert!(width < RASTER_MAX_WIDTH as usize);
assert!(height < RASTER_MAX_HEIGHT as usize);
if width == 0 || height == 0 {
// An empty noise image was specified. Return a new blank, empty map.
Self::initialize()
} else {
// New noise map size specified. Allocate a new Vec unless the current Vec is large
// enough.
let map_size = width * height;
if self.map.capacity() < map_size {
// New size is too big for the current Vec. Create a new Vec with a large enough
// capacity now so we're not reallocating when filling the map.
Self {
map: vec![[0; 4]; map_size],
size: (width, height),
..self
}
} else {
// Vec capacity is already big enough, so leave it alone and just change the set size.
Self {
size: (width, height),
..self
}
}
}
}
pub fn set_border_color(self, color: Color) -> Self {
Self {
border_color: color,
..self
}
}
pub fn set_value(&mut self, x: usize, y: usize, value: Color) {
let (width, height) = self.size;
if x < width && y < height {
self.map[x + y * width] = value;
} else {
eprintln!("input point out of bounds")
}
}
pub fn size(&self) -> (usize, usize) {
self.size
}
pub fn border_color(&self) -> Color {
self.border_color
}
pub fn get_value(&self, x: usize, y: usize) -> Color {
let (width, height) = self.size;
if x < width && y < height {
self.map[x + y * width]
} else {
self.border_color
}
}
fn initialize() -> Self {
Self {
size: (0, 0),
border_color: [0; 4],
map: Vec::new(),
}
}
#[cfg(feature = "image")]
pub fn write_to_file(&self, filename: &str) {
// Create the output directory for the images, if it doesn't already exist
let target_dir = Path::new("example_images/");
if !target_dir.exists() {
std::fs::create_dir(target_dir).expect("failed to create example_images directory");
}
        // Concatenate the output directory with the file name.
let directory: String = "example_images/".to_owned();
let file_path = directory + filename;
        // Flatten the RGBA color values from the map into one contiguous byte buffer.
let (width, height) = self.size;
let mut result = Vec::with_capacity(width * height);
for i in &self.map {
for j in i.iter() {
result.push(*j);
}
}
let _ = image::save_buffer(
&Path::new(&file_path),
&*result,
self.size.0 as u32,
self.size.1 as u32,
image::ColorType::Rgba8,
);
println!("\nFinished generating {}", filename);
}
}
impl Default for NoiseImage {
fn default() -> Self {
Self::initialize()
}<|fim▁hole|>}<|fim▁end|>
| |
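A short, hypothetical usage sketch of the builder-style API above, assuming the NoiseImage type is in scope; the dimensions, gradient and file name are made up for illustration, and write_to_file only exists when the crate's image feature is enabled.

// Hypothetical caller code: fill a small image with a grey ramp and save it.
fn demo() {
    let mut image = NoiseImage::new(64, 64).set_border_color([255, 0, 0, 255]);

    let (width, height) = image.size();
    for y in 0..height {
        for x in 0..width {
            // Simple horizontal gradient; a real caller would map noise values here.
            let v = (x * 255 / (width - 1)) as u8;
            image.set_value(x, y, [v, v, v, 255]);
        }
    }

    // Out-of-bounds reads fall back to the border color.
    assert_eq!(image.get_value(9999, 0), [255, 0, 0, 255]);

    #[cfg(feature = "image")]
    image.write_to_file("demo.png");
}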
<|file_name|>cython_special.py<|end_file_name|><|fim▁begin|>import re
import numpy as np
from scipy import special
from .common import with_attributes, safe_import
with safe_import():
from scipy.special import cython_special
FUNC_ARGS = {
'airy_d': (1,),
'airy_D': (1,),
'beta_dd': (0.25, 0.75),
'erf_d': (1,),
'erf_D': (1+1j,),
'exprel_d': (1e-6,),
'gamma_d': (100,),
'gamma_D': (100+100j,),
'jv_dd': (1, 1),
'jv_dD': (1, (1+1j)),
'loggamma_D': (20,),
'logit_d': (0.5,),
'psi_d': (1,),
'psi_D': (1,),
}<|fim▁hole|>class _CythonSpecialMeta(type):
"""
Add time_* benchmarks corresponding to cython_special._bench_*_cy
"""
def __new__(cls, cls_name, bases, dct):
params = [(10, 100, 1000), ('python', 'numpy', 'cython')]
param_names = ['N', 'api']
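        # Build a time_<name> benchmark method that closes over this function's
        # name and its example arguments.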
def get_time_func(name, args):
@with_attributes(params=[(name,), (args,)] + params,
param_names=['name', 'argument'] + param_names)
def func(self, name, args, N, api):
if api == 'python':
self.py_func(N, *args)
elif api == 'numpy':
self.np_func(*self.obj)
else:
self.cy_func(N, *args)
func.__name__ = 'time_' + name
return func
for name in FUNC_ARGS.keys():
func = get_time_func(name, FUNC_ARGS[name])
dct[func.__name__] = func
return type.__new__(cls, cls_name, bases, dct)
class CythonSpecial(metaclass=_CythonSpecialMeta):
def setup(self, name, args, N, api):
self.py_func = getattr(cython_special, '_bench_{}_py'.format(name))
self.cy_func = getattr(cython_special, '_bench_{}_cy'.format(name))
m = re.match('^(.*)_[dDl]+$', name)
self.np_func = getattr(special, m.group(1))
self.obj = []
for arg in args:
self.obj.append(arg*np.ones(N))
self.obj = tuple(self.obj)<|fim▁end|>
| |
<|file_name|>custom.js<|end_file_name|><|fim▁begin|>$(document).ready(function () {
console.log("ready!");
$("#subs").click(function () {
var name = $('#name').val();
var email = $('#email').val();
if (name != '' && email != '') {
$('#subs_err').html('');
var subs = {name: name, email: email};
var url = "/index.php/index/subscribe_user";
$.post(url, {subs: JSON.stringify(subs)}).done(function (data) {
$('#subscribe_content').html(data);
}); // end of post
} // end if
else {
$('#subs_err').html('Please provide name and email');
}
});
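    // Pull a named parameter out of the page's query string; returns true for a
    // bare flag and undefined when the parameter is not present.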
var getUrlParameter = function getUrlParameter(sParam) {
var sPageURL = decodeURIComponent(window.location.search.substring(1)),
sURLVariables = sPageURL.split('&'),
sParameterName,
i;
<|fim▁hole|>
if (sParameterName[0] === sParam) {
return sParameterName[1] === undefined ? true : sParameterName[1];
}
}
};
var code = getUrlParameter('errorcode');
if (code == 3) {
$('#login_err').html('Invalid email address or password');
}
$("#contact_submit").click(function () {
var name = $('#name').val();
var email = $('#email').val();
var phone = $('#phone').val();
var comment = $('#comment').val();
if (name != '' && email != '' && phone != '' && comment != '') {
$('#contact_err').html('');
var contact = {name: name, email: email, phone: phone, comment: comment};
var url = "/index.php/index/send_contact_request";
$.post(url, {contact: JSON.stringify(contact)}).done(function (data) {
$('#contact_container').html(data);
}); // end of post
} // end if
else {
$('#contact_err').html('Please provide all required fields');
}
});
});<|fim▁end|>
|
for (i = 0; i < sURLVariables.length; i++) {
sParameterName = sURLVariables[i].split('=');
|
<|file_name|>rectangles.js<|end_file_name|><|fim▁begin|>//
// This is only a SKELETON file for the 'Rectangles' exercise. It's been provided as a
// convenience to get you started writing code faster.
//
export function count() {<|fim▁hole|>}<|fim▁end|>
|
throw new Error('Remove this statement and implement this function');
|
<|file_name|>kernel.cpp<|end_file_name|><|fim▁begin|>__global__ void /*{kernel_name}*/(/*{parameters}*/)
{
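    // One thread per output element: derive this thread's global index from
    // its block index, block size and thread index.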
int _tid_ = threadIdx.x + blockIdx.x * blockDim.x;
if (_tid_ < /*{num_threads}*/)
{
/*{execution}*/<|fim▁hole|><|fim▁end|>
|
_result_[_tid_] = /*{block_invocation}*/;
}
}
|
<|file_name|>coordinator-setup.js<|end_file_name|><|fim▁begin|>import Coordinator from '../models/coordinator';<|fim▁hole|> initialize: function() {
let app = arguments[1] || arguments[0];
app.register("drag:coordinator",Coordinator);
app.inject("component","coordinator","drag:coordinator");
}
};<|fim▁end|>
|
export default {
name: "setup coordinator",
|
<|file_name|>mysql.py<|end_file_name|><|fim▁begin|>import pymysql
from flask_restful import Resource
from flask import abort
ALLOWED_SHOW = ('processlist', 'databases', 'plugins', 'privileges')
class Mysql(Resource):
def __init__(self):
self.connection = pymysql.connect(user='root')
self.cursor = self.connection.cursor()
def _execute(self, sql):
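        # Execute the query, then pair the column names from cursor.description
        # with each fetched row so every result is returned as a dict.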
self.cursor.execute(sql)
desc_id = tuple(x[0] for x in self.cursor.description)
query_result = self.cursor.fetchall()
results = [dict(zip(desc_id, item)) for item in query_result]
return results
def get(self, cmd):
if cmd in ALLOWED_SHOW:
return self._execute('show ' + cmd)
else:
abort(404)
class MysqlDatabase(Mysql):
def get(self, dbname):
try:
self.connection.select_db(dbname)
except pymysql.InternalError as e:
abort(400, e.args)
return self._execute('show tables')
def post(self, dbname):
try:
self.cursor.execute('create database ' + dbname)
except pymysql.ProgrammingError as e:<|fim▁hole|>
def delete(self, dbname):
try:
self.cursor.execute('drop database if exists ' + dbname)
except pymysql.ProgrammingError as e:
abort(400, e.args)<|fim▁end|>
|
abort(400, e.args)
|
<|file_name|>test.js<|end_file_name|><|fim▁begin|>var assert = require('chai').assert
var sendgrid = require('../lib/sendgrid');
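// Each suite below hits one SendGrid v3 endpoint. When TEST_HOST is localhost the
// request goes to a local mock server on port 4010, and the X-Mock header selects
// the status code the mock should return.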
describe('test_access_settings_activity_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["limit"] = '1'
request.method = 'GET'
request.path = '/v3/access_settings/activity'
request.headers['X-Mock'] = 200
it('test_access_settings_activity_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_access_settings_whitelist_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"ips": [
{
"ip": "192.168.1.1"
},
{
"ip": "192.*.*.*"
},
{
"ip": "192.168.1.3/32"
}
]
};
request.method = 'POST'
request.path = '/v3/access_settings/whitelist'
request.headers['X-Mock'] = 201
it('test_access_settings_whitelist_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_access_settings_whitelist_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/access_settings/whitelist'
request.headers['X-Mock'] = 200
it('test_access_settings_whitelist_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_access_settings_whitelist_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"ids": [
1,
2,
3
]
};
request.method = 'DELETE'
request.path = '/v3/access_settings/whitelist'
request.headers['X-Mock'] = 204
it('test_access_settings_whitelist_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_access_settings_whitelist__rule_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/access_settings/whitelist/{rule_id}'
request.headers['X-Mock'] = 200
it('test_access_settings_whitelist__rule_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_access_settings_whitelist__rule_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/access_settings/whitelist/{rule_id}'
request.headers['X-Mock'] = 204
it('test_access_settings_whitelist__rule_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_alerts_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email_to": "[email protected]",
"frequency": "daily",
"type": "stats_notification"
};
request.method = 'POST'
request.path = '/v3/alerts'
request.headers['X-Mock'] = 201
it('test_alerts_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_alerts_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/alerts'
request.headers['X-Mock'] = 200
it('test_alerts_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_alerts__alert_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email_to": "[email protected]"
};
request.method = 'PATCH'
request.path = '/v3/alerts/{alert_id}'
request.headers['X-Mock'] = 200
it('test_alerts__alert_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_alerts__alert_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/alerts/{alert_id}'
request.headers['X-Mock'] = 200
it('test_alerts__alert_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_alerts__alert_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/alerts/{alert_id}'
request.headers['X-Mock'] = 204
it('test_alerts__alert_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_api_keys_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "My API Key",
"sample": "data",
"scopes": [
"mail.send",
"alerts.create",
"alerts.read"
]
};
request.method = 'POST'
request.path = '/v3/api_keys'
request.headers['X-Mock'] = 201
it('test_api_keys_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_api_keys_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["limit"] = '1'
request.method = 'GET'
request.path = '/v3/api_keys'
request.headers['X-Mock'] = 200
it('test_api_keys_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_api_keys__api_key_id__put', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "A New Hope",
"scopes": [
"user.profile.read",
"user.profile.update"
]
};
request.method = 'PUT'
request.path = '/v3/api_keys/{api_key_id}'
request.headers['X-Mock'] = 200
it('test_api_keys__api_key_id__put had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_api_keys__api_key_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "A New Hope"
};
request.method = 'PATCH'
request.path = '/v3/api_keys/{api_key_id}'
request.headers['X-Mock'] = 200
it('test_api_keys__api_key_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_api_keys__api_key_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/api_keys/{api_key_id}'
request.headers['X-Mock'] = 200
it('test_api_keys__api_key_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_api_keys__api_key_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/api_keys/{api_key_id}'
request.headers['X-Mock'] = 204
it('test_api_keys__api_key_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"description": "Suggestions for products our users might like.",
"is_default": true,
"name": "Product Suggestions"
};
request.method = 'POST'
request.path = '/v3/asm/groups'
request.headers['X-Mock'] = 201
it('test_asm_groups_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["id"] = '1'
request.method = 'GET'
request.path = '/v3/asm/groups'
request.headers['X-Mock'] = 200
it('test_asm_groups_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups__group_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"description": "Suggestions for items our users might like.",
"id": 103,
"name": "Item Suggestions"
};
request.method = 'PATCH'
request.path = '/v3/asm/groups/{group_id}'
request.headers['X-Mock'] = 201
it('test_asm_groups__group_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups__group_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/asm/groups/{group_id}'
request.headers['X-Mock'] = 200
it('test_asm_groups__group_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups__group_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/asm/groups/{group_id}'
request.headers['X-Mock'] = 204
it('test_asm_groups__group_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups__group_id__suppressions_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"recipient_emails": [
"[email protected]",
"[email protected]"
]
};
request.method = 'POST'
request.path = '/v3/asm/groups/{group_id}/suppressions'
request.headers['X-Mock'] = 201
it('test_asm_groups__group_id__suppressions_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups__group_id__suppressions_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/asm/groups/{group_id}/suppressions'
request.headers['X-Mock'] = 200
it('test_asm_groups__group_id__suppressions_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups__group_id__suppressions_search_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"recipient_emails": [
"[email protected]",
"[email protected]",
"[email protected]"
]
};
request.method = 'POST'
request.path = '/v3/asm/groups/{group_id}/suppressions/search'
request.headers['X-Mock'] = 200
it('test_asm_groups__group_id__suppressions_search_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_asm_groups__group_id__suppressions__email__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/asm/groups/{group_id}/suppressions/{email}'
request.headers['X-Mock'] = 204
it('test_asm_groups__group_id__suppressions__email__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_asm_suppressions_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/asm/suppressions'
request.headers['X-Mock'] = 200
it('test_asm_suppressions_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_asm_suppressions_global_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"recipient_emails": [
"[email protected]",
"[email protected]"
]
};
request.method = 'POST'
request.path = '/v3/asm/suppressions/global'
request.headers['X-Mock'] = 201
it('test_asm_suppressions_global_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_asm_suppressions_global__email__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/asm/suppressions/global/{email}'
request.headers['X-Mock'] = 200
it('test_asm_suppressions_global__email__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_asm_suppressions_global__email__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/asm/suppressions/global/{email}'
request.headers['X-Mock'] = 204
it('test_asm_suppressions_global__email__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_asm_suppressions__email__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/asm/suppressions/{email}'
request.headers['X-Mock'] = 200
it('test_asm_suppressions__email__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_browsers_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["aggregated_by"] = 'day'
request.queryParams["browsers"] = 'test_string'
request.queryParams["limit"] = 'test_string'
request.queryParams["offset"] = 'test_string'
request.queryParams["start_date"] = '2016-01-01'
request.method = 'GET'
request.path = '/v3/browsers/stats'
request.headers['X-Mock'] = 200
it('test_browsers_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_campaigns_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"categories": [
"spring line"
],
"custom_unsubscribe_url": "",
"html_content": "<html><head><title></title></head><body><p>Check out our spring line!</p></body></html>",
"ip_pool": "marketing",
"list_ids": [
110,
124
],
"plain_content": "Check out our spring line!",
"segment_ids": [
110
],
"sender_id": 124451,
"subject": "New Products for Spring!",
"suppression_group_id": 42,
"title": "March Newsletter"
};
request.method = 'POST'
request.path = '/v3/campaigns'
request.headers['X-Mock'] = 201
it('test_campaigns_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_campaigns_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/campaigns'
request.headers['X-Mock'] = 200
it('test_campaigns_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"categories": [
"summer line"
],
"html_content": "<html><head><title></title></head><body><p>Check out our summer line!</p></body></html>",
"plain_content": "Check out our summer line!",
"subject": "New Products for Summer!",
"title": "May Newsletter"
};
request.method = 'PATCH'
request.path = '/v3/campaigns/{campaign_id}'
request.headers['X-Mock'] = 200
it('test_campaigns__campaign_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/campaigns/{campaign_id}'
request.headers['X-Mock'] = 200
it('test_campaigns__campaign_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/campaigns/{campaign_id}'
request.headers['X-Mock'] = 204
it('test_campaigns__campaign_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__schedules_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"send_at": 1489451436
};
request.method = 'PATCH'
request.path = '/v3/campaigns/{campaign_id}/schedules'
request.headers['X-Mock'] = 200
it('test_campaigns__campaign_id__schedules_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__schedules_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"send_at": 1489771528
};
request.method = 'POST'
request.path = '/v3/campaigns/{campaign_id}/schedules'
request.headers['X-Mock'] = 201
it('test_campaigns__campaign_id__schedules_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__schedules_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/campaigns/{campaign_id}/schedules'
request.headers['X-Mock'] = 200
it('test_campaigns__campaign_id__schedules_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__schedules_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/campaigns/{campaign_id}/schedules'
request.headers['X-Mock'] = 204
it('test_campaigns__campaign_id__schedules_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__schedules_now_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'POST'
request.path = '/v3/campaigns/{campaign_id}/schedules/now'
request.headers['X-Mock'] = 201
it('test_campaigns__campaign_id__schedules_now_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_campaigns__campaign_id__schedules_test_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"to": "[email protected]"
};
request.method = 'POST'
request.path = '/v3/campaigns/{campaign_id}/schedules/test'
request.headers['X-Mock'] = 204
it('test_campaigns__campaign_id__schedules_test_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_categories_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["category"] = 'test_string'
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/categories'
request.headers['X-Mock'] = 200
it('test_categories_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_categories_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["categories"] = 'test_string'
request.method = 'GET'
request.path = '/v3/categories/stats'
request.headers['X-Mock'] = 200
it('test_categories_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_categories_stats_sums_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = '1'
request.queryParams["sort_by_metric"] = 'test_string'
request.queryParams["offset"] = '1'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["sort_by_direction"] = 'asc'
request.method = 'GET'
request.path = '/v3/categories/stats/sums'
request.headers['X-Mock'] = 200
it('test_categories_stats_sums_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_clients_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["aggregated_by"] = 'day'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["end_date"] = '2016-04-01'
request.method = 'GET'
request.path = '/v3/clients/stats'
request.headers['X-Mock'] = 200
it('test_clients_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_clients__client_type__stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["aggregated_by"] = 'day'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["end_date"] = '2016-04-01'
request.method = 'GET'
request.path = '/v3/clients/{client_type}/stats'
request.headers['X-Mock'] = 200
it('test_clients__client_type__stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_custom_fields_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "pet",
"type": "text"
};
request.method = 'POST'
request.path = '/v3/contactdb/custom_fields'
request.headers['X-Mock'] = 201
it('test_contactdb_custom_fields_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_custom_fields_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/custom_fields'
request.headers['X-Mock'] = 200
it('test_contactdb_custom_fields_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_custom_fields__custom_field_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/custom_fields/{custom_field_id}'
request.headers['X-Mock'] = 200
it('test_contactdb_custom_fields__custom_field_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_custom_fields__custom_field_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/contactdb/custom_fields/{custom_field_id}'
request.headers['X-Mock'] = 202
it('test_contactdb_custom_fields__custom_field_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 202, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "your list name"
};
request.method = 'POST'
request.path = '/v3/contactdb/lists'
request.headers['X-Mock'] = 201
it('test_contactdb_lists_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/lists'
request.headers['X-Mock'] = 200
it('test_contactdb_lists_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = [
1,
2,
3,
4
];
request.method = 'DELETE'
request.path = '/v3/contactdb/lists'
request.headers['X-Mock'] = 204
it('test_contactdb_lists_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists__list_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "newlistname"
};
request.queryParams["list_id"] = '1'
request.method = 'PATCH'
request.path = '/v3/contactdb/lists/{list_id}'
request.headers['X-Mock'] = 200
it('test_contactdb_lists__list_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists__list_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["list_id"] = '1'
request.method = 'GET'
request.path = '/v3/contactdb/lists/{list_id}'
request.headers['X-Mock'] = 200
it('test_contactdb_lists__list_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists__list_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.queryParams["delete_contacts"] = 'true'
request.method = 'DELETE'
request.path = '/v3/contactdb/lists/{list_id}'
request.headers['X-Mock'] = 202
it('test_contactdb_lists__list_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 202, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists__list_id__recipients_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = [
"recipient_id1",
"recipient_id2"
];
request.method = 'POST'
request.path = '/v3/contactdb/lists/{list_id}/recipients'
request.headers['X-Mock'] = 201
it('test_contactdb_lists__list_id__recipients_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists__list_id__recipients_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["page"] = '1'
request.queryParams["page_size"] = '1'
request.queryParams["list_id"] = '1'
request.method = 'GET'
request.path = '/v3/contactdb/lists/{list_id}/recipients'
request.headers['X-Mock'] = 200
it('test_contactdb_lists__list_id__recipients_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists__list_id__recipients__recipient_id__post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'POST'
request.path = '/v3/contactdb/lists/{list_id}/recipients/{recipient_id}'
request.headers['X-Mock'] = 201
it('test_contactdb_lists__list_id__recipients__recipient_id__post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_lists__list_id__recipients__recipient_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.queryParams["recipient_id"] = '1'
request.queryParams["list_id"] = '1'
request.method = 'DELETE'
request.path = '/v3/contactdb/lists/{list_id}/recipients/{recipient_id}'
request.headers['X-Mock'] = 204
it('test_contactdb_lists__list_id__recipients__recipient_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = [
{
"email": "[email protected]",
"first_name": "Guy",
"last_name": "Jones"
}
];
request.method = 'PATCH'
request.path = '/v3/contactdb/recipients'
request.headers['X-Mock'] = 201
it('test_contactdb_recipients_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = [
{
"age": 25,
"email": "[email protected]",
"first_name": "",
"last_name": "User"
},
{
"age": 25,
"email": "[email protected]",
"first_name": "Example",
"last_name": "User"
}
];
request.method = 'POST'
request.path = '/v3/contactdb/recipients'
request.headers['X-Mock'] = 201
it('test_contactdb_recipients_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["page"] = '1'
request.queryParams["page_size"] = '1'
request.method = 'GET'
request.path = '/v3/contactdb/recipients'
request.headers['X-Mock'] = 200
it('test_contactdb_recipients_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = [
"recipient_id1",
"recipient_id2"
];
request.method = 'DELETE'
request.path = '/v3/contactdb/recipients'
request.headers['X-Mock'] = 200
it('test_contactdb_recipients_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients_billable_count_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/recipients/billable_count'
request.headers['X-Mock'] = 200
it('test_contactdb_recipients_billable_count_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients_count_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/recipients/count'
request.headers['X-Mock'] = 200
it('test_contactdb_recipients_count_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients_search_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["%7Bfield_name%7D"] = 'test_string'
request.queryParams["{field_name}"] = 'test_string'
request.method = 'GET'
request.path = '/v3/contactdb/recipients/search'
request.headers['X-Mock'] = 200
it('test_contactdb_recipients_search_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients__recipient_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/recipients/{recipient_id}'
request.headers['X-Mock'] = 200
it('test_contactdb_recipients__recipient_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients__recipient_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/contactdb/recipients/{recipient_id}'
request.headers['X-Mock'] = 204
it('test_contactdb_recipients__recipient_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_recipients__recipient_id__lists_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/recipients/{recipient_id}/lists'
request.headers['X-Mock'] = 200
it('test_contactdb_recipients__recipient_id__lists_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_reserved_fields_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/reserved_fields'
request.headers['X-Mock'] = 200
it('test_contactdb_reserved_fields_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_segments_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"conditions": [
{
"and_or": "",
"field": "last_name",
"operator": "eq",
"value": "Miller"
},
{
"and_or": "and",
"field": "last_clicked",
"operator": "gt",
"value": "01/02/2015"
},
{
"and_or": "or",
"field": "clicks.campaign_identifier",
"operator": "eq",
"value": "513"
}
],
"list_id": 4,
"name": "Last Name Miller"
};
request.method = 'POST'
request.path = '/v3/contactdb/segments'
request.headers['X-Mock'] = 200
it('test_contactdb_segments_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_segments_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/contactdb/segments'
request.headers['X-Mock'] = 200
it('test_contactdb_segments_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_segments__segment_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"conditions": [
{
"and_or": "",
"field": "last_name",
"operator": "eq",
"value": "Miller"
}
],
"list_id": 5,
"name": "The Millers"
};
request.queryParams["segment_id"] = 'test_string'
request.method = 'PATCH'
request.path = '/v3/contactdb/segments/{segment_id}'
request.headers['X-Mock'] = 200
it('test_contactdb_segments__segment_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_segments__segment_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["segment_id"] = '1'
request.method = 'GET'
request.path = '/v3/contactdb/segments/{segment_id}'
request.headers['X-Mock'] = 200
it('test_contactdb_segments__segment_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_segments__segment_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.queryParams["delete_contacts"] = 'true'
request.method = 'DELETE'
request.path = '/v3/contactdb/segments/{segment_id}'
request.headers['X-Mock'] = 204
it('test_contactdb_segments__segment_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_contactdb_segments__segment_id__recipients_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["page"] = '1'
request.queryParams["page_size"] = '1'
request.method = 'GET'
request.path = '/v3/contactdb/segments/{segment_id}/recipients'
request.headers['X-Mock'] = 200
it('test_contactdb_segments__segment_id__recipients_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
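// The stats endpoints exercised below (/v3/devices/stats, /v3/geo/stats,
// /v3/mailbox_providers/stats, /v3/stats, and the /v3/subusers/stats variants) share a
// common set of paging/date query parameters (start_date, end_date in YYYY-MM-DD form,
// aggregated_by, limit, offset), plus endpoint-specific filters such as country,
// mailbox_providers, or subusers.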
describe('test_devices_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = '1'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/devices/stats'
request.headers['X-Mock'] = 200
it('test_devices_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_geo_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["country"] = 'US'
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.queryParams["start_date"] = '2016-01-01'
request.method = 'GET'
request.path = '/v3/geo/stats'
request.headers['X-Mock'] = 200
it('test_geo_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["subuser"] = 'test_string'
request.queryParams["ip"] = 'test_string'
request.queryParams["limit"] = '1'
request.queryParams["exclude_whitelabels"] = 'true'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/ips'
request.headers['X-Mock'] = 200
it('test_ips_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_assigned_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/ips/assigned'
request.headers['X-Mock'] = 200
it('test_ips_assigned_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_pools_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "marketing"
};
request.method = 'POST'
request.path = '/v3/ips/pools'
request.headers['X-Mock'] = 200
it('test_ips_pools_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_pools_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/ips/pools'
request.headers['X-Mock'] = 200
it('test_ips_pools_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_pools__pool_name__put', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "new_pool_name"
};
request.method = 'PUT'
request.path = '/v3/ips/pools/{pool_name}'
request.headers['X-Mock'] = 200
it('test_ips_pools__pool_name__put had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_pools__pool_name__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/ips/pools/{pool_name}'
request.headers['X-Mock'] = 200
it('test_ips_pools__pool_name__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_pools__pool_name__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/ips/pools/{pool_name}'
request.headers['X-Mock'] = 204
it('test_ips_pools__pool_name__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_ips_pools__pool_name__ips_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"ip": "0.0.0.0"
};
request.method = 'POST'
request.path = '/v3/ips/pools/{pool_name}/ips'
request.headers['X-Mock'] = 201
it('test_ips_pools__pool_name__ips_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_ips_pools__pool_name__ips__ip__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/ips/pools/{pool_name}/ips/{ip}'
request.headers['X-Mock'] = 204
it('test_ips_pools__pool_name__ips__ip__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_ips_warmup_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"ip": "0.0.0.0"
};
request.method = 'POST'
request.path = '/v3/ips/warmup'
request.headers['X-Mock'] = 200
it('test_ips_warmup_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_warmup_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/ips/warmup'
request.headers['X-Mock'] = 200
it('test_ips_warmup_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_warmup__ip_address__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/ips/warmup/{ip_address}'
request.headers['X-Mock'] = 200
it('test_ips_warmup__ip_address__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_ips_warmup__ip_address__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/ips/warmup/{ip_address}'
request.headers['X-Mock'] = 204
it('test_ips_warmup__ip_address__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_ips__ip_address__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/ips/{ip_address}'
request.headers['X-Mock'] = 200
it('test_ips__ip_address__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_batch_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'POST'
request.path = '/v3/mail/batch'
request.headers['X-Mock'] = 201
it('test_mail_batch_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_mail_batch__batch_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail/batch/{batch_id}'
request.headers['X-Mock'] = 200
it('test_mail_batch__batch_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
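// The /v3/mail/send request below packs most optional fields of the v3 Mail Send payload
// into one fixture: asm, attachments, categories, custom_args, mail_settings,
// personalizations, sections, template_id, and tracking_settings. Bracketed placeholders
// such as "[YOUR BATCH ID GOES HERE]" are intentionally left unfilled; the test only
// asserts the mocked 202 response code.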
describe('test_mail_send_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"asm": {
"group_id": 1,
"groups_to_display": [
1,
2,
3
]
},
"attachments": [
{
"content": "[BASE64 encoded content block here]",
"content_id": "ii_139db99fdb5c3704",
"disposition": "inline",
"filename": "file1.jpg",
"name": "file1",
"type": "jpg"
}
],
"batch_id": "[YOUR BATCH ID GOES HERE]",
"categories": [
"category1",
"category2"
],
"content": [
{
"type": "text/html",
"value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
}
],
"custom_args": {
"New Argument 1": "New Value 1",
"activationAttempt": "1",
"customerAccountNumber": "[CUSTOMER ACCOUNT NUMBER GOES HERE]"
},
"from": {
"email": "[email protected]",
"name": "Sam Smith"
},
"headers": {},
"ip_pool_name": "[YOUR POOL NAME GOES HERE]",
"mail_settings": {
"bcc": {
"email": "[email protected]",
"enable": true
},
"bypass_list_management": {
"enable": true
},
"footer": {
"enable": true,
"html": "<p>Thanks</br>The SendGrid Team</p>",
"text": "Thanks,/n The SendGrid Team"
},
"sandbox_mode": {
"enable": false
},
"spam_check": {
"enable": true,
"post_to_url": "http://example.com/compliance",
"threshold": 3
}
},
"personalizations": [
{
"bcc": [
{
"email": "[email protected]",
"name": "Sam Doe"
}
],
"cc": [
{
"email": "[email protected]",
"name": "Jane Doe"
}
],
"custom_args": {
"New Argument 1": "New Value 1",
"activationAttempt": "1",
"customerAccountNumber": "[CUSTOMER ACCOUNT NUMBER GOES HERE]"
},
"headers": {
"X-Accept-Language": "en",
"X-Mailer": "MyApp"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"substitutions": {
"id": "substitutions",
"type": "object"
},
"to": [
{
"email": "[email protected]",
"name": "John Doe"
}
]
}
],
"reply_to": {
"email": "[email protected]",
"name": "Sam Smith"
},
"sections": {
"section": {
":sectionName1": "section 1 text",
":sectionName2": "section 2 text"
}
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"click_tracking": {
"enable": true,
"enable_text": true
},
"ganalytics": {
"enable": true,
"utm_campaign": "[NAME OF YOUR REFERRER SOURCE]",
"utm_content": "[USE THIS SPACE TO DIFFERENTIATE YOUR EMAIL FROM ADS]",
"utm_medium": "[NAME OF YOUR MARKETING MEDIUM e.g. email]",
"utm_name": "[NAME OF YOUR CAMPAIGN]",
"utm_term": "[IDENTIFY PAID KEYWORDS HERE]"
},
"open_tracking": {
"enable": true,
"substitution_tag": "%opentrack"
},
"subscription_tracking": {
"enable": true,
"html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiveing these emails <% click here %>."
}
}
};
request.method = 'POST'
request.path = '/v3/mail/send'
request.headers['X-Mock'] = 202
it('test_mail_send_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 202, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/mail_settings'
request.headers['X-Mock'] = 200
it('test_mail_settings_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_address_whitelist_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true,
"list": [
"[email protected]",
"example.com"
]
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/address_whitelist'
request.headers['X-Mock'] = 200
it('test_mail_settings_address_whitelist_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_address_whitelist_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/address_whitelist'
request.headers['X-Mock'] = 200
it('test_mail_settings_address_whitelist_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_bcc_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email": "[email protected]",
"enabled": false
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/bcc'
request.headers['X-Mock'] = 200
it('test_mail_settings_bcc_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_bcc_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/bcc'
request.headers['X-Mock'] = 200
it('test_mail_settings_bcc_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_bounce_purge_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true,
"hard_bounces": 5,
"soft_bounces": 5
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/bounce_purge'
request.headers['X-Mock'] = 200
it('test_mail_settings_bounce_purge_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_bounce_purge_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/bounce_purge'
request.headers['X-Mock'] = 200
it('test_mail_settings_bounce_purge_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_footer_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true,
"html_content": "...",
"plain_content": "..."
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/footer'
request.headers['X-Mock'] = 200
it('test_mail_settings_footer_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_footer_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/footer'
request.headers['X-Mock'] = 200
it('test_mail_settings_footer_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_forward_bounce_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email": "[email protected]",
"enabled": true
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/forward_bounce'
request.headers['X-Mock'] = 200
it('test_mail_settings_forward_bounce_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_forward_bounce_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/forward_bounce'
request.headers['X-Mock'] = 200
it('test_mail_settings_forward_bounce_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_forward_spam_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email": "",
"enabled": false
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/forward_spam'
request.headers['X-Mock'] = 200
it('test_mail_settings_forward_spam_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_forward_spam_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/forward_spam'
request.headers['X-Mock'] = 200
it('test_mail_settings_forward_spam_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_plain_content_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": false
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/plain_content'
request.headers['X-Mock'] = 200
it('test_mail_settings_plain_content_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_plain_content_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/plain_content'
request.headers['X-Mock'] = 200
it('test_mail_settings_plain_content_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_spam_check_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true,
"max_score": 5,
"url": "url"
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/spam_check'
request.headers['X-Mock'] = 200
it('test_mail_settings_spam_check_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_spam_check_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/spam_check'
request.headers['X-Mock'] = 200
it('test_mail_settings_spam_check_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_template_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true,
"html_content": "<% body %>"
};
request.method = 'PATCH'
request.path = '/v3/mail_settings/template'
request.headers['X-Mock'] = 200
it('test_mail_settings_template_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mail_settings_template_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/mail_settings/template'
request.headers['X-Mock'] = 200
it('test_mail_settings_template_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_mailbox_providers_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["mailbox_providers"] = 'test_string'
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.queryParams["start_date"] = '2016-01-01'
request.method = 'GET'
request.path = '/v3/mailbox_providers/stats'
request.headers['X-Mock'] = 200
it('test_mailbox_providers_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_partner_settings_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/partner_settings'
request.headers['X-Mock'] = 200
it('test_partner_settings_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_partner_settings_new_relic_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enable_subuser_statistics": true,
"enabled": true,
"license_key": ""
};
request.method = 'PATCH'
request.path = '/v3/partner_settings/new_relic'
request.headers['X-Mock'] = 200
it('test_partner_settings_new_relic_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_partner_settings_new_relic_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/partner_settings/new_relic'
request.headers['X-Mock'] = 200
it('test_partner_settings_new_relic_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_scopes_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/scopes'
request.headers['X-Mock'] = 200
it('test_scopes_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = '1'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/stats'
request.headers['X-Mock'] = 200
it('test_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email": "[email protected]",
"ips": [
"1.1.1.1",
"2.2.2.2"
],
"password": "johns_password",
"username": "[email protected]"
};
request.method = 'POST'
request.path = '/v3/subusers'
request.headers['X-Mock'] = 200
it('test_subusers_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["username"] = 'test_string'
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/subusers'
request.headers['X-Mock'] = 200
it('test_subusers_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers_reputations_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["usernames"] = 'test_string'
request.method = 'GET'
request.path = '/v3/subusers/reputations'
request.headers['X-Mock'] = 200
it('test_subusers_reputations_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["subusers"] = 'test_string'
request.method = 'GET'
request.path = '/v3/subusers/stats'
request.headers['X-Mock'] = 200
it('test_subusers_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers_stats_monthly_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["subuser"] = 'test_string'
request.queryParams["limit"] = '1'
request.queryParams["sort_by_metric"] = 'test_string'
request.queryParams["offset"] = '1'
request.queryParams["date"] = 'test_string'
request.queryParams["sort_by_direction"] = 'asc'
request.method = 'GET'
request.path = '/v3/subusers/stats/monthly'
request.headers['X-Mock'] = 200
it('test_subusers_stats_monthly_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers_stats_sums_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = '1'
request.queryParams["sort_by_metric"] = 'test_string'
request.queryParams["offset"] = '1'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["sort_by_direction"] = 'asc'
request.method = 'GET'
request.path = '/v3/subusers/stats/sums'
request.headers['X-Mock'] = 200
it('test_subusers_stats_sums_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers__subuser_name__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"disabled": false
};
request.method = 'PATCH'
request.path = '/v3/subusers/{subuser_name}'
request.headers['X-Mock'] = 204
it('test_subusers__subuser_name__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_subusers__subuser_name__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/subusers/{subuser_name}'
request.headers['X-Mock'] = 204
it('test_subusers__subuser_name__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_subusers__subuser_name__ips_put', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = [
"127.0.0.1"
];
request.method = 'PUT'
request.path = '/v3/subusers/{subuser_name}/ips'
request.headers['X-Mock'] = 200
it('test_subusers__subuser_name__ips_put had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers__subuser_name__monitor_put', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email": "[email protected]",
"frequency": 500
};
request.method = 'PUT'
request.path = '/v3/subusers/{subuser_name}/monitor'
request.headers['X-Mock'] = 200
it('test_subusers__subuser_name__monitor_put had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers__subuser_name__monitor_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email": "[email protected]",
"frequency": 50000
};
request.method = 'POST'
request.path = '/v3/subusers/{subuser_name}/monitor'
request.headers['X-Mock'] = 200
it('test_subusers__subuser_name__monitor_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers__subuser_name__monitor_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/subusers/{subuser_name}/monitor'
request.headers['X-Mock'] = 200
it('test_subusers__subuser_name__monitor_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_subusers__subuser_name__monitor_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/subusers/{subuser_name}/monitor'
request.headers['X-Mock'] = 204
it('test_subusers__subuser_name__monitor_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_subusers__subuser_name__stats_monthly_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["date"] = 'test_string'
request.queryParams["sort_by_direction"] = 'asc'
request.queryParams["limit"] = '1'
request.queryParams["sort_by_metric"] = 'test_string'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/subusers/{subuser_name}/stats/monthly'
request.headers['X-Mock'] = 200
it('test_subusers__subuser_name__stats_monthly_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
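// Suppression lists (blocks, bounces, invalid emails, spam reports, unsubscribes):
// paginated GETs with start_time/end_time filters, bulk DELETEs that take either
// delete_all or an explicit emails array, and per-address GET/DELETE endpoints.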
describe('test_suppression_blocks_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["start_time"] = '1'
request.queryParams["limit"] = '1'
request.queryParams["end_time"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/suppression/blocks'
request.headers['X-Mock'] = 200
it('test_suppression_blocks_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_suppression_blocks_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"delete_all": false,
"emails": [
"[email protected]",
"[email protected]"
]
};
request.method = 'DELETE'
request.path = '/v3/suppression/blocks'
request.headers['X-Mock'] = 204
it('test_suppression_blocks_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_suppression_blocks__email__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/suppression/blocks/{email}'
request.headers['X-Mock'] = 200
it('test_suppression_blocks__email__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_suppression_blocks__email__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/suppression/blocks/{email}'
request.headers['X-Mock'] = 204
it('test_suppression_blocks__email__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_suppression_bounces_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["start_time"] = '1'
request.queryParams["end_time"] = '1'
request.method = 'GET'
request.path = '/v3/suppression/bounces'
request.headers['X-Mock'] = 200
it('test_suppression_bounces_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_suppression_bounces_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"delete_all": true,
"emails": [
"[email protected]",
"[email protected]"
]
};
request.method = 'DELETE'
request.path = '/v3/suppression/bounces'
request.headers['X-Mock'] = 204
it('test_suppression_bounces_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_suppression_bounces__email__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/suppression/bounces/{email}'
request.headers['X-Mock'] = 200
it('test_suppression_bounces__email__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_suppression_bounces__email__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.queryParams["email_address"] = '[email protected]'
request.method = 'DELETE'
request.path = '/v3/suppression/bounces/{email}'
request.headers['X-Mock'] = 204
it('test_suppression_bounces__email__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_suppression_invalid_emails_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["start_time"] = '1'
request.queryParams["limit"] = '1'
request.queryParams["end_time"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/suppression/invalid_emails'
request.headers['X-Mock'] = 200
it('test_suppression_invalid_emails_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_suppression_invalid_emails_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"delete_all": false,
"emails": [
"[email protected]",
"[email protected]"
]
};
request.method = 'DELETE'
request.path = '/v3/suppression/invalid_emails'
request.headers['X-Mock'] = 204
it('test_suppression_invalid_emails_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_suppression_invalid_emails__email__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/suppression/invalid_emails/{email}'
request.headers['X-Mock'] = 200
it('test_suppression_invalid_emails__email__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_suppression_invalid_emails__email__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/suppression/invalid_emails/{email}'
request.headers['X-Mock'] = 204
it('test_suppression_invalid_emails__email__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_suppression_spam_reports__email__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/suppression/spam_reports/{email}'
request.headers['X-Mock'] = 200
it('test_suppression_spam_reports__email__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_suppression_spam_report__email__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/suppression/spam_report/{email}'
request.headers['X-Mock'] = 204
it('test_suppression_spam_report__email__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_suppression_spam_reports_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["start_time"] = '1'
request.queryParams["limit"] = '1'
request.queryParams["end_time"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/suppression/spam_reports'
request.headers['X-Mock'] = 200
it('test_suppression_spam_reports_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_suppression_spam_reports_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"delete_all": false,
"emails": [
"[email protected]",
"[email protected]"
]
};
request.method = 'DELETE'
request.path = '/v3/suppression/spam_reports'
request.headers['X-Mock'] = 204
it('test_suppression_spam_reports_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_suppression_unsubscribes_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["start_time"] = '1'
request.queryParams["limit"] = '1'
request.queryParams["end_time"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/suppression/unsubscribes'
request.headers['X-Mock'] = 200
it('test_suppression_unsubscribes_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
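// Transactional templates: create and list templates, update/fetch/delete a
// template by id, and manage its versions (create, patch, get, delete, activate).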
describe('test_templates_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "example_name"
};
request.method = 'POST'
request.path = '/v3/templates'
request.headers['X-Mock'] = 201
it('test_templates_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_templates_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/templates'
request.headers['X-Mock'] = 200
it('test_templates_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_templates__template_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"name": "new_example_name"
};
request.method = 'PATCH'
request.path = '/v3/templates/{template_id}'
request.headers['X-Mock'] = 200
it('test_templates__template_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_templates__template_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/templates/{template_id}'
request.headers['X-Mock'] = 200
it('test_templates__template_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_templates__template_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/templates/{template_id}'
request.headers['X-Mock'] = 204
it('test_templates__template_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_templates__template_id__versions_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"active": 1,
"html_content": "<%body%>",
"name": "example_version_name",
"plain_content": "<%body%>",
"subject": "<%subject%>",
"template_id": "ddb96bbc-9b92-425e-8979-99464621b543"
};
request.method = 'POST'
request.path = '/v3/templates/{template_id}/versions'
request.headers['X-Mock'] = 201
it('test_templates__template_id__versions_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_templates__template_id__versions__version_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"active": 1,
"html_content": "<%body%>",
"name": "updated_example_name",
"plain_content": "<%body%>",
"subject": "<%subject%>"
};
request.method = 'PATCH'
request.path = '/v3/templates/{template_id}/versions/{version_id}'
request.headers['X-Mock'] = 200
it('test_templates__template_id__versions__version_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_templates__template_id__versions__version_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/templates/{template_id}/versions/{version_id}'
request.headers['X-Mock'] = 200
it('test_templates__template_id__versions__version_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_templates__template_id__versions__version_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/templates/{template_id}/versions/{version_id}'
request.headers['X-Mock'] = 204
it('test_templates__template_id__versions__version_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_templates__template_id__versions__version_id__activate_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'POST'
request.path = '/v3/templates/{template_id}/versions/{version_id}/activate'
request.headers['X-Mock'] = 200
it('test_templates__template_id__versions__version_id__activate_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
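// Tracking settings: list all settings, then GET/PATCH the click, Google
// Analytics, open and subscription tracking settings individually.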
describe('test_tracking_settings_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/tracking_settings'
request.headers['X-Mock'] = 200
it('test_tracking_settings_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_tracking_settings_click_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true
};
request.method = 'PATCH'
request.path = '/v3/tracking_settings/click'
request.headers['X-Mock'] = 200
it('test_tracking_settings_click_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_tracking_settings_click_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/tracking_settings/click'
request.headers['X-Mock'] = 200
it('test_tracking_settings_click_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_tracking_settings_google_analytics_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true,
"utm_campaign": "website",
"utm_content": "",
"utm_medium": "email",
"utm_source": "sendgrid.com",
"utm_term": ""
};
request.method = 'PATCH'
request.path = '/v3/tracking_settings/google_analytics'
request.headers['X-Mock'] = 200
it('test_tracking_settings_google_analytics_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_tracking_settings_google_analytics_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/tracking_settings/google_analytics'
request.headers['X-Mock'] = 200
it('test_tracking_settings_google_analytics_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_tracking_settings_open_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true
};
request.method = 'PATCH'
request.path = '/v3/tracking_settings/open'
request.headers['X-Mock'] = 200
it('test_tracking_settings_open_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_tracking_settings_open_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/tracking_settings/open'
request.headers['X-Mock'] = 200
it('test_tracking_settings_open_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_tracking_settings_subscription_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"enabled": true,
"html_content": "html content",
"landing": "landing page html",
"plain_content": "text content",
"replace": "replacement tag",
"url": "url"
};
request.method = 'PATCH'
request.path = '/v3/tracking_settings/subscription'
request.headers['X-Mock'] = 200
it('test_tracking_settings_subscription_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_tracking_settings_subscription_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/tracking_settings/subscription'
request.headers['X-Mock'] = 200
it('test_tracking_settings_subscription_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
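// User account and profile: account details, credit balance, email address,
// password and profile fields.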
describe('test_user_account_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/account'
request.headers['X-Mock'] = 200
it('test_user_account_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_credits_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/credits'
request.headers['X-Mock'] = 200
it('test_user_credits_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_email_put', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"email": "[email protected]"
};
request.method = 'PUT'
request.path = '/v3/user/email'
request.headers['X-Mock'] = 200
it('test_user_email_put had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_email_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/email'
request.headers['X-Mock'] = 200
it('test_user_email_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_password_put', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"new_password": "new_password",
"old_password": "old_password"
};
request.method = 'PUT'
request.path = '/v3/user/password'
request.headers['X-Mock'] = 200
it('test_user_password_put had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_profile_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"city": "Orange",
"first_name": "Example",
"last_name": "User"
};
request.method = 'PATCH'
request.path = '/v3/user/profile'
request.headers['X-Mock'] = 200
it('test_user_profile_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_profile_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/profile'
request.headers['X-Mock'] = 200
it('test_user_profile_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
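// Scheduled sends: pause or cancel a send by batch_id, list cancellations, and
// update/fetch/delete the status of a specific batch.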
describe('test_user_scheduled_sends_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"batch_id": "YOUR_BATCH_ID",
"status": "pause"
};
request.method = 'POST'
request.path = '/v3/user/scheduled_sends'
request.headers['X-Mock'] = 201
it('test_user_scheduled_sends_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_user_scheduled_sends_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/scheduled_sends'
request.headers['X-Mock'] = 200
it('test_user_scheduled_sends_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_scheduled_sends__batch_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"status": "pause"
};
request.method = 'PATCH'
request.path = '/v3/user/scheduled_sends/{batch_id}'
request.headers['X-Mock'] = 204
it('test_user_scheduled_sends__batch_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_user_scheduled_sends__batch_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/scheduled_sends/{batch_id}'
request.headers['X-Mock'] = 200
it('test_user_scheduled_sends__batch_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_scheduled_sends__batch_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/user/scheduled_sends/{batch_id}'
request.headers['X-Mock'] = 204
it('test_user_scheduled_sends__batch_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
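// Account security and identity settings: enforced TLS (PATCH/GET) and the
// account username (PUT/GET).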
describe('test_user_settings_enforced_tls_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"require_tls": true,
"require_valid_cert": false
};
request.method = 'PATCH'
request.path = '/v3/user/settings/enforced_tls'
request.headers['X-Mock'] = 200
it('test_user_settings_enforced_tls_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_settings_enforced_tls_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/settings/enforced_tls'
request.headers['X-Mock'] = 200
it('test_user_settings_enforced_tls_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_username_put', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"username": "test_username"
};
request.method = 'PUT'
request.path = '/v3/user/username'
request.headers['X-Mock'] = 200
it('test_user_username_put had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_username_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/username'
request.headers['X-Mock'] = 200
it('test_user_username_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
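// Event webhook: update/fetch which event types (bounce, click, open, ...) are
// POSTed to the configured URL, plus a test POST against that URL.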
describe('test_user_webhooks_event_settings_patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"bounce": true,
"click": true,
"deferred": true,
"delivered": true,
"dropped": true,
"enabled": true,
"group_resubscribe": true,
"group_unsubscribe": true,
"open": true,
"processed": true,
"spam_report": true,
"unsubscribe": true,
"url": "url"
};
request.method = 'PATCH'
request.path = '/v3/user/webhooks/event/settings'
request.headers['X-Mock'] = 200
it('test_user_webhooks_event_settings_patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_webhooks_event_settings_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/webhooks/event/settings'
request.headers['X-Mock'] = 200
it('test_user_webhooks_event_settings_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_webhooks_event_test_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"url": "url"
};
request.method = 'POST'
request.path = '/v3/user/webhooks/event/test'
request.headers['X-Mock'] = 204
it('test_user_webhooks_event_test_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
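// Inbound parse webhook: create and list host settings, update/fetch/delete a
// setting by hostname, and retrieve parse statistics.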
describe('test_user_webhooks_parse_settings_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"hostname": "myhostname.com",
"send_raw": false,
"spam_check": true,
"url": "http://email.myhosthame.com"
};
request.method = 'POST'
request.path = '/v3/user/webhooks/parse/settings'
request.headers['X-Mock'] = 201
it('test_user_webhooks_parse_settings_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_user_webhooks_parse_settings_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/webhooks/parse/settings'
request.headers['X-Mock'] = 200
it('test_user_webhooks_parse_settings_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_webhooks_parse_settings__hostname__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"send_raw": true,
"spam_check": false,
"url": "http://newdomain.com/parse"
};
request.method = 'PATCH'
request.path = '/v3/user/webhooks/parse/settings/{hostname}'
request.headers['X-Mock'] = 200
it('test_user_webhooks_parse_settings__hostname__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_webhooks_parse_settings__hostname__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/user/webhooks/parse/settings/{hostname}'
request.headers['X-Mock'] = 200
it('test_user_webhooks_parse_settings__hostname__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_user_webhooks_parse_settings__hostname__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/user/webhooks/parse/settings/{hostname}'
request.headers['X-Mock'] = 204
it('test_user_webhooks_parse_settings__hostname__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_user_webhooks_parse_stats_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["aggregated_by"] = 'day'
request.queryParams["limit"] = 'test_string'
request.queryParams["start_date"] = '2016-01-01'
request.queryParams["end_date"] = '2016-04-01'
request.queryParams["offset"] = 'test_string'
request.method = 'GET'
request.path = '/v3/user/webhooks/parse/stats'
request.headers['X-Mock'] = 200
it('test_user_webhooks_parse_stats_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
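// Whitelabel domains (domain authentication): create and list domains, look up
// the default and per-subuser domains, update/fetch/delete by id, attach a
// subuser, add or remove dedicated IPs, and validate a domain.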
describe('test_whitelabel_domains_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"automatic_security": false,
"custom_spf": true,
"default": true,
"domain": "example.com",
"ips": [
"192.168.1.1",
"192.168.1.2"
],
"subdomain": "news",
"username": "[email protected]"
};
request.method = 'POST'
request.path = '/v3/whitelabel/domains'
request.headers['X-Mock'] = 201
it('test_whitelabel_domains_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["username"] = 'test_string'
request.queryParams["domain"] = 'test_string'
request.queryParams["exclude_subusers"] = 'true'
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/whitelabel/domains'
request.headers['X-Mock'] = 200
it('test_whitelabel_domains_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains_default_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/whitelabel/domains/default'
request.headers['X-Mock'] = 200
it('test_whitelabel_domains_default_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains_subuser_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/whitelabel/domains/subuser'
request.headers['X-Mock'] = 200
it('test_whitelabel_domains_subuser_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains_subuser_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/whitelabel/domains/subuser'
request.headers['X-Mock'] = 204
it('test_whitelabel_domains_subuser_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains__domain_id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"custom_spf": true,
"default": false
};
request.method = 'PATCH'
request.path = '/v3/whitelabel/domains/{domain_id}'
request.headers['X-Mock'] = 200
it('test_whitelabel_domains__domain_id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains__domain_id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/whitelabel/domains/{domain_id}'
request.headers['X-Mock'] = 200
it('test_whitelabel_domains__domain_id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains__domain_id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/whitelabel/domains/{domain_id}'
request.headers['X-Mock'] = 204
it('test_whitelabel_domains__domain_id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains__domain_id__subuser_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"username": "[email protected]"
};
request.method = 'POST'
request.path = '/v3/whitelabel/domains/{domain_id}/subuser'
request.headers['X-Mock'] = 201
it('test_whitelabel_domains__domain_id__subuser_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains__id__ips_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"ip": "192.168.0.1"
};
request.method = 'POST'
request.path = '/v3/whitelabel/domains/{id}/ips'
request.headers['X-Mock'] = 200
it('test_whitelabel_domains__id__ips_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains__id__ips__ip__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/whitelabel/domains/{id}/ips/{ip}'
request.headers['X-Mock'] = 200
it('test_whitelabel_domains__id__ips__ip__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_domains__id__validate_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'POST'
request.path = '/v3/whitelabel/domains/{id}/validate'
request.headers['X-Mock'] = 200
it('test_whitelabel_domains__id__validate_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_ips_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"domain": "example.com",
"ip": "192.168.1.1",
"subdomain": "email"
};
request.method = 'POST'
request.path = '/v3/whitelabel/ips'
request.headers['X-Mock'] = 201
it('test_whitelabel_ips_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_ips_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["ip"] = 'test_string'
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'GET'
request.path = '/v3/whitelabel/ips'
request.headers['X-Mock'] = 200
it('test_whitelabel_ips_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_ips__id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/whitelabel/ips/{id}'
request.headers['X-Mock'] = 200
it('test_whitelabel_ips__id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_ips__id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/whitelabel/ips/{id}'
request.headers['X-Mock'] = 204
it('test_whitelabel_ips__id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_ips__id__validate_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'POST'
request.path = '/v3/whitelabel/ips/{id}/validate'
request.headers['X-Mock'] = 200
it('test_whitelabel_ips__id__validate_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"default": true,
"domain": "example.com",
"subdomain": "mail"
};
request.queryParams["limit"] = '1'
request.queryParams["offset"] = '1'
request.method = 'POST'
request.path = '/v3/whitelabel/links'
request.headers['X-Mock'] = 201
it('test_whitelabel_links_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 201, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["limit"] = '1'
request.method = 'GET'
request.path = '/v3/whitelabel/links'
request.headers['X-Mock'] = 200
it('test_whitelabel_links_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links_default_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["domain"] = 'test_string'
request.method = 'GET'
request.path = '/v3/whitelabel/links/default'
request.headers['X-Mock'] = 200
it('test_whitelabel_links_default_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links_subuser_get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.queryParams["username"] = 'test_string'
request.method = 'GET'
request.path = '/v3/whitelabel/links/subuser'
request.headers['X-Mock'] = 200
it('test_whitelabel_links_subuser_get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links_subuser_delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.queryParams["username"] = 'test_string'
request.method = 'DELETE'
request.path = '/v3/whitelabel/links/subuser'
request.headers['X-Mock'] = 204
it('test_whitelabel_links_subuser_delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links__id__patch', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"default": true
};
request.method = 'PATCH'
request.path = '/v3/whitelabel/links/{id}'
request.headers['X-Mock'] = 200
it('test_whitelabel_links__id__patch had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links__id__get', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.method = 'GET'
request.path = '/v3/whitelabel/links/{id}'
request.headers['X-Mock'] = 200
it('test_whitelabel_links__id__get had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links__id__delete', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'DELETE'
request.path = '/v3/whitelabel/links/{id}'
request.headers['X-Mock'] = 204
it('test_whitelabel_links__id__delete had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 204, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links__id__validate_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = null;
request.method = 'POST'
request.path = '/v3/whitelabel/links/{id}/validate'
request.headers['X-Mock'] = 200
it('test_whitelabel_links__id__validate_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})
describe('test_whitelabel_links__link_id__subuser_post', function () {
this.timeout(30000);
var API_KEY = 'SendGrid API Key'
if(process.env.TRAVIS) {
var TEST_HOST = process.env.MOCK_HOST
} else {
var TEST_HOST = 'localhost'
}
var sg = sendgrid(API_KEY, TEST_HOST)
var request = sg.emptyRequest()
if(TEST_HOST == 'localhost') {
request.test = true
request.port = 4010
}
request.body = {
"username": "[email protected]"
};
request.method = 'POST'
request.path = '/v3/whitelabel/links/{link_id}/subuser'
request.headers['X-Mock'] = 200
it('test_whitelabel_links__link_id__subuser_post had the correct response code', function(done) {
sg.API(request, function (error, response) {
assert.equal(response.statusCode, 200, 'response code is not correct')
done();
})
});
})<|fim▁end|>
|
request.path = '/v3/mail_settings/address_whitelist'
request.headers['X-Mock'] = 200
it('test_mail_settings_address_whitelist_get had the correct response code', function(done) {
|
<|file_name|>widgets.hpp<|end_file_name|><|fim▁begin|>// A shortcut header to get all widgets
#include "guiengine/widgets/bubble_widget.hpp"
#include "guiengine/widgets/button_widget.hpp"
#include "guiengine/widgets/icon_button_widget.hpp"
#include "guiengine/widgets/list_widget.hpp"
#include "guiengine/widgets/dynamic_ribbon_widget.hpp"
#include "guiengine/widgets/spinner_widget.hpp"<|fim▁hole|>#include "guiengine/widgets/progress_bar_widget.hpp"
#include "guiengine/widgets/ribbon_widget.hpp"
#include "guiengine/widgets/model_view_widget.hpp"
#include "guiengine/widgets/text_box_widget.hpp"
#include "guiengine/widgets/label_widget.hpp"
#include "guiengine/widgets/check_box_widget.hpp"<|fim▁end|>
| |
<|file_name|>KF.ts<|end_file_name|><|fim▁begin|>import { AccountManager } from "./AccountManager";
import { PersistentTabState } from "./PersistentTabState";
import { jsonrpcClient } from "./jsonrpcClient";
import { ConfigSyncManager } from "./ConfigSyncManager";
import { NetworkAuth } from "./NetworkAuth";
import { AnimateIcon } from "./AnimateIcon";
import { NativeNotification } from "./NativeNotification";
import { commandManager } from "./commands";
import {
browserPopupMessageHandler,
pageMessageHandler,
vaultMessageHandler,
iframeMessageHandler
} from "./messageHandlers";
import { TabState } from "../common/TabState";
import { Utils, utils } from "../common/utils";
import { SearcherAll } from "../common/SearcherAll";
import { configManager } from "../common/ConfigManager";
import { KeeLog } from "../common/Logger";
import { AddonMessage } from "../common/AddonMessage";
import { FrameState } from "../common/FrameState";
import { VaultMessage } from "../common/VaultMessage";
import { KeeNotification } from "../common/KeeNotification";
import { VaultProtocol } from "../common/VaultProtocol";
import { SessionType } from "../common/SessionType";
import { Action } from "../common/Action";
import store from "../store";
import { SyncBackground } from "../store/syncBackground";
import { MutationPayload } from "vuex";
import { Database } from "../common/model/Database";
import { Entry } from "../common/model/Entry";
import { SaveEntryResult } from "../common/SaveEntryResult";
export class Kee {
accountManager: AccountManager;
tabStates: Map<number, TabState>;
persistentTabStates: Map<number, PersistentTabState>;
utils: Utils;
search: SearcherAll;
foregroundTabId: number;
regularKPRPCListenerQueueHandlerTimer: number;
currentSearchTermTimer: number;
// Our link to the JSON-RPC objects required for communication with KeePass
KeePassRPC: jsonrpcClient;
configSyncManager = new ConfigSyncManager();
_installerTabLoaded: boolean;
processingCallback: boolean;
pendingCallback: string;
urlToOpenOnStartup: string;
browserPopupPort: Partial<browser.runtime.Port>;
vaultPort: Partial<browser.runtime.Port>;
onPortConnected: any;
networkAuth: NetworkAuth;
animateIcon: AnimateIcon;
syncBackground: SyncBackground;
constructor() {
this.syncBackground = new SyncBackground(
store,
(mutation: MutationPayload, excludedPort: browser.runtime.Port) => {
const allPorts: Partial<browser.runtime.Port>[] = [];
allPorts.push(this.browserPopupPort);
allPorts.push(this.vaultPort);
const ts = window.kee.tabStates.get(window.kee.foregroundTabId);
if (ts) {
ts.framePorts.forEach(port => {
allPorts.push(port);
});
ts.ourIframePorts.forEach(port => {
allPorts.push(port);
});
}
for (const port of allPorts) {
if (port !== excludedPort) {
try {
port.postMessage({ mutation } as AddonMessage);
} catch (e) {
// Sometimes dead ports are left lying around by the browser (especially
// during upgrades, etc.). We can do nothing about this but must not let
// it cause this function to fail to execute to the end.
}
}
}
}
);
this.accountManager = new AccountManager();
this.tabStates = new Map<number, TabState>();
this.persistentTabStates = new Map<number, PersistentTabState>();
this.foregroundTabId = -1;
this.utils = utils;
this.search = new SearcherAll(store.state, {
version: 1,
searchAllDatabases: configManager.current.searchAllOpenDBs
});
this.networkAuth = new NetworkAuth();
this.animateIcon = new AnimateIcon();
// eslint-disable-next-line @typescript-eslint/no-empty-function
this.browserPopupPort = { postMessage: _msg => {} };
// eslint-disable-next-line @typescript-eslint/no-empty-function
this.vaultPort = { postMessage: _msg => {} };
this.onPortConnected = function (p: browser.runtime.Port) {
if (KeeLog && KeeLog.debug) KeeLog.debug(p.name + " port connected");
let name = p.name;
let parentFrameId: number;
if (name.startsWith("iframe")) {
parentFrameId = parseInt(name.substr(7));
name = "iframe";
}
switch (name) {
case "browserPopup": {
clearTimeout(window.kee.currentSearchTermTimer);
p.onMessage.addListener(browserPopupMessageHandler.bind(p));
p.onDisconnect.addListener(() => {
window.kee.browserPopupPort = {
// eslint-disable-next-line @typescript-eslint/no-empty-function
postMessage: _msg => {}
};
window.kee.currentSearchTermTimer = setTimeout(() => {
store.dispatch("updateCurrentSearchTerm", null);
store.dispatch("updateSearchResults", null);
}, configManager.current.currentSearchTermTimeout * 1000);
});
const connectMessage = {
initialState: store.state
} as AddonMessage;
let submittedData: any = null;
let loginsFound = false;
if (window.kee.persistentTabStates.get(window.kee.foregroundTabId)) {
window.kee.persistentTabStates
.get(window.kee.foregroundTabId)
.items.forEach(item => {
if (item.itemType === "submittedData") {
submittedData = item.submittedData;
}
});
}
if (window.kee.tabStates.has(window.kee.foregroundTabId)) {
const frames = window.kee.tabStates.get(window.kee.foregroundTabId).frames;
const matchedFrameID = window.kee.frameIdWithMatchedLogins(frames);
if (matchedFrameID >= 0) {
loginsFound = true;
connectMessage.entries = frames.get(matchedFrameID).entries;
connectMessage.frameId = matchedFrameID;
connectMessage.tabId = window.kee.foregroundTabId;
}
}
store.dispatch("updateSubmittedData", submittedData);
store.dispatch("updateLoginsFound", loginsFound);
p.postMessage(connectMessage);
window.kee.browserPopupPort = p;
window.kee.resetBrowserActionColor();
break;
}
case "page": {
p.onMessage.addListener(pageMessageHandler.bind(p));
const tabId = p.sender.tab.id;
const frameId = p.sender.frameId;
const connectMessage = {
initialState: store.state,
frameId,
tabId,
isForegroundTab: tabId === window.kee.foregroundTabId
} as AddonMessage;
window.kee.createTabStateIfMissing(tabId);
if (frameId === 0) {
window.kee.tabStates.get(tabId).url = p.sender.tab.url;
if (window.kee.persistentTabStates.get(tabId)?.items?.length > 0) {
window.kee.persistentTabStates.get(
tabId
).items = window.kee.persistentTabStates
.get(tabId)
.items.filter(
item =>
item.itemType !== "submittedData" ||
item.creationDate > new Date(Date.now() - 3600000)
);
}
}
window.kee.tabStates.get(tabId).frames.set(frameId, new FrameState());
window.kee.tabStates.get(tabId).framePorts.set(frameId, p);
p.postMessage(connectMessage);
break;
}
case "vault": {
p.onMessage.addListener(vaultMessageHandler.bind(p));
/* Potentially could/should call messageCloseSession() when the port is disconnected but
earlier experience suggests disconnection does not occur before a new port is connected in Firefox due to a
bug (works fine in Chrome) so we would risk closing the freshly opened session instead.
p.onDisconnect.addListener(this.messageCloseSession();)
*/
const connectMessage = {
initialState: store.state,
frameId: p.sender.frameId,
tabId: p.sender.tab.id,
isForegroundTab: p.sender.tab.id === window.kee.foregroundTabId
} as VaultMessage;
window.kee.vaultPort = p;
p.postMessage(connectMessage);
break;
}
case "iframe": {
p.onMessage.addListener(iframeMessageHandler.bind(p));
const connectMessage = {
initialState: store.state,
frameState: window.kee.tabStates
.get(p.sender.tab.id)
.frames.get(parentFrameId),
frameId: p.sender.frameId,
tabId: p.sender.tab.id,
isForegroundTab: p.sender.tab.id === window.kee.foregroundTabId
} as AddonMessage;
if (window.kee.persistentTabStates.get(p.sender.tab.id)) {
window.kee.persistentTabStates.get(p.sender.tab.id).items.forEach(item => {
if (item.itemType === "submittedData") {
connectMessage.submittedData = item.submittedData;
}
});
}
p.postMessage(connectMessage);
window.kee.tabStates
.get(p.sender.tab.id)
.ourIframePorts.set(p.sender.frameId, p);
break;
}
}
};
}
frameIdWithMatchedLogins(frames: Map<number, FrameState>) {
let frameId = -1;
frames.forEach((frame, i) => {
if (frameId == -1 && frame && frame.entries && frame.entries.length > 0) frameId = i;
});
return frameId;
}
async init() {
// Create a timer for KPRPC connection establishment
this.regularKPRPCListenerQueueHandlerTimer = setInterval(
this.RegularKPRPCListenerQueueHandler,
5000
);
this._keeBrowserStartup();
// This listener is called when a new account is logged in to within Kee Vault. It
// does not require an active KPRPC event session for delivery
this.accountManager.addListener(() => {
// If there is a vault port available but no active session, we poke the content script to
// reinitialise the connection if it now looks likely that it will succeed (as a result of
// a new account being logged in to which has the required multi-session feature).
// We don't bother with the WebSocket equivalent because that will automatically be tried
// regularly anyway.
// We also don't worry about kicking people off from active sessions if their license expires.
if (
this.accountManager.featureEnabledMultiSessionTypes &&
!this.KeePassRPC.eventSessionManagerIsActive
) {
this.inviteKeeVaultConnection();
}
});
browser.runtime.onConnect.addListener(this.onPortConnected);
this.networkAuth.startListening();
await browser.privacy.services.passwordSavingEnabled.set({
value: false
});
if (browser.runtime.lastError != null) {
KeeLog.warn(
"KeeFox was unable to disable built-in password manager saving - confusion may ensue! " +
browser.runtime.lastError.message
);
}
}
notifyUser(notification: KeeNotification, nativeNotification?: NativeNotification) {
window.kee.removeUserNotifications((n: KeeNotification) => n.name != notification.name);
store.dispatch("addNotification", notification);
browser.browserAction.setIcon({
path: "common/images/highlight-48.png"
});
if (nativeNotification) {
browser.notifications.create({
type: "basic",
iconUrl: browser.extension.getURL("common/images/128.png"),
title: nativeNotification.title,
message: nativeNotification.message
});
} else {
if (configManager.current.notificationCountGeneric < 5) {
browser.notifications.create({
type: "basic",
iconUrl: browser.extension.getURL("common/images/128.png"),
title: $STR("notification_raised_title"),
message:
$STR("notification_yellow_background") +
"\n" +
$STR("notification_only_shown_some_times")
});
configManager.setASAP({
notificationCountGeneric: configManager.current.notificationCountGeneric + 1
});
}
}
}
removeUserNotifications(unlessTrue: (notification: KeeNotification) => boolean) {
store.dispatch("updateNotifications", store.state.notifications.filter(unlessTrue));
}
animateBrowserActionIcon(duration = 1200) {
// Firefox claims that a janky icon animation is less intrusive for users
// than a smoothly animated one and therefore will not develop the smooth
// animation support available in other browsers. Our user testing confirms
// this is not the case so where we are able to (i.e. not Firefox) we
// enable a nice smooth animation to subtly hint that they might want to
// click on the icon. We have to make the animation in Firefox much less subtle :-(
// https://bugzilla.mozilla.org/show_bug.cgi?format=default&id=1309347
this.animateIcon.start(duration, !__KeeIsRunningInAWebExtensionsBrowser);
}<|fim▁hole|> browser.browserAction.setIcon({ path: "common/images/48.png" });
}
shutdown() {
// These log messages never appear. Does this function even get executed?
KeeLog.debug("Kee module shutting down...");
// if (this.KeePassRPC != undefined && this.KeePassRPC != null)
// this.KeePassRPC..session.shutdown();
// if (this.regularKPRPCListenerQueueHandlerTimer != undefined && this.regularKPRPCListenerQueueHandlerTimer != null)
// clearInterval(this.regularKPRPCListenerQueueHandlerTimer);
// this.KeePassRPC = null;
KeeLog.debug("Kee module shut down.");
}
_keeBrowserStartup() {
KeeLog.debug("Kee initialising");
this.KeePassRPC = new jsonrpcClient();
KeeLog.info(
"Kee initialised OK although the connection to a KeePassRPC server is probably not established just yet..."
);
}
// Temporarily disable Kee. Used (for e.g.) when KeePass is shut down.
_pauseKee() {
KeeLog.debug("Pausing Kee.");
store.dispatch("updateKeePassDatabases", []);
store.dispatch("updateActiveKeePassDatabaseIndex", -1);
store.dispatch("updateConnected", false);
store.dispatch("updateConnectedWebsocket", false);
store.dispatch("updateCurrentSearchTerm", null);
store.dispatch("updateSearchResults", null);
try {
this.refreshFormStatus(Action.ResetForms);
} catch (e) {
KeeLog.error(
"Uncaught exception posting message in _pauseKee: " + e.message + " : " + e.stack
);
}
browser.browserAction.setBadgeText({ text: "OFF" });
browser.browserAction.setBadgeBackgroundColor({ color: "red" });
commandManager.setupContextMenuItems();
KeeLog.info("Kee paused.");
}
_refreshKPDB() {
this.getAllDatabases();
KeeLog.debug("Refresh of Kee's view of the KeePass database initiated.");
}
inviteKeeVaultConnection() {
if (this.vaultPort) {
this.vaultPort.postMessage({
protocol: VaultProtocol.Reconnect
} as VaultMessage);
}
}
updateKeePassDatabases(newDatabases: Database[]) {
//TODO:4: To improve performance we might need to determine if anything
// has actually changed before doing the dispatches and poking the
// current tab frames to find entries
let newDatabaseActiveIndex = -1;
for (let i = 0; i < newDatabases.length; i++) {
if (newDatabases[i].active) {
newDatabaseActiveIndex = i;
break;
}
}
store.dispatch("updateConnected", true);
store.dispatch("updateConnectedWebsocket", this.KeePassRPC.websocketSessionManagerIsActive);
store.dispatch("updateKeePassDatabases", newDatabases);
store.dispatch("updateActiveKeePassDatabaseIndex", newDatabaseActiveIndex);
store.dispatch("updateSearchResults", null);
store.dispatch("updateCurrentSearchTerm", null);
KeeLog.info("Number of databases open: " + newDatabases.length);
if (newDatabases.length > 0) {
browser.browserAction.setBadgeText({ text: "" });
browser.browserAction.setBadgeBackgroundColor({ color: "blue" });
} else {
browser.browserAction.setBadgeText({ text: "OFF" });
browser.browserAction.setBadgeBackgroundColor({ color: "orange" });
}
if (configManager.current.rememberMRUDB) {
const MRUFN = this.getDatabaseFileName();
if (MRUFN != null && MRUFN != undefined) configManager.current.keePassMRUDB = MRUFN;
configManager.save();
}
try {
this.refreshFormStatus(Action.DetectForms);
} catch (e) {
KeeLog.error(
"Uncaught exception posting message in updateKeePassDatabases: " +
e.message +
" : " +
e.stack
);
}
commandManager.setupContextMenuItems();
}
private refreshFormStatus(action: Action) {
window.kee.tabStates.forEach((ts, tabId) => {
//TODO:4: This should be equivalent but much faster than testing in the inner
// loop. Unless tabId does not equal port.sender.tab.id?
//if (tabId !== this.foregroundTabId) return;
ts.framePorts.forEach((port, key, map) => {
try {
if (port.sender.tab.id === this.foregroundTabId) {
port.postMessage({ action } as AddonMessage);
}
} catch (e) {
if (KeeLog && KeeLog.info) {
KeeLog.info(
"failed to request form field reset/update on tab " +
tabId +
". Assuming port is broken (possible browser bug) and deleting the port. " +
"Kee may no longer work in the affected tab, if indeed the tab even " +
"exists any more. The exception that caused this is: " +
e.message +
" : " +
e.stack
);
}
map.delete(key);
}
}, this);
}, this);
}
// if the MRU database is known, open that but otherwise send empty string which will cause user
// to be prompted to choose a DB to open
getKeePassFileNameToOpen() {
let databaseFileName = configManager.current.keePassDBToOpen;
if (databaseFileName == "" || this.isKeeVaultFileName(databaseFileName)) {
databaseFileName = configManager.current.keePassMRUDB;
}
return !this.isKeeVaultFileName(databaseFileName) ? databaseFileName : "";
}
getVaultFileNameToOpen() {
let databaseFileName = configManager.current.keePassDBToOpen;
if (databaseFileName == "" || !this.isKeeVaultFileName(databaseFileName)) {
databaseFileName = configManager.current.keePassMRUDB;
}
return this.isKeeVaultFileName(databaseFileName) ? databaseFileName : "";
}
isKeeVaultFileName(name: string) {
if (name.indexOf("-") === -1) return false;
if (name.indexOf("/") >= 0 || name.indexOf("\\") >= 0) return false;
return true;
}
openKeePass() {
const hasWebsocketDBs = store.state.KeePassDatabases.some(
db => db.sessionType === SessionType.Websocket
);
const supportsWebsocketFocus = store.state.KeePassDatabases.some(
db =>
db.sessionType === SessionType.Websocket &&
db.sessionFeatures.indexOf("KPRPC_OPEN_AND_FOCUS_DATABASE") >= 0
);
if (hasWebsocketDBs && !supportsWebsocketFocus) {
KeeLog.warn(
"Can't open KeePass because KeePassRPC version does not support KPRPC_OPEN_AND_FOCUS_DATABASE"
);
return;
}
this.selectDatabase(
this.getKeePassFileNameToOpen(),
!hasWebsocketDBs,
SessionType.Websocket
);
}
async loginToPasswordManager() {
const sessionType = await this.selectAndFocusDatabase(
this.getVaultFileNameToOpen(),
this.getKeePassFileNameToOpen()
);
if (sessionType !== SessionType.Websocket) {
const vaultTabs = await browser.tabs.query({
url: [
"https://keevault.pm/*",
"https://app-beta.kee.pm/*",
"https://app-dev.kee.pm/*"
]
});
if (vaultTabs && vaultTabs[0]) {
browser.tabs.update(vaultTabs[0].id, { active: true });
browser.windows.update(vaultTabs[0].windowId, { focused: true });
} else {
browser.tabs.create({
url: "https://keevault.pm/",
active: true
});
}
}
}
recordEntrySaveResult(saveType: "updated" | "created", entry?: Entry) {
if (!entry) {
store.dispatch("updateSaveEntryResult", {
result: "error",
receivedAt: new Date()
} as SaveEntryResult);
return false;
} else {
store.dispatch("updateSaveEntryResult", {
result: saveType,
receivedAt: new Date(),
fileName: entry.database.fileName,
uuid: entry.uuid
} as SaveEntryResult);
return true;
}
}
/*******************************************
/ These functions are essentially wrappers for the actions that
/ Kee needs to take against KeePass via the KeePassRPC plugin connection.
/*******************************************/
getDatabaseName(index) {
if (index == undefined) index = store.state.ActiveKeePassDatabaseIndex;
if (
store.state.KeePassDatabases.length > 0 &&
store.state.KeePassDatabases[index] != null &&
store.state.KeePassDatabases[index].root != null
) {
return store.state.KeePassDatabases[index].name;
} else return null;
}
getDatabaseFileName(index?) {
if (index == undefined) index = store.state.ActiveKeePassDatabaseIndex;
if (
store.state.KeePassDatabases.length > 0 &&
store.state.KeePassDatabases[index] != null &&
store.state.KeePassDatabases[index].root != null
) {
return store.state.KeePassDatabases[index].fileName;
} else return null;
}
selectDatabase(fileName, requestReturnFocus, sessionType?: SessionType) {
try {
this.KeePassRPC.selectDB(fileName, requestReturnFocus, sessionType);
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
selectAndFocusDatabase(vaultFileName: string, keepassFilename: string) {
try {
return this.KeePassRPC.selectAndFocusDatabase(vaultFileName, keepassFilename);
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
async addLogin(
entry: Entry,
parentUUID: string,
dbFileName: string,
clearSubmittedData: () => void
) {
try {
const newEntry = await this.KeePassRPC.addLogin(entry, parentUUID, dbFileName);
const success = this.recordEntrySaveResult("created", newEntry);
if (success) clearSubmittedData();
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
async updateLogin(
entry: Entry,
oldLoginUUID: string,
dbFileName: string,
clearSubmittedData: () => void
) {
try {
const changedEntry = await this.KeePassRPC.updateLogin(entry, oldLoginUUID, dbFileName);
const success = this.recordEntrySaveResult("updated", changedEntry);
if (success) clearSubmittedData();
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
getAllDatabases() {
try {
return this.KeePassRPC.getAllDatabases();
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
async findLogins(fullURL, httpRealm, uuid, dbFileName, freeText, username) {
try {
return this.KeePassRPC.findLogins(
fullURL,
httpRealm,
uuid,
dbFileName,
freeText,
username
);
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
launchLoginEditor(uuid, dbFileName) {
try {
this.KeePassRPC.launchLoginEditor(uuid, dbFileName);
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
launchGroupEditor(uuid, dbFileName) {
try {
this.KeePassRPC.launchGroupEditor(uuid, dbFileName);
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
async getPasswordProfiles() {
try {
return this.KeePassRPC.getPasswordProfiles();
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
async generatePassword(profileName: string, url: string) {
try {
return this.KeePassRPC.generatePassword(profileName, url);
} catch (e) {
KeeLog.error(
"Unexpected exception while connecting to KeePassRPC. Please inform the Kee team that they should be handling this exception: " +
e
);
throw e;
}
}
    // Could use multiple callback functions, but just one keeps KeePassRPC simpler.
    // This is only called once no matter how many windows are open, so functions
    // within need to handle all open windows. For now that just means every
    // window, although in future there may be a need to store a list of
    // relevant windows and call those instead.
KPRPCListener(sig) {
const sigTime = Date();
KeeLog.debug("Signal received by KPRPCListener (" + sig + ") @" + sigTime);
let executeNow = false;
let refresh = false;
switch (sig) {
case "0":
KeeLog.warn("KeePassRPC is requesting authentication [deprecated].");
break;
case "3":
KeeLog.info("KeePass' currently active DB is about to be opened.");
break;
case "4":
KeeLog.info("KeePass' currently active DB has just been opened.");
refresh = true;
break;
case "5":
KeeLog.info("KeePass' currently active DB is about to be closed.");
break;
case "6":
KeeLog.info("KeePass' currently active DB has just been closed.");
refresh = true;
break;
case "7":
KeeLog.info("KeePass' currently active DB is about to be saved.");
break;
case "8":
KeeLog.info("KeePass' currently active DB has just been saved.");
refresh = true;
break;
case "9":
KeeLog.info("KeePass' currently active DB is about to be deleted.");
break;
case "10":
KeeLog.info("KeePass' currently active DB has just been deleted.");
break;
case "11":
KeeLog.info("KeePass' active DB has been changed/selected.");
refresh = true;
break;
case "12":
KeeLog.info(
"KeePass is shutting down. [deprecated: Now inferred from connection loss]"
);
break;
default:
KeeLog.error("Invalid signal received by KPRPCListener (" + sig + ")");
break;
}
if (!refresh) return;
const now = new Date().getTime();
// If there is nothing in the queue at the moment we can process this callback straight away
if (!window.kee.processingCallback && window.kee.pendingCallback == "") {
KeeLog.debug("Signal executing now. @" + sigTime);
window.kee.processingCallback = true;
executeNow = true;
}
// Otherwise we need to add the action for this callback to a queue and leave it up to the regular callback processor to execute the action
if (refresh) {
if (executeNow) {
store.dispatch("updateLastKeePassRPCRefresh", now);
window.kee._refreshKPDB();
} else {
window.kee.pendingCallback = "_refreshKPDB";
}
}
KeeLog.debug("Signal handled or queued. @" + sigTime);
if (executeNow) {
//trigger any pending callback handler immediately rather than waiting for the timed handler to pick it up
try {
if (window.kee.pendingCallback == "_refreshKPDB") window.kee._refreshKPDB();
else KeeLog.debug("A pending signal was found and handled.");
} finally {
window.kee.pendingCallback = "";
window.kee.processingCallback = false;
}
KeeLog.debug("Signal handled. @" + sigTime);
}
}
RegularKPRPCListenerQueueHandler() {
// If there is nothing in the queue at the moment or we are already processing a callback, we give up for now
if (window.kee.processingCallback || window.kee.pendingCallback == "") return;
KeeLog.debug("RegularKPRPCListenerQueueHandler will execute the pending item now");
window.kee.processingCallback = true;
try {
if (window.kee.pendingCallback == "_refreshKPDB") window.kee._refreshKPDB();
} finally {
window.kee.pendingCallback = "";
window.kee.processingCallback = false;
}
KeeLog.debug("RegularKPRPCListenerQueueHandler has finished executing the item");
}
createTabStateIfMissing(tabId: number) {
if (!window.kee.tabStates.has(tabId)) {
window.kee.tabStates.set(tabId, new TabState());
}
}
deleteTabState(tabId: number) {
window.kee.tabStates.delete(tabId);
}
initiatePasswordGeneration() {
if (store.state.connected) {
const tabState = window.kee.tabStates.get(window.kee.foregroundTabId);
if (tabState) {
const framePort = tabState.framePorts.get(0);
if (framePort) {
framePort.postMessage({ action: Action.GeneratePassword });
return;
}
}
// Focussed on a Kee Vault tab or other tab we are not allowed to inject content scripts into
if (window.kee.vaultPort) {
window.kee.vaultPort.postMessage({
protocol: VaultProtocol.ShowGenerator
} as VaultMessage);
browser.tabs.update(window.kee.vaultPort.sender.tab.id, {
active: true
});
browser.windows.update(window.kee.vaultPort.sender.tab.windowId, { focused: true });
}
}
}
}<|fim▁end|>
|
resetBrowserActionColor() {
|
<|file_name|>data_export_script_new.py<|end_file_name|><|fim▁begin|>None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import os
# simplejson is a third-party library you need to install on Python 2.5
#import simplejson as json
# json comes bundled with Python 2.6+. Use one or the other.
import json
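# A minimal sketch (not part of the original script) of the version-agnostic
# import fallback described above; it assumes simplejson, when installed,
# exposes the same dumps/loads API as the stdlib json module.
try:
    import json  # bundled with Python 2.6 and later
except ImportError:
    import simplejson as json  # separate install needed on Python 2.5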
def run():
print "starting"
from receiver.models import Submission
from xformmanager.models import FormDefModel
# this part of the script walks through all the registered
# form definitions and bundles them with the original xsd
# schema for resubmission
domain = None
# you can manually set a single domain here. if you don't then
# all the data will be exported.
domain = "Grameen"
if domain:<|fim▁hole|> for schema in all_schemas:
print "processsing %s" % schema
file_loc = schema.xsd_file_location
print "xsd file: %s" % file_loc
if file_loc:
headers = {
"original-submit-time" : str(schema.submit_time),
"original-submit-ip" : str(schema.submit_ip),
"bytes-received" : schema.bytes_received,
"form-name" : schema.form_name,
"form-display-name" : schema.form_display_name,
"target-namespace" : schema.target_namespace,
"date-created" : str(schema.date_created),
"domain" : str(schema.get_domain)
}
dir, filename = os.path.split(file_loc)
new_dir = os.path.join(dir, "export")
if not os.path.exists(new_dir):
os.makedirs(new_dir)
write_file = os.path.join(new_dir, filename.replace(".xml", ".xsdexport"))
fout = open(write_file, 'w')
jsoned = json.dumps(headers)
print jsoned
fout.write(jsoned)
fout.write("\n\n")
xsd_file = open(file_loc, "r")
payload = xsd_file.read()
xsd_file.close()
fout.write(payload)
fout.close()
# this part of the script walks through all the submissions
# and bundles them in an exportable format with the original
# submitting IP and time, as well as a reference to the
# original post
#all_submissions = Submission.objects.all()
if domain:
all_submissions = Submission.objects.filter(domain__name__iexact=domain)
else:
all_submissions = Submission.objects.all()
for submission in all_submissions:
#print "processing %s (%s)" % (submission,submission.raw_post)
post_file = open(submission.raw_post, "r")
submit_time = str(submission.submit_time)
# first line is content type
content_type = post_file.readline().split(":")[1].strip()
# second line is content length
content_length = post_file.readline().split(":")[1].strip()
# third line is empty
post_file.readline()
# the rest is the actual body of the post
headers = { "content-type" : content_type,
"content-length" : content_length,
"time-received" : str(submission.submit_time),
"original-ip" : str(submission.submit_ip),
"domain" : submission.domain.name
}
# check the directory and create it if it doesn't exist
dir, filename = os.path.split(submission.raw_post)
new_dir = os.path.join(dir, "export")
if not os.path.exists(new_dir):
os.makedirs(new_dir)
# the format will be:
# {headers} (dict)
# (empty line)
# <body>
write_file = os.path.join(new_dir, filename.replace("postdata", "postexport"))
fout = open(write_file, 'w')
jsoned = json.dumps(headers)
fout.write(jsoned)
fout.write("\n\n")
try:
payload = post_file.read()
fout.write(payload)
except Exception:
print "error processing %s" % write_file
fout.close()
print "done"<|fim▁end|>
|
all_schemas = FormDefModel.objects.filter(domain__name__iexact=domain)
else:
all_schemas = FormDefModel.objects.all()
|
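# A self-contained sketch (not from the original repo) of reading back the
# export files written by run() above. The layout (a JSON header dict, a blank
# line, then the raw body) is taken from the comments in that script.
import json
def read_export(path):
    with open(path, "r") as fin:
        raw = fin.read()
    headers_blob, _, body = raw.partition("\n\n")
    return json.loads(headers_blob), body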
<|file_name|>test_code_in_readme.rs<|end_file_name|><|fim▁begin|>use std::{
fs,
fs::File,
io::{Read, Write},
path::Path,
};
use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag};
use flapigen::{CppConfig, Generator, JavaConfig, LanguageConfig};
use tempfile::tempdir;
#[test]
fn test_code_in_readme() {
let _ = env_logger::try_init();
let tests = parse_readme();
let tmp_dir = tempdir().expect("Can not create tmp dir");
println!("{}: tmp_dir {}", file!(), tmp_dir.path().display());
for test in &tests {
if test.text.contains("foreigner_class!") {
println!("{} with such code:\n{}", test.name, test.text);
fs::create_dir_all(&tmp_dir.path().join(&test.name)).unwrap();
let rust_path_src = tmp_dir.path().join(&test.name).join("test.rs.in");
let mut src = File::create(&rust_path_src).expect("can not create test.rs.in");
src.write_all(test.text.as_bytes()).unwrap();
let rust_path_dst = tmp_dir.path().join(&test.name).join("test.rs");
{
let java_path = tmp_dir.path().join(&test.name).join("java");
fs::create_dir_all(&java_path).unwrap();
let swig_gen = Generator::new(LanguageConfig::JavaConfig(JavaConfig::new(
java_path,
"com.example".into(),
)))
.with_pointer_target_width(64);
swig_gen.expand("flapigen_test_jni", &rust_path_src, &rust_path_dst);
}
{
let cpp_path = tmp_dir.path().join(&test.name).join("c++");
fs::create_dir_all(&cpp_path).unwrap();
let swig_gen = Generator::new(LanguageConfig::CppConfig(CppConfig::new(
cpp_path,
"com_example".into(),
)))<|fim▁hole|> }
}
struct CodeBlockInfo {
is_rust: bool,
should_panic: bool,
ignore: bool,
no_run: bool,
is_old_template: bool,
template: Option<String>,
}
#[derive(Debug)]
struct Test {
name: String,
text: String,
ignore: bool,
no_run: bool,
should_panic: bool,
template: Option<String>,
}
fn parse_readme() -> Vec<Test> {
let mut file = File::open(Path::new("../README.md")).expect("Can not open README");
let mut cnt = String::new();
file.read_to_string(&mut cnt).unwrap();
let parser = Parser::new(&cnt);
let mut test_number = 1;
let mut code_buffer = None;
let mut tests = Vec::new();
for event in parser {
match event {
Event::Start(Tag::CodeBlock(ref info)) => {
let code_block_info = parse_code_block_info(info);
if code_block_info.is_rust {
code_buffer = Some(Vec::new());
}
}
Event::Text(text) => {
if let Some(ref mut buf) = code_buffer {
buf.push(text.to_string());
}
}
Event::End(Tag::CodeBlock(ref info)) => {
let code_block_info = parse_code_block_info(info);
if let Some(buf) = code_buffer.take() {
tests.push(Test {
name: format!("test_{}", test_number),
text: buf.iter().fold(String::new(), |acc, x| acc + x.as_str()),
ignore: code_block_info.ignore,
no_run: code_block_info.no_run,
should_panic: code_block_info.should_panic,
template: code_block_info.template,
});
test_number += 1;
}
}
_ => (),
}
}
tests
}
fn parse_code_block_info(info: &CodeBlockKind) -> CodeBlockInfo {
// Same as rustdoc
let info: &str = match info {
CodeBlockKind::Indented => "",
CodeBlockKind::Fenced(x) => x.as_ref(),
};
let tokens = info.split(|c: char| !(c == '_' || c == '-' || c.is_alphanumeric()));
let mut seen_rust_tags = false;
let mut seen_other_tags = false;
let mut info = CodeBlockInfo {
is_rust: false,
should_panic: false,
ignore: false,
no_run: false,
is_old_template: false,
template: None,
};
for token in tokens {
match token {
"" => {}
"rust" => {
info.is_rust = true;
seen_rust_tags = true
}
"should_panic" => {
info.should_panic = true;
seen_rust_tags = true
}
"ignore" => {
info.ignore = true;
seen_rust_tags = true
}
"no_run" => {
info.no_run = true;
seen_rust_tags = true;
}
"skeptic-template" => {
info.is_old_template = true;
seen_rust_tags = true
}
_ if token.starts_with("skt-") => {
info.template = Some(token[4..].to_string());
seen_rust_tags = true;
}
_ => seen_other_tags = true,
}
}
info.is_rust &= !seen_other_tags || seen_rust_tags;
info
}<|fim▁end|>
|
.with_pointer_target_width(64);
swig_gen.expand("flapigen_test_c++", &rust_path_src, &rust_path_dst);
}
}
|
<|file_name|>ReadTimeoutStreamSourceConduit.java<|end_file_name|><|fim▁begin|>/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.conduits;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.concurrent.TimeUnit;
import io.undertow.UndertowLogger;
import io.undertow.UndertowMessages;
import io.undertow.UndertowOptions;
import io.undertow.server.OpenListener;
import io.undertow.util.WorkerUtils;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.IoUtils;
import org.xnio.Options;
import org.xnio.StreamConnection;
import org.xnio.XnioExecutor;
import org.xnio.channels.ReadTimeoutException;
import org.xnio.channels.StreamSinkChannel;
import org.xnio.conduits.AbstractStreamSourceConduit;
import org.xnio.conduits.ConduitStreamSourceChannel;
import org.xnio.conduits.ReadReadyHandler;
import org.xnio.conduits.StreamSourceConduit;
/**
* Wrapper for read timeout. This should always be the first wrapper applied to the underlying channel.
*
* @author Stuart Douglas
* @see org.xnio.Options#READ_TIMEOUT
*/
public final class ReadTimeoutStreamSourceConduit extends AbstractStreamSourceConduit<StreamSourceConduit> {
private XnioExecutor.Key handle;
private final StreamConnection connection;
private volatile long expireTime = -1;
private final OpenListener openListener;
private static final int FUZZ_FACTOR = 50; //we add 50ms to the timeout to make sure the underlying channel has actually timed out
private volatile boolean expired;
private final Runnable timeoutCommand = new Runnable() {
@Override
public void run() {
handle = null;
if (expireTime == -1) {
return;
}
long current = System.currentTimeMillis();
if (current < expireTime) {
//timeout has been bumped, re-schedule
handle = WorkerUtils.executeAfter(connection.getIoThread(),timeoutCommand, (expireTime - current) + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
return;
}
UndertowLogger.REQUEST_LOGGER.tracef("Timing out channel %s due to inactivity", connection.getSourceChannel());
synchronized (ReadTimeoutStreamSourceConduit.this) {
expired = true;
}
boolean readResumed = connection.getSourceChannel().isReadResumed();
ChannelListener<? super ConduitStreamSourceChannel> readListener = connection.getSourceChannel().getReadListener();
if (readResumed) {
ChannelListeners.invokeChannelListener(connection.getSourceChannel(), readListener);
}
if (connection.getSinkChannel().isWriteResumed()) {
ChannelListeners.invokeChannelListener(connection.getSinkChannel(), connection.getSinkChannel().getWriteListener());
}
// close only after invoking listeners, to allow space for listener getting ReadTimeoutException
IoUtils.safeClose(connection);
}
};
public ReadTimeoutStreamSourceConduit(final StreamSourceConduit delegate, StreamConnection connection, OpenListener openListener) {
super(delegate);
this.connection = connection;
this.openListener = openListener;
final ReadReadyHandler handler = new ReadReadyHandler.ChannelListenerHandler<>(connection.getSourceChannel());
delegate.setReadReadyHandler(new ReadReadyHandler() {
@Override
public void readReady() {
handler.readReady();
}
@Override
public void forceTermination() {
cleanup();
handler.forceTermination();
}
@Override
public void terminated() {
cleanup();
handler.terminated();
}
});
}
private void handleReadTimeout(final long ret) throws IOException {
if (!connection.isOpen()) {
cleanup();
return;
}
if (ret == -1) {
cleanup();
return;
}
Integer timeout = getTimeout();
if (timeout == null || timeout <= 0) {
return;
}
final long currentTime = System.currentTimeMillis();
if (ret == 0) {
final long expireTimeVar = expireTime;
if (expireTimeVar != -1 && currentTime > expireTimeVar) {
IoUtils.safeClose(connection);
throw UndertowMessages.MESSAGES.readTimedOut(this.getTimeout());
}
}
expireTime = currentTime + timeout;
if (handle == null) {
handle = connection.getIoThread().executeAfter(timeoutCommand, timeout, TimeUnit.MILLISECONDS);
}
}
@Override
public long transferTo(final long position, final long count, final FileChannel target) throws IOException {
checkExpired();
long ret = super.transferTo(position, count, target);
handleReadTimeout(ret);
return ret;
}
@Override
public long transferTo(final long count, final ByteBuffer throughBuffer, final StreamSinkChannel target) throws IOException {
checkExpired();
long ret = super.transferTo(count, throughBuffer, target);
handleReadTimeout(ret);
return ret;
}
@Override
public long read(final ByteBuffer[] dsts, final int offset, final int length) throws IOException {
checkExpired();
long ret = super.read(dsts, offset, length);
handleReadTimeout(ret);
return ret;
}
@Override
public int read(final ByteBuffer dst) throws IOException {
checkExpired();
int ret = super.read(dst);
handleReadTimeout(ret);
return ret;
}
@Override
public void awaitReadable() throws IOException {
checkExpired();
Integer timeout = getTimeout();
if (timeout != null && timeout > 0) {
super.awaitReadable(timeout + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
} else {
super.awaitReadable();
}
}
@Override
public void awaitReadable(long time, TimeUnit timeUnit) throws IOException {
checkExpired();
Integer timeout = getTimeout();
if (timeout != null && timeout > 0) {
long millis = timeUnit.toMillis(time);
super.awaitReadable(Math.min(millis, timeout + FUZZ_FACTOR), TimeUnit.MILLISECONDS);
} else {
super.awaitReadable(time, timeUnit);
}
}
private Integer getTimeout() {
Integer timeout = 0;
try {
timeout = connection.getSourceChannel().getOption(Options.READ_TIMEOUT);
} catch (IOException ignore) {
// should never happen
}
Integer idleTimeout = openListener.getUndertowOptions().get(UndertowOptions.IDLE_TIMEOUT);
if ((timeout == null || timeout <= 0) && idleTimeout != null) {
timeout = idleTimeout;
} else if (timeout != null && idleTimeout != null && idleTimeout > 0) {
timeout = Math.min(timeout, idleTimeout);
}<|fim▁hole|> @Override
public void terminateReads() throws IOException {
checkExpired();
super.terminateReads();
cleanup();
}
private void cleanup() {
if (handle != null) {
handle.remove();
handle = null;
expireTime = -1;
}
}
@Override
public void suspendReads() {
super.suspendReads();
cleanup();
}
private void checkExpired() throws ReadTimeoutException {
synchronized (this) {
if (expired) {
throw UndertowMessages.MESSAGES.readTimedOut(System.currentTimeMillis());
}
}
}
public String toString() {
return super.toString() + " (next: " + next + ")";
}
}<|fim▁end|>
|
return timeout;
}
|
<|file_name|>train.py<|end_file_name|><|fim▁begin|>from __future__ import division
from itertools import chain
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pandas as pd
from fisher import pvalue
import re
import collections
from nltk.stem.porter import PorterStemmer
import math
from percept.tasks.base import Task
from percept.fields.base import Complex, List, Dict, Float
from inputs.inputs import SimpsonsFormats
from percept.utils.models import RegistryCategories, get_namespace
from percept.conf.base import settings
import os
from percept.tasks.train import Train
from sklearn.ensemble import RandomForestClassifier
import pickle
import random
import logging
log = logging.getLogger(__name__)
MAX_FEATURES = 500
DISTANCE_MIN=1
CHARACTER_DISTANCE_MIN = .2
RESET_SCENE_EVERY = 5
def make_df(datalist, labels, name_prefix=""):
    """Turn a list of equal-length columns into a DataFrame with prefixed, lower-cased labels."""
df = pd.DataFrame(datalist).T
if name_prefix!="":
labels = [name_prefix + "_" + l for l in labels]
labels = [l.replace(" ", "_").lower() for l in labels]
df.columns = labels
df.index = range(df.shape[0])
return df
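# Illustrative usage of make_df (an assumption, not part of the original
# pipeline): two feature columns become a frame whose columns are named
# ep_word_count and ep_score.
def _example_make_df():
    return make_df([[1, 2], [3.0, 4.0]], ["Word Count", "Score"], name_prefix="ep")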
def return_one():
return 1
class SpellCorrector(object):
"""
    Taken and slightly adapted from Peter Norvig's post at http://norvig.com/spell-correct.html
"""
alphabet = 'abcdefghijklmnopqrstuvwxyz'
punctuation = [".", "!", "?", ","]
def __init__(self):
self.NWORDS = self.train(self.words(file(os.path.join(settings.PROJECT_PATH,'data/big.txt')).read()))
self.cache = {}
def words(self, text):
return re.findall('[a-z]+', text.lower())
def train(self, features):
model = collections.defaultdict(return_one)
for f in features:
model[f] += 1
return model
def edits1(self, word):
splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]
deletes = [a + b[1:] for a, b in splits if b]
transposes = [a + b[1] + b[0] + b[2:] for a, b in splits if len(b)>1]
replaces = [a + c + b[1:] for a, b in splits for c in self.alphabet if b]
inserts = [a + c + b for a, b in splits for c in self.alphabet]
return set(deletes + transposes + replaces + inserts)
def known_edits2(self, word):
return set(e2 for e1 in self.edits1(word) for e2 in self.edits1(e1) if e2 in self.NWORDS)
def known(self, words): return set(w for w in words if w in self.NWORDS)
def correct(self, word):
if word in self.cache:
return self.cache[word]
suffix = ""
for p in self.punctuation:
if word.endswith(p):
suffix = p
word = word[:-1]
candidates = self.known([word]) or self.known(self.edits1(word)) or self.known_edits2(word) or [word]
newword = max(candidates, key=self.NWORDS.get) + suffix
self.cache.update({word : newword})
return newword
class Vectorizer(object):
def __init__(self):
self.fit_done = False
def fit(self, input_text, input_scores, max_features=100, min_features=3):
self.spell_corrector = SpellCorrector()
self.stemmer = PorterStemmer()
new_text = self.batch_generate_new_text(input_text)
input_text = [input_text[i] + new_text[i] for i in xrange(0,len(input_text))]
self.vectorizer1 = CountVectorizer(ngram_range=(1,2), min_df = min_features/len(input_text), max_df=.4, stop_words="english")
self.vectorizer1.fit(input_text)
self.vocab = self.get_vocab(input_text, input_scores, max_features)
self.vectorizer = CountVectorizer(ngram_range=(1,2), vocabulary=self.vocab)
self.fit_done = True
self.input_text = input_text
def spell_correct_text(self, text):
text = text.lower()
split = text.split(" ")
corrected = [self.spell_corrector.correct(w) for w in split]
return corrected
def batch_apply(self, all_tokens, applied_func):
for key in all_tokens:
cor = applied_func(all_tokens[key])
all_tokens[key] = cor
return all_tokens
def batch_generate_new_text(self, text):
text = [re.sub("[^A-Za-z0-9]", " ", t.lower()) for t in text]
text = [re.sub("\s+", " ", t) for t in text]
t_tokens = [t.split(" ") for t in text]
all_token_list = list(set(chain.from_iterable(t_tokens)))
all_token_dict = {}
for t in all_token_list:
all_token_dict.update({t : t})
all_token_dict = self.batch_apply(all_token_dict, self.stemmer.stem)
all_token_dict = self.batch_apply(all_token_dict, self.stemmer.stem)
for i in xrange(0,len(t_tokens)):
for j in xrange(0,len(t_tokens[i])):
t_tokens[i][j] = all_token_dict.get(t_tokens[i][j], t_tokens[i][j])
new_text = [" ".join(t) for t in t_tokens]
return new_text
def generate_new_text(self, text):
no_punctuation = re.sub("[^A-Za-z0-9]", " ", text.lower())
no_punctuation = re.sub("\s+", " ", no_punctuation)
corrected = self.spell_correct_text(no_punctuation)
corrected = [self.stemmer.stem(w) for w in corrected]
new = " ".join(corrected)
return new
def get_vocab(self, input_text, input_scores, max_features):
train_mat = self.vectorizer1.transform(input_text)
input_score_med = np.median(input_scores)
new_scores = [0 if i<=input_score_med else 1 for i in input_scores]
ind_max_features = math.floor(max_features/max(input_scores))
all_vocab = []
all_cols = [np.asarray(train_mat.getcol(i).todense().transpose())[0] for i in xrange(0,train_mat.shape[1])]
for s in xrange(0,max(input_scores)):
sel_inds = [i for i in xrange(0,len(input_scores)) if input_scores[i]==s]
out_inds = [i for i in xrange(0,len(input_scores)) if input_scores[i]!=s]
pvalues = []
for i in xrange(0,len(all_cols)):
lcol = all_cols[i]
good_lcol = lcol[sel_inds]
bad_lcol = lcol[out_inds]
good_lcol_present = len(good_lcol[good_lcol > 0])
good_lcol_missing = len(good_lcol[good_lcol == 0])
bad_lcol_present = len(bad_lcol[bad_lcol > 0])
bad_lcol_missing = len(bad_lcol[bad_lcol == 0])
pval = pvalue(good_lcol_present, bad_lcol_present, good_lcol_missing, bad_lcol_missing)
pvalues.append(pval.two_tail)
col_inds = list(xrange(0,train_mat.shape[1]))
p_frame = pd.DataFrame(np.array([col_inds, pvalues]).transpose(), columns=["inds", "pvalues"])
p_frame = p_frame.sort(['pvalues'], ascending=True)
getVar = lambda searchList, ind: [searchList[int(i)] for i in ind]
vocab = getVar(self.vectorizer1.get_feature_names(), p_frame['inds'][:ind_max_features+2])
all_vocab.append(vocab)
return list(set(list(chain.from_iterable(all_vocab))))
def batch_get_features(self, text):
if not self.fit_done:
raise Exception("Vectorizer has not been created.")
new_text = self.batch_generate_new_text(text)
text = [text[i] + new_text[i] for i in xrange(0,len(text))]
return (self.vectorizer.transform(text).todense())
def get_features(self, text):
if not self.fit_done:
raise Exception("Vectorizer has not been created.")
itext=text
if isinstance(text, list):
itext = text[0]
new_text = self.generate_new_text(itext)
if isinstance(text, list):
text = [text[0] + new_text]
else:
text = [text + new_text]
return (self.vectorizer.transform(text).todense())
class FeatureExtractor(Task):
data = Complex()
row_data = List()
speaker_code_dict = Dict()
speaker_codes = List()
vectorizer = Complex()
data_format = SimpsonsFormats.dataframe
category = RegistryCategories.preprocessors
namespace = get_namespace(__module__)
help_text = "Cleanup simpsons scripts."
args = {'scriptfile' : os.path.abspath(os.path.join(settings.DATA_PATH, "script_tasks"))}
def train(self, data, target, **kwargs):
"""
Used in the training phase. Override.
"""
self.data = self.predict(data, **kwargs)
def predict(self, data, **kwargs):
"""
Used in the predict phase, after training. Override
"""
scriptfile = kwargs.get('scriptfile')
script_data = pickle.load(open(scriptfile))
script = script_data.tasks[2].voice_lines.value
speakers = []
lines = []
for s in script:
for (i,l) in enumerate(s):
if i>0:
previous_line = s[i-1]['line']
previous_speaker = s[i-1]['speaker']
else:
previous_line = ""
previous_speaker = ""
if i>1:
two_back_speaker = s[i-2]['speaker']
else:
two_back_speaker = ""
if len(s)>i+1:
next_line = s[i+1]['line']
else:
next_line = ""
current_line = s[i]['line']<|fim▁hole|> speakers.append(current_speaker)
row_data = {
'previous_line' : previous_line,
'previous_speaker' : previous_speaker,
'next_line' : next_line,
'current_line' : current_line,
'current_speaker' : current_speaker,
'two_back_speaker' : two_back_speaker
}
self.row_data.append(row_data)
self.speaker_code_dict = {k:i for (i,k) in enumerate(list(set(speakers)))}
self.speaker_codes = [self.speaker_code_dict[s] for s in speakers]
self.max_features = math.floor(MAX_FEATURES)/3
self.vectorizer = Vectorizer()
self.vectorizer.fit(lines, self.speaker_codes, self.max_features)
prev_features = self.vectorizer.batch_get_features([rd['previous_line'] for rd in self.row_data])
cur_features = self.vectorizer.batch_get_features([rd['current_line'] for rd in self.row_data])
next_features = self.vectorizer.batch_get_features([rd['next_line'] for rd in self.row_data])
self.speaker_code_dict.update({'' : -1})
meta_features = make_df([[self.speaker_code_dict[s['two_back_speaker']] for s in self.row_data], [self.speaker_code_dict[s['previous_speaker']] for s in self.row_data], self.speaker_codes],["two_back_speaker", "previous_speaker", "current_speaker"])
#meta_features = make_df([[self.speaker_code_dict[s['two_back_speaker']] for s in self.row_data], self.speaker_codes],["two_back_speaker", "current_speaker"])
train_frame = pd.concat([pd.DataFrame(prev_features),pd.DataFrame(cur_features),pd.DataFrame(next_features),meta_features],axis=1)
train_frame.index = range(train_frame.shape[0])
data = {
'vectorizer' : self.vectorizer,
'speaker_code_dict' : self.speaker_code_dict,
'train_frame' : train_frame,
'speakers' : make_df([speakers,self.speaker_codes, lines], ["speaker", "speaker_code", "line"]),
'data' : data,
'current_features' : cur_features,
}
return data
class RandomForestTrain(Train):
"""
A class to train a random forest
"""
colnames = List()
clf = Complex()
category = RegistryCategories.algorithms
namespace = get_namespace(__module__)
algorithm = RandomForestClassifier
args = {'n_estimators' : 300, 'min_samples_leaf' : 4, 'compute_importances' : True}
help_text = "Train and predict with Random Forest."
class KNNRF(Task):
data = Complex()
predictions = Complex()
importances = Complex()
data_format = SimpsonsFormats.dataframe
category = RegistryCategories.preprocessors
namespace = get_namespace(__module__)
args = {'algo' : RandomForestTrain}
help_text = "Cleanup simpsons scripts."
def train(self, data, target, **kwargs):
"""
Used in the training phase. Override.
"""
self.data = self.predict(data, **kwargs)
def predict(self, data, **kwargs):
"""
Used in the predict phase, after training. Override
"""
from preprocess import CHARACTERS
vec_length = math.floor(MAX_FEATURES/3)
algo = kwargs.get('algo')
alg = algo()
train_data = data['train_frame'].iloc[:,:-1]
target = data['train_frame']['current_speaker']
clf = alg.train(train_data,target, **algo.args)
self.importances=clf.feature_importances_
test_data = data['data']
match_data = data['current_features']
reverse_speaker_code_dict = {data['speaker_code_dict'][k] : k for k in data['speaker_code_dict']}
speaker_list = []
speaker_codes = reverse_speaker_code_dict.keys()
for i in xrange(0,len(speaker_codes)):
s_text = "\n".join(list(data['speakers'][data['speakers']['speaker']==reverse_speaker_code_dict[speaker_codes[i]]]['line']))
speaker_list.append(s_text)
speaker_features = data['vectorizer'].batch_get_features(speaker_list)
self.predictions = []
counter = 0
for script in test_data['voice_script']:
counter+=1
log.info("On script {0} out of {1}".format(counter,len(test_data['voice_script'])))
lines = script.split("\n")
speaker_code = [-1 for i in xrange(0,len(lines))]
for (i,line) in enumerate(lines):
if i>0 and i%RESET_SCENE_EVERY!=0:
previous_line = lines[i-1]
previous_speaker = speaker_code[i-1]
else:
previous_line = ""
previous_speaker= -1
if i>1 and i%RESET_SCENE_EVERY!=0:
two_back_speaker = speaker_code[i-2]
else:
two_back_speaker = -1
if i<(len(lines)-1):
next_line = lines[i+1]
else:
next_line = ""
prev_features = data['vectorizer'].get_features(previous_line)
cur_features = data['vectorizer'].get_features(line)
next_features = data['vectorizer'].get_features(next_line)
meta_features = make_df([[two_back_speaker], [previous_speaker]],["two_back_speaker", "previous_speaker"])
#meta_features = make_df([[two_back_speaker]],["two_back_speaker"])
train_frame = pd.concat([pd.DataFrame(prev_features),pd.DataFrame(cur_features),pd.DataFrame(next_features), meta_features],axis=1)
speaker_code[i] = alg.predict(train_frame)[0]
nearest_match, distance = self.find_nearest_match(cur_features, speaker_features)
if distance<CHARACTER_DISTANCE_MIN:
sc = speaker_codes[nearest_match]
speaker_code[i] = sc
continue
for k in CHARACTERS:
for c in CHARACTERS[k]:
if c in previous_line:
speaker_code[i] = data['speaker_code_dict'][k]
nearest_match, distance = self.find_nearest_match(cur_features,match_data)
if distance<DISTANCE_MIN:
sc = data['speakers']['speaker_code'][nearest_match]
speaker_code[i] = sc
continue
df = make_df([lines,speaker_code,[reverse_speaker_code_dict[round(s)] for s in speaker_code]],["line","speaker_code","speaker"])
self.predictions.append(df)
return data
def find_nearest_match(self, features, matrix):
features = np.asarray(features)
distances = [self.euclidean(u, features) for u in matrix]
nearest_match = distances.index(min(distances))
return nearest_match, min(distances)
def euclidean(self, v1, v2):
return np.sqrt(np.sum(np.square(np.subtract(v1,v2))))
"""
p = tasks[3].predictions.value
speakers = []
lines = []
for pr in p:
speakers.append(list(pr['speaker']))
lines.append(list(pr['line']))
from itertools import chain
speakers = list(chain.from_iterable(speakers))
lines = list(chain.from_iterable(lines))
rows = []
for (s,l) in zip(speakers, lines):
rows.append({
'speaker' : s,
'line': l,
})
import json
json.dump(rows,open("/home/vik/vikparuchuri/simpsons-scripts/data/final_voice.json","w+"))
"""<|fim▁end|>
|
current_speaker = s[i]['speaker']
lines.append(current_line)
|
<|file_name|>tee.rs<|end_file_name|><|fim▁begin|>//#![deny(warnings)]
extern crate coreutils;
use coreutils::ArgParser;
use std::{process, env};
use std::io::{self, Read, Write};
const MAN_PAGE: &'static str = /* @MANSTART{tee} */ r#"<|fim▁hole|> tee [OPTION]... [FILE]...
DESCRIPTION
Copy standard input to each FILE, and also to standard output.
-a, --append
append to given FILEs, do not overwrite
--help display this help and exit
AUTHOR
Written by Stefan Lücke.
"#; /* @MANEND */
fn main() {
let mut parser = ArgParser::new(2).
add_flag(&["a", "append"]).
add_flag(&["h", "help"]);
parser.parse(env::args());
let mut stdout = io::stdout();
if parser.found("help") {
stdout.write_all(MAN_PAGE.as_bytes()).unwrap();
stdout.flush().unwrap();
process::exit(0);
}
let mut fds: Vec<std::fs::File> = Vec::with_capacity(env::args().len());
if parser.found("append") {
let args = env::args().skip(2);
if args.len() > 0 {
for arg in args {
let fd = std::fs::OpenOptions::new().append(true).open(arg);
match fd {
Ok(f) => fds.push(f),
Err(e) => println!("Err(e): {}", e),
}
}
}
} else {
let args = env::args().skip(1);
if args.len() > 0 {
for arg in args {
let fd = std::fs::OpenOptions::new().write(true).create(true).open(arg);
match fd {
Ok(f) => fds.push(f),
Err(e) => println!("Err(e): {}", e),
}
}
}
}
let stdintemp = io::stdin();
let mut stdin = stdintemp.lock();
let mut buffer: &mut [u8] = &mut[0 as u8; 4096];
'programmloop: loop {
let result_read = stdin.read(buffer);
match result_read {
Ok(size) => {
if size == 0 {
// we've read a EOF here
break;
}
let result_write = stdout.write(&mut buffer[0..size]);
match result_write {
Ok(_) => (),
Err(e) => {
println!("Err(e): {}", e);
break 'programmloop;
},
};
// iterate over open files
'writeloop: for mut f in &mut fds {
let result_write = f.write(&mut buffer[0..size]);
match result_write {
Ok(_) => (),
Err(e) => {
println!("Err(e): {}", e);
break 'programmloop;
},
};
}
},
Err(e) => {
println!("Err(e): {}", e);
break;
},
};
}
stdout.flush().unwrap();
process::exit(0);
}<|fim▁end|>
|
NAME
tee - read from standard input and write to standard output and files
SYNOPSIS
|
<|file_name|>eye.py<|end_file_name|><|fim▁begin|># Speak.activity
# A simple front end to the espeak text-to-speech engine on the XO laptop
# http://wiki.laptop.org/go/Speak
#
# Copyright (C) 2008 Joshua Minor
# This file is part of Speak.activity
#
# Parts of Speak.activity are based on code from Measure.activity
# Copyright (C) 2007 Arjun Sarwal - [email protected]
#
# Speak.activity is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Speak.activity is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Speak.activity. If not, see <http://www.gnu.org/licenses/>.
import math
from gi.repository import Gtk
class Eye(Gtk.DrawingArea):
def __init__(self, fill_color):
Gtk.DrawingArea.__init__(self)
self.connect("draw", self.draw)
self.x, self.y = 0, 0
self.fill_color = fill_color
def has_padding(self):
return True
def has_left_center_right(self):
return False
def look_at(self, x, y):
self.x = x
self.y = y
self.queue_draw()
def look_ahead(self):
self.x = None
self.y = None
self.queue_draw()
# Thanks to xeyes :)
def computePupil(self):
a = self.get_allocation()
if self.x is None or self.y is None:
# look ahead, but not *directly* in the middle
pw = self.get_parent().get_allocation().width
if a.x + a.width // 2 < pw // 2:
cx = a.width * 0.6
else:
cx = a.width * 0.4
return cx, a.height * 0.6
EYE_X, EYE_Y = self.translate_coordinates(
self.get_toplevel(), a.width // 2, a.height // 2)
EYE_HWIDTH = a.width
EYE_HHEIGHT = a.height
BALL_DIST = EYE_HWIDTH / 4
dx = self.x - EYE_X
dy = self.y - EYE_Y
if dx or dy:
angle = math.atan2(dy, dx)
cosa = math.cos(angle)
sina = math.sin(angle)<|fim▁hole|> dist = BALL_DIST * math.hypot(x, y)
if dist < math.hypot(dx, dy):
dx = dist * cosa
dy = dist * sina
return a.width // 2 + dx, a.height // 2 + dy
def draw(self, widget, cr):
bounds = self.get_allocation()
eyeSize = min(bounds.width, bounds.height)
outlineWidth = eyeSize / 20.0
pupilSize = eyeSize / 10.0
pupilX, pupilY = self.computePupil()
dX = pupilX - bounds.width / 2.
dY = pupilY - bounds.height / 2.
distance = math.sqrt(dX * dX + dY * dY)
limit = eyeSize // 2 - outlineWidth * 2 - pupilSize
if distance > limit:
pupilX = bounds.width // 2 + dX * limit // distance
pupilY = bounds.height // 2 + dY * limit // distance
# background
cr.set_source_rgba(*self.fill_color.get_rgba())
cr.rectangle(0, 0, bounds.width, bounds.height)
cr.fill()
# eye ball
cr.arc(bounds.width // 2, bounds.height // 2,
eyeSize // 2 - outlineWidth // 2, 0, 2 * math.pi)
cr.set_source_rgb(1, 1, 1)
cr.fill()
# outline
cr.set_line_width(outlineWidth)
cr.arc(bounds.width // 2, bounds.height // 2,
eyeSize // 2 - outlineWidth // 2, 0, 2 * math.pi)
cr.set_source_rgb(0, 0, 0)
cr.stroke()
# pupil
cr.arc(pupilX, pupilY, pupilSize, 0, 2 * math.pi)
cr.set_source_rgb(0, 0, 0)
cr.fill()
return True<|fim▁end|>
|
h = math.hypot(EYE_HHEIGHT * cosa, EYE_HWIDTH * sina)
x = (EYE_HWIDTH * EYE_HHEIGHT) * cosa / h
y = (EYE_HWIDTH * EYE_HHEIGHT) * sina / h
|
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
class CheckoutAppConfig(AppConfig):
name = 'ecommerce.extensions.checkout'
verbose_name = 'Checkout'
def ready(self):
super(CheckoutAppConfig, self).ready()
<|fim▁hole|> import ecommerce.extensions.checkout.signals # pylint: disable=unused-variable<|fim▁end|>
|
# noinspection PyUnresolvedReferences
|
<|file_name|>call-closure-from-overloaded-op.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// except according to those terms.
fn foo() -> int { 22 }
pub fn main() {
let mut x: Vec<extern "Rust" fn() -> int> = Vec::new();
x.push(foo);
assert_eq!((x[0])(), 22);
}<|fim▁end|>
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
<|file_name|>mission.js<|end_file_name|><|fim▁begin|>define("view/mission",
["jquery", "laces.tie", "lodash", "view", "tmpl/joboutput", "tmpl/mission"],
function($, Laces, _, View, tmpl) {
"use strict";
return View.extend({
initialize: function(options) {
this.mission = options.mission;
this.mission.jobs.on("add", this._onNewJobs, { context: this });
this.subscribe("server-push:missions:job-output", this._onJobOutput);
this.$jobs = null;
},
events: {
"click .action-expand-job": "_expandJob"
},
remove: function() {
this.mission.jobs.off("add", this._onNewJobs);
},
render: function() {
var lastJob = this.mission.jobs[this.mission.jobs.length - 1];
if (lastJob) {
lastJob.expanded = true;
lastJob.fetchResults({ context: this }).then(function() {
var tie = new Laces.Tie(lastJob, tmpl.joboutput);
var $jobOutput = this.$(".js-job-output[data-job-id='" + lastJob.id + "']");
$jobOutput.replaceWith(tie.render());
});
}
var tie = new Laces.Tie(this.mission, tmpl.mission);
this.$el.html(tie.render());
this.$jobs = this.$(".js-jobs");
_.each(this.mission.jobs, _.bind(this._renderJob, this));
<|fim▁hole|> },
_expandJob: function(event) {
var jobId = this.targetData(event, "job-id");
var job = _.find(this.mission.jobs, { id: jobId });
job.expanded = !job.expanded;
if (job.expanded) {
job.fetchResults();
}
},
_onNewJobs: function(event) {
_.each(event.elements, _.bind(this._renderJob, this));
},
_onJobOutput: function(data) {
if (data.missionId === this.mission.id) {
var job = _.find(this.mission.jobs, { id: data.jobId });
if (job && job.expanded) {
var $output = this.$(".js-job-output[data-job-id=" + $.jsEscape(data.jobId) +
"] .js-output");
if ($output.length) {
$output[0].innerHTML += $.colored(data.output);
}
}
}
},
_renderJob: function(job) {
if (this.$jobs) {
var tie = new Laces.Tie(job, tmpl.joboutput);
this.$jobs.prepend(tie.render());
}
}
});
});<|fim▁end|>
|
return this.$el;
|
<|file_name|>mark_tasks.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from airflow import models
from airflow.api.common.experimental.mark_tasks import set_state, _create_dagruns
from airflow.settings import Session
from airflow.utils.dates import days_ago
from airflow.utils.state import State
DEV_NULL = "/dev/null"
class TestMarkTasks(unittest.TestCase):
def setUp(self):
self.dagbag = models.DagBag(include_examples=True)
self.dag1 = self.dagbag.dags['test_example_bash_operator']
self.dag2 = self.dagbag.dags['example_subdag_operator']
self.execution_dates = [days_ago(2), days_ago(1)]
drs = _create_dagruns(self.dag1, self.execution_dates,
state=State.RUNNING,
run_id_template="scheduled__{}")
for dr in drs:
dr.dag = self.dag1
dr.verify_integrity()
drs = _create_dagruns(self.dag2,
[self.dag2.default_args['start_date']],
state=State.RUNNING,
run_id_template="scheduled__{}")
for dr in drs:
dr.dag = self.dag2
dr.verify_integrity()
self.session = Session()
def snapshot_state(self, dag, execution_dates):
TI = models.TaskInstance
tis = self.session.query(TI).filter(
TI.dag_id==dag.dag_id,
TI.execution_date.in_(execution_dates)
).all()
self.session.expunge_all()
return tis
def verify_state(self, dag, task_ids, execution_dates, state, old_tis):
TI = models.TaskInstance
tis = self.session.query(TI).filter(
TI.dag_id==dag.dag_id,
TI.execution_date.in_(execution_dates)
).all()
self.assertTrue(len(tis) > 0)
for ti in tis:
if ti.task_id in task_ids and ti.execution_date in execution_dates:
self.assertEqual(ti.state, state)
else:
for old_ti in old_tis:
if (old_ti.task_id == ti.task_id
and old_ti.execution_date == ti.execution_date):
self.assertEqual(ti.state, old_ti.state)
def test_mark_tasks_now(self):
# set one task to success but do not commit
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=False)
self.assertEqual(len(altered), 1)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
None, snapshot)
# set one and only one task to success
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 1)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
State.SUCCESS, snapshot)
# set no tasks
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 0)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
State.SUCCESS, snapshot)
# set task to other than success
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.FAILED, commit=True)
self.assertEqual(len(altered), 1)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
State.FAILED, snapshot)
# don't alter other tasks
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_0")
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 1)
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
State.SUCCESS, snapshot)<|fim▁hole|> def test_mark_downstream(self):
# test downstream
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
relatives = task.get_flat_relatives(upstream=False)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=True, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 3)
self.verify_state(self.dag1, task_ids, [self.execution_dates[0]],
State.SUCCESS, snapshot)
def test_mark_upstream(self):
# test upstream
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("run_after_loop")
relatives = task.get_flat_relatives(upstream=True)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=True, downstream=False, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 4)
self.verify_state(self.dag1, task_ids, [self.execution_dates[0]],
State.SUCCESS, snapshot)
def test_mark_tasks_future(self):
# set one task to success towards end of scheduled dag runs
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=False, future=True,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 2)
self.verify_state(self.dag1, [task.task_id], self.execution_dates,
State.SUCCESS, snapshot)
def test_mark_tasks_past(self):
# set one task to success towards end of scheduled dag runs
snapshot = self.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
altered = set_state(task=task, execution_date=self.execution_dates[1],
upstream=False, downstream=False, future=False,
past=True, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 2)
self.verify_state(self.dag1, [task.task_id], self.execution_dates,
State.SUCCESS, snapshot)
def test_mark_tasks_subdag(self):
# set one task to success towards end of scheduled dag runs
task = self.dag2.get_task("section-1")
relatives = task.get_flat_relatives(upstream=False)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
altered = set_state(task=task, execution_date=self.execution_dates[0],
upstream=False, downstream=True, future=False,
past=False, state=State.SUCCESS, commit=True)
self.assertEqual(len(altered), 14)
# cannot use snapshot here as that would require drilling down the
# sub dag tree, essentially recreating the same code as in the
# tested logic.
self.verify_state(self.dag2, task_ids, [self.execution_dates[0]],
State.SUCCESS, [])
def tearDown(self):
self.dag1.clear()
self.dag2.clear()
# just to make sure we are fully cleaned up
self.session.query(models.DagRun).delete()
self.session.query(models.TaskInstance).delete()
self.session.commit()
self.session.close()
if __name__ == '__main__':
unittest.main()<|fim▁end|>
| |
<|file_name|>deserialize.rs<|end_file_name|><|fim▁begin|>use serde::de::Deserializer;
use serde::Deserialize;
use std::time::Duration;
pub fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error>
where
D: Deserializer<'de>,
{
let ms: u64 = Deserialize::deserialize(deserializer)?;
Ok(Duration::from_millis(ms))
}
<|fim▁hole|>where
D: Deserializer<'de>,
{
let ms: u64 = Deserialize::deserialize(deserializer)?;
Ok(Some(Duration::from_millis(ms)))
}<|fim▁end|>
|
pub fn deserialize_opt_duration<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>
|
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>//! Various utilities
/*
use std::old_io::net::ip;
/// Convert socket address to bytes in network order.
pub fn netaddr_to_netbytes(addr: &ip::SocketAddr) -> Vec<u8> {
match addr.ip {
ip::Ipv4Addr(a, b, c, d) =>
vec![a, b, c, d, (addr.port >> 8) as u8, (addr.port & 0xFF) as u8],
// TODO(divius): implement
ip::Ipv6Addr(..) => panic!("IPv6 not implemented")
}
}
/// Get socket address from netbytes.
pub fn netaddr_from_netbytes(bytes: &[u8]) -> ip::SocketAddr {
assert_eq!(6, bytes.len());
ip::SocketAddr {
ip: ip::Ipv4Addr(bytes[0], bytes[1], bytes[2], bytes[3]),
port: ((bytes[4] as u16) << 8) + bytes[5] as u16
}
}
*/
#[cfg(test)]
pub mod test {
use std::net::SocketAddr;
use std::net::SocketAddrV4;
use std::net::Ipv4Addr;
use num::traits::FromPrimitive;
use num;
use super::super::Node;
pub static ADDR: &'static str = "127.0.0.1:8008";
pub fn new_node(id: usize) -> Node {
new_node_with_port(id, 8008)
}
pub fn new_node_with_port(id: usize, port: u16) -> Node {
Node {
id: FromPrimitive::from_usize(id).unwrap(),
address: SocketAddr::V4( SocketAddrV4::new(
Ipv4Addr::new(127, 0, 0, 1),
port<|fim▁hole|> }
}
pub fn usize_to_id(id: usize) -> num::BigUint {
FromPrimitive::from_usize(id).unwrap()
}
}<|fim▁end|>
|
) )
|
<|file_name|>gce_pd_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package gce_pd
import (
"os"
"testing"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/GoogleCloudPlatform/kubernetes/pkg/types"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util/mount"
"github.com/GoogleCloudPlatform/kubernetes/pkg/volume"
)
func TestCanSupport(t *testing.T) {
plugMgr := volume.VolumePluginMgr{}
plugMgr.InitPlugins(ProbeVolumePlugins(), volume.NewFakeVolumeHost("/tmp/fake", nil, nil))
plug, err := plugMgr.FindPluginByName("kubernetes.io/gce-pd")
if err != nil {
t.Errorf("Can't find the plugin by name")
}
if plug.Name() != "kubernetes.io/gce-pd" {
t.Errorf("Wrong name: %s", plug.Name())
}
if !plug.CanSupport(&volume.Spec{Name: "foo", VolumeSource: api.VolumeSource{GCEPersistentDisk: &api.GCEPersistentDiskVolumeSource{}}}) {
t.Errorf("Expected true")
}
}
func TestGetAccessModes(t *testing.T) {
plugMgr := volume.VolumePluginMgr{}<|fim▁hole|> t.Errorf("Can't find the plugin by name")
}
if !contains(plug.GetAccessModes(), api.ReadWriteOnce) || !contains(plug.GetAccessModes(), api.ReadOnlyMany) {
t.Errorf("Expected two AccessModeTypes: %s and %s", api.ReadWriteOnce, api.ReadOnlyMany)
}
}
func contains(modes []api.AccessModeType, mode api.AccessModeType) bool {
for _, m := range modes {
if m == mode {
return true
}
}
return false
}
type fakePDManager struct {
attachCalled bool
detachCalled bool
}
// TODO(jonesdl) To fully test this, we could create a loopback device
// and mount that instead.
func (fake *fakePDManager) AttachAndMountDisk(pd *gcePersistentDisk, globalPDPath string) error {
globalPath := makeGlobalPDName(pd.plugin.host, pd.pdName)
err := os.MkdirAll(globalPath, 0750)
if err != nil {
return err
}
fake.attachCalled = true
// Simulate the global mount so that the fakeMounter returns the
// expected number of mounts for the attached disk.
pd.mounter.Mount(globalPath, globalPath, pd.fsType, nil)
return nil
}
func (fake *fakePDManager) DetachDisk(pd *gcePersistentDisk) error {
globalPath := makeGlobalPDName(pd.plugin.host, pd.pdName)
err := os.RemoveAll(globalPath)
if err != nil {
return err
}
fake.detachCalled = true
return nil
}
func TestPlugin(t *testing.T) {
plugMgr := volume.VolumePluginMgr{}
plugMgr.InitPlugins(ProbeVolumePlugins(), volume.NewFakeVolumeHost("/tmp/fake", nil, nil))
plug, err := plugMgr.FindPluginByName("kubernetes.io/gce-pd")
if err != nil {
t.Errorf("Can't find the plugin by name")
}
spec := &api.Volume{
Name: "vol1",
VolumeSource: api.VolumeSource{
GCEPersistentDisk: &api.GCEPersistentDiskVolumeSource{
PDName: "pd",
FSType: "ext4",
},
},
}
fakeManager := &fakePDManager{}
fakeMounter := &mount.FakeMounter{}
builder, err := plug.(*gcePersistentDiskPlugin).newBuilderInternal(volume.NewSpecFromVolume(spec), types.UID("poduid"), fakeManager, fakeMounter)
if err != nil {
t.Errorf("Failed to make a new Builder: %v", err)
}
if builder == nil {
t.Errorf("Got a nil Builder")
}
path := builder.GetPath()
if path != "/tmp/fake/pods/poduid/volumes/kubernetes.io~gce-pd/vol1" {
t.Errorf("Got unexpected path: %s", path)
}
if err := builder.SetUp(); err != nil {
t.Errorf("Expected success, got: %v", err)
}
if _, err := os.Stat(path); err != nil {
if os.IsNotExist(err) {
t.Errorf("SetUp() failed, volume path not created: %s", path)
} else {
t.Errorf("SetUp() failed: %v", err)
}
}
if _, err := os.Stat(path); err != nil {
if os.IsNotExist(err) {
t.Errorf("SetUp() failed, volume path not created: %s", path)
} else {
t.Errorf("SetUp() failed: %v", err)
}
}
if !fakeManager.attachCalled {
t.Errorf("Attach watch not called")
}
fakeManager = &fakePDManager{}
cleaner, err := plug.(*gcePersistentDiskPlugin).newCleanerInternal("vol1", types.UID("poduid"), fakeManager, fakeMounter)
if err != nil {
t.Errorf("Failed to make a new Cleaner: %v", err)
}
if cleaner == nil {
t.Errorf("Got a nil Cleaner")
}
if err := cleaner.TearDown(); err != nil {
t.Errorf("Expected success, got: %v", err)
}
if _, err := os.Stat(path); err == nil {
t.Errorf("TearDown() failed, volume path still exists: %s", path)
} else if !os.IsNotExist(err) {
t.Errorf("SetUp() failed: %v", err)
}
if !fakeManager.detachCalled {
t.Errorf("Detach watch not called")
}
}
func TestPluginLegacy(t *testing.T) {
plugMgr := volume.VolumePluginMgr{}
plugMgr.InitPlugins(ProbeVolumePlugins(), volume.NewFakeVolumeHost("/tmp/fake", nil, nil))
plug, err := plugMgr.FindPluginByName("gce-pd")
if err != nil {
t.Errorf("Can't find the plugin by name")
}
if plug.Name() != "gce-pd" {
t.Errorf("Wrong name: %s", plug.Name())
}
if plug.CanSupport(&volume.Spec{Name: "foo", VolumeSource: api.VolumeSource{GCEPersistentDisk: &api.GCEPersistentDiskVolumeSource{}}}) {
t.Errorf("Expected false")
}
spec := &api.Volume{VolumeSource: api.VolumeSource{GCEPersistentDisk: &api.GCEPersistentDiskVolumeSource{}}}
if _, err := plug.NewBuilder(volume.NewSpecFromVolume(spec), &api.ObjectReference{UID: types.UID("poduid")}, volume.VolumeOptions{""}, nil); err == nil {
t.Errorf("Expected failiure")
}
cleaner, err := plug.NewCleaner("vol1", types.UID("poduid"), nil)
if err != nil {
t.Errorf("Failed to make a new Cleaner: %v", err)
}
if cleaner == nil {
t.Errorf("Got a nil Cleaner")
}
}<|fim▁end|>
|
plugMgr.InitPlugins(ProbeVolumePlugins(), volume.NewFakeVolumeHost("/tmp/fake", nil, nil))
plug, err := plugMgr.FindPersistentPluginByName("kubernetes.io/gce-pd")
if err != nil {
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
mod diff;
mod iter;
mod link;
mod namecmp;
mod store;
#[cfg(any(test, feature = "for-tests"))]
pub mod testutil;
use std::collections::btree_map::Entry;
use std::collections::BTreeMap;
use std::fmt;
use std::sync::Arc;
use anyhow::Result;
use bytes::Bytes;
use manifest::DiffEntry;
use manifest::DirDiffEntry;
use manifest::Directory;
use manifest::File;
use manifest::FileMetadata;
use manifest::FsNodeMetadata;
use manifest::List;
use manifest::Manifest;
use once_cell::sync::OnceCell;
use pathmatcher::Matcher;
use sha1::Digest;
use sha1::Sha1;
pub use store::Flag;
use storemodel::TreeFormat;
use thiserror::Error;
use types::HgId;
use types::Key;
use types::PathComponent;
use types::PathComponentBuf;
use types::RepoPath;
use types::RepoPathBuf;
pub use self::diff::Diff;
pub(crate) use self::link::Link;
pub use self::store::Entry as TreeEntry;
pub use self::store::TreeStore;
use crate::iter::BfsIter;
use crate::iter::DfsCursor;
use crate::iter::Step;
use crate::link::DirLink;
use crate::link::Durable;
use crate::link::DurableEntry;
use crate::link::Ephemeral;
use crate::link::Leaf;
use crate::store::InnerStore;
/// The Tree implementation of a Manifest dedicates an inner node for each directory in the
/// repository and a leaf for each file.
#[derive(Clone)]
pub struct TreeManifest {
store: InnerStore,
// TODO: root can't be a Leaf
root: Link,
}
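// Illustrative shape (sketch, not a doctest): inserting "foo/bar.txt" creates an
// inner node for "foo" holding a leaf for "bar.txt". Roughly:
//   let mut tree = TreeManifest::ephemeral(store);        // `store` assumed available
//   tree.insert(repo_path_buf("foo/bar.txt"), metadata)?; // helpers as used in the tests below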
#[derive(Error, Debug)]
#[error("failure inserting '{path}' in manifest")]
pub struct InsertError {
pub path: RepoPathBuf,
pub file_metadata: FileMetadata,
pub source: InsertErrorCause,
}
impl InsertError {
pub fn new(path: RepoPathBuf, file_metadata: FileMetadata, source: InsertErrorCause) -> Self {
Self {
path,
file_metadata,
source,
}
}
}
#[derive(thiserror::Error, Debug)]
pub enum InsertErrorCause {
#[error("'{0}' is already a file")]
ParentFileExists(RepoPathBuf),
#[error("file path is already a directory")]
DirectoryExistsForPath,
}
impl TreeManifest {
/// Instantiates a tree manifest that was stored with the specificed `HgId`
pub fn durable(store: Arc<dyn TreeStore + Send + Sync>, hgid: HgId) -> Self {
TreeManifest {
store: InnerStore::new(store),
root: Link::durable(hgid),
}
}
/// Instantiates a new tree manifest with no history
pub fn ephemeral(store: Arc<dyn TreeStore + Send + Sync>) -> Self {
TreeManifest {
store: InnerStore::new(store),
root: Link::ephemeral(),
}
}
fn root_cursor<'a>(&'a self) -> DfsCursor<'a> {
DfsCursor::new(&self.store, RepoPathBuf::new(), &self.root)
}
}
impl Manifest for TreeManifest {
fn get(&self, path: &RepoPath) -> Result<Option<FsNodeMetadata>> {
let result = self.get_link(path)?.map(|link| link.to_fs_node());
Ok(result)
}
fn list(&self, path: &RepoPath) -> Result<List> {
let directory = match self.get_link(path)? {
None => return Ok(List::NotFound),
Some(l) => match l.as_ref() {
Leaf(_) => return Ok(List::File),
Ephemeral(content) => content,
Durable(entry) => entry.materialize_links(&self.store, path)?,
},
};
let directory = directory
.into_iter()
.map(|(key, value)| (key.to_owned(), value.to_fs_node()))
.collect();
Ok(List::Directory(directory))
}
fn insert(&mut self, path: RepoPathBuf, file_metadata: FileMetadata) -> Result<()> {
let mut cursor = &self.root;
let mut must_insert = false;
for (parent, component) in path.parents().zip(path.components()) {
let child = match cursor.as_ref() {
Leaf(_) => Err(InsertError::new(
path.clone(), // TODO: get rid of clone (it is borrowed)
file_metadata,
InsertErrorCause::ParentFileExists(parent.to_owned()),
))?,
Ephemeral(links) => links.get(component),
Durable(ref entry) => {
let links = entry.materialize_links(&self.store, parent)?;
links.get(component)
}
};<|fim▁hole|> break;
}
Some(link) => cursor = link,
}
}
if must_insert == false {
match cursor.as_ref() {
Leaf(existing_metadata) => {
if *existing_metadata == file_metadata {
return Ok(()); // nothing to do
}
}
Ephemeral(_) | Durable(_) => Err(InsertError::new(
path.clone(), // TODO: get rid of clone (it is borrowed later)
file_metadata,
InsertErrorCause::DirectoryExistsForPath,
))?,
}
}
let (path_parent, last_component) = path.split_last_component().unwrap();
let mut cursor = &mut self.root;
// unwrap is fine because root would have been a directory
for (parent, component) in path_parent.parents().zip(path_parent.components()) {
cursor = cursor
.mut_ephemeral_links(&self.store, parent)?
.entry(component.to_owned())
.or_insert_with(|| Link::ephemeral());
}
match cursor
.mut_ephemeral_links(&self.store, path_parent)?
.entry(last_component.to_owned())
{
Entry::Vacant(entry) => {
entry.insert(Link::leaf(file_metadata));
}
Entry::Occupied(mut entry) => {
if let Leaf(ref mut store_ref) = entry.get_mut().as_mut_ref()? {
*store_ref = file_metadata;
} else {
unreachable!("Unexpected directory found while insert.");
}
}
}
Ok(())
}
fn remove(&mut self, path: &RepoPath) -> Result<Option<FileMetadata>> {
// The return value lets us know if there are no more files in the subtree and we should be
// removing it.
fn do_remove<'a, I>(store: &InnerStore, cursor: &mut Link, iter: &mut I) -> Result<bool>
where
I: Iterator<Item = (&'a RepoPath, &'a PathComponent)>,
{
match iter.next() {
None => {
if let Leaf(_) = cursor.as_ref() {
// We reached the file that we want to remove.
Ok(true)
} else {
unreachable!("Unexpected directory found while remove.");
}
}
Some((parent, component)) => {
// TODO: only convert to ephemeral if a removal took place
// We are navigating the tree down following parent directories
let ephemeral_links = cursor.mut_ephemeral_links(&store, parent)?;
// When there is no `component` subtree we behave like the file was removed.
if let Some(link) = ephemeral_links.get_mut(component) {
if do_remove(store, link, iter)? {
// There are no files in the component subtree so we remove it.
ephemeral_links.remove(component);
}
}
Ok(ephemeral_links.is_empty())
}
}
}
if let Some(file_metadata) = self.get_file(path)? {
do_remove(
&self.store,
&mut self.root,
&mut path.parents().zip(path.components()),
)?;
Ok(Some(file_metadata))
} else {
Ok(None)
}
}
/// Write dirty trees using specified format to disk. Return the root tree id.
fn flush(&mut self) -> Result<HgId> {
fn compute_sha1(content: &[u8], format: TreeFormat) -> HgId {
let mut hasher = Sha1::new();
match format {
TreeFormat::Git => hasher.input(format!("tree {}\0", content.len())),
TreeFormat::Hg => {
// XXX: No p1, p2 to produce a genuine SHA1.
// This code path is only meaningful for tests.
assert!(
cfg!(test),
"flush() cannot be used with hg store, consider finalize() instead"
);
}
}
hasher.input(content);
let buf: [u8; HgId::len()] = hasher.result().into();
(&buf).into()
}
fn do_flush<'a, 'b, 'c>(
store: &'a InnerStore,
pathbuf: &'b mut RepoPathBuf,
cursor: &'c mut Link,
format: TreeFormat,
) -> Result<(HgId, store::Flag)> {
loop {
let new_cursor = match cursor.as_mut_ref()? {
Leaf(file_metadata) => {
return Ok((
file_metadata.hgid.clone(),
store::Flag::File(file_metadata.file_type.clone()),
));
}
Durable(entry) => return Ok((entry.hgid.clone(), store::Flag::Directory)),
Ephemeral(links) => {
let iter = links.iter_mut().map(|(component, link)| {
pathbuf.push(component.as_path_component());
let (hgid, flag) = do_flush(store, pathbuf, link, format)?;
pathbuf.pop();
Ok(store::Element::new(
component.to_owned(),
hgid.clone(),
flag,
))
});
let elements: Vec<_> = iter.collect::<Result<Vec<_>>>()?;
let entry = store::Entry::from_elements(elements, format);
let hgid = compute_sha1(entry.as_ref(), format);
store.insert_entry(&pathbuf, hgid, entry)?;
let cell = OnceCell::new();
// TODO: remove clone
cell.set(Ok(links.clone())).unwrap();
let durable_entry = DurableEntry { hgid, links: cell };
Link::new(Durable(Arc::new(durable_entry)))
}
};
*cursor = new_cursor;
}
}
let mut path = RepoPathBuf::new();
let format = self.store.format();
let (hgid, _) = do_flush(&self.store, &mut path, &mut self.root, format)?;
Ok(hgid)
}
fn files<'a, M: 'static + Matcher + Sync + Send>(
&'a self,
matcher: M,
) -> Box<dyn Iterator<Item = Result<File>> + 'a> {
let files = BfsIter::new(&self, matcher).filter_map(|result| match result {
Ok((path, FsNodeMetadata::File(metadata))) => Some(Ok(File::new(path, metadata))),
Ok(_) => None,
Err(e) => Some(Err(e)),
});
Box::new(files)
}
/// Returns an iterator over all the directories that are present in the
/// tree.
///
/// Note: the matcher should be a prefix matcher, other kinds of matchers
/// could be less effective than expected.
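// Sketch (AlwaysMatcher is assumed from the pathmatcher crate):
//   for dir in tree.dirs(AlwaysMatcher::new()) { let _dir = dir?; /* one Directory per dir */ }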
fn dirs<'a, M: 'static + Matcher + Sync + Send>(
&'a self,
matcher: M,
) -> Box<dyn Iterator<Item = Result<Directory>> + 'a> {
let dirs = BfsIter::new(&self, matcher).filter_map(|result| match result {
Ok((path, FsNodeMetadata::Directory(metadata))) => {
Some(Ok(Directory::new(path, metadata)))
}
Ok(_) => None,
Err(e) => Some(Err(e)),
});
Box::new(dirs)
}
fn diff<'a, M: Matcher>(
&'a self,
other: &'a Self,
matcher: &'a M,
) -> Result<Box<dyn Iterator<Item = Result<DiffEntry>> + 'a>> {
Ok(Box::new(Diff::new(self, other, matcher)?))
}
fn modified_dirs<'a, M: Matcher>(
&'a self,
other: &'a Self,
matcher: &'a M,
) -> Result<Box<dyn Iterator<Item = Result<DirDiffEntry>> + 'a>> {
Ok(Box::new(Diff::new(self, other, matcher)?.modified_dirs()))
}
}
impl fmt::Debug for TreeManifest {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fn write_indent(f: &mut fmt::Formatter<'_>, indent: usize) -> fmt::Result {
write!(f, "{}", str::repeat("| ", indent))?;
Ok(())
}
fn write_children(
f: &mut fmt::Formatter<'_>,
children: &BTreeMap<PathComponentBuf, Link>,
indent: usize,
) -> fmt::Result {
for (component, link) in children {
write_indent(f, indent)?;
write!(f, "{} ", component)?;
write_links(f, link, indent + 1)?;
}
Ok(())
}
fn write_links(f: &mut fmt::Formatter<'_>, link: &Link, indent: usize) -> fmt::Result {
match link.as_ref() {
Leaf(metadata) => {
write!(f, "(File, {}, {:?})\n", metadata.hgid, metadata.file_type)
}
Ephemeral(children) => {
write!(f, "(Ephemeral)\n")?;
write_children(f, children, indent)
}
Durable(entry) => {
write!(f, "(Durable, {})\n", entry.hgid)?;
match entry.links.get() {
None => Ok(()),
Some(Err(fallible)) => {
write_indent(f, indent)?;
write!(f, "failed to load: {:?}", fallible)
}
Some(Ok(children)) => write_children(f, children, indent),
}
}
}
}
write!(f, "Root ")?;
write_links(f, &self.root, 1)
}
}
impl TreeManifest {
/// Produces new trees to write in hg format (path, id, text, p1, p2).
/// Does not write to the tree store directly.
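// Caller-side sketch (persisting the entries is the caller's job):
//   for (path, id, text, p1, p2) in tree.finalize(vec![&parent_tree])? {
//       /* store `text` under `id` at `path`, recording parents p1/p2 */
//   }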
pub fn finalize(
&mut self,
parent_trees: Vec<&TreeManifest>,
) -> Result<impl Iterator<Item = (RepoPathBuf, HgId, Bytes, HgId, HgId)>> {
fn compute_hgid<C: AsRef<[u8]>>(parent_tree_nodes: &[HgId], content: C) -> HgId {
let mut hasher = Sha1::new();
debug_assert!(parent_tree_nodes.len() <= 2);
let p1 = parent_tree_nodes.get(0).unwrap_or(HgId::null_id());
let p2 = parent_tree_nodes.get(1).unwrap_or(HgId::null_id());
// Both parent hashes always go into the hash computation; perhaps surprisingly,
// NULL_ID gets no special treatment here and is sorted like any other id.
if p1 < p2 {
hasher.input(p1.as_ref());
hasher.input(p2.as_ref());
} else {
hasher.input(p2.as_ref());
hasher.input(p1.as_ref());
}
hasher.input(content.as_ref());
let buf: [u8; HgId::len()] = hasher.result().into();
(&buf).into()
}
struct Executor<'a> {
store: &'a InnerStore,
path: RepoPathBuf,
converted_nodes: Vec<(RepoPathBuf, HgId, Bytes, HgId, HgId)>,
parent_trees: Vec<DfsCursor<'a>>,
}
impl<'a> Executor<'a> {
fn new(
store: &'a InnerStore,
parent_trees: &[&'a TreeManifest],
) -> Result<Executor<'a>> {
let mut executor = Executor {
store,
path: RepoPathBuf::new(),
converted_nodes: Vec::new(),
parent_trees: parent_trees.iter().map(|v| v.root_cursor()).collect(),
};
// The first node after step is the root directory. `work()` expects cursors to
// be pointing to the underlying link.
for cursor in executor.parent_trees.iter_mut() {
match cursor.step() {
Step::Success | Step::End => {}
Step::Err(err) => return Err(err),
}
}
Ok(executor)
}
fn active_parent_tree_nodes(&self, active_parents: &[usize]) -> Result<Vec<HgId>> {
let mut parent_nodes = Vec::with_capacity(active_parents.len());
for id in active_parents {
let cursor = &self.parent_trees[*id];
let hgid = match cursor.link().as_ref() {
Leaf(_) | Ephemeral(_) => unreachable!(),
Durable(entry) => entry.hgid,
};
parent_nodes.push(hgid);
}
Ok(parent_nodes)
}
fn advance_parents(&mut self, active_parents: &[usize]) -> Result<()> {
for id in active_parents {
let cursor = &mut self.parent_trees[*id];
match cursor.step() {
Step::Success | Step::End => {}
Step::Err(err) => return Err(err),
}
}
Ok(())
}
fn parent_trees_for_subdirectory(
&mut self,
active_parents: &[usize],
) -> Result<Vec<usize>> {
let mut result = Vec::new();
for id in active_parents.iter() {
let cursor = &mut self.parent_trees[*id];
while !cursor.finished() && cursor.path() < self.path.as_repo_path() {
cursor.skip_subtree();
match cursor.step() {
Step::Success | Step::End => {}
Step::Err(err) => return Err(err),
}
}
if !cursor.finished() && cursor.path() == self.path.as_repo_path() {
match cursor.link().as_ref() {
Leaf(_) => {} // files and directories don't share history
Durable(_) => result.push(*id),
Ephemeral(_) => {
panic!("Found ephemeral parent when finalizing manifest.")
}
}
}
}
Ok(result)
}
fn work(
&mut self,
link: &mut Link,
active_parents: Vec<usize>,
) -> Result<(HgId, store::Flag)> {
let parent_tree_nodes = self.active_parent_tree_nodes(&active_parents)?;
if let Durable(entry) = link.as_ref() {
if parent_tree_nodes.contains(&entry.hgid) {
return Ok((entry.hgid, store::Flag::Directory));
}
}
self.advance_parents(&active_parents)?;
if let Leaf(file_metadata) = link.as_ref() {
return Ok((
file_metadata.hgid,
store::Flag::File(file_metadata.file_type.clone()),
));
}
// TODO: This code is also used on durable nodes for the purpose of generating
// a list of entries to insert in the local store. For those cases we don't
// need to convert to Ephemeral instead only verify the hash.
let links = link.mut_ephemeral_links(self.store, &self.path)?;
// finalize() is only used for hg format.
let format = TreeFormat::Hg;
let mut entry = store::EntryMut::new(format);
for (component, link) in links.iter_mut() {
self.path.push(component.as_path_component());
let child_parents = self.parent_trees_for_subdirectory(&active_parents)?;
let (hgid, flag) = self.work(link, child_parents)?;
self.path.pop();
let element = store::Element::new(component.clone(), hgid, flag);
entry.add_element_hg(element);
}
let entry = entry.freeze();
let hgid = compute_hgid(&parent_tree_nodes, &entry);
let cell = OnceCell::new();
// TODO: remove clone
cell.set(Ok(links.clone())).unwrap();
let durable_entry = DurableEntry { hgid, links: cell };
let inner = Arc::new(durable_entry);
*link = Link::new(Durable(inner));
let parent_hgid = |id| *parent_tree_nodes.get(id).unwrap_or(HgId::null_id());
self.converted_nodes.push((
self.path.clone(),
hgid,
entry.to_bytes(),
parent_hgid(0),
parent_hgid(1),
));
Ok((hgid, store::Flag::Directory))
}
}
assert_eq!(
self.store.format(),
TreeFormat::Hg,
"finalize() can only be used for hg store, use flush() instead"
);
let mut executor = Executor::new(&self.store, &parent_trees)?;
executor.work(&mut self.root, (0..parent_trees.len()).collect())?;
Ok(executor.converted_nodes.into_iter())
}
fn get_link(&self, path: &RepoPath) -> Result<Option<&Link>> {
let mut cursor = &self.root;
for (parent, component) in path.parents().zip(path.components()) {
let child = match cursor.as_ref() {
Leaf(_) => return Ok(None),
Ephemeral(links) => links.get(component),
Durable(ref entry) => {
let links = entry.materialize_links(&self.store, parent)?;
links.get(component)
}
};
match child {
None => return Ok(None),
Some(link) => cursor = link,
}
}
Ok(Some(cursor))
}
}
/// The purpose of this function is to provide compatible behavior with the C++ implementation
/// of the treemanifest. This function is problematic because it crosses abstraction
/// boundaries and assumes that the storage format, the in-memory format, and the
/// wire format are all the same.
///
/// This function returns the nodes that need to be sent over the wire for a subtree of the
/// manifest to be fully hydrated. The subtree is represented by `path` and `hgid`. The data
/// that is present locally by the client is represented by `other_nodes`.
///
/// It is undefined what this function will do when called with a path that points to a file
/// or with nodes that don't make sense.
// NOTE: The implementation is currently custom. Consider converting the code to use Cursor.
// The suggestion received in code review was also to consider making the return type more
// simple (RepoPath, HgId) and letting the call sites deal with the Bytes.
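// Illustrative call (identifiers hypothetical):
//   let missing = compat_subtree_diff(store, RepoPath::empty(), new_root, vec![old_root], 2)?;
//   // `missing` holds (path, id, base ids, raw bytes) for each tree under the root
//   // that is absent from the bases, limited to two levels.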
pub fn compat_subtree_diff(
store: Arc<dyn TreeStore + Send + Sync>,
path: &RepoPath,
hgid: HgId,
other_nodes: Vec<HgId>,
depth: i32,
) -> Result<Vec<(RepoPathBuf, HgId, Vec<HgId>, Bytes)>> {
struct State {
store: InnerStore,
path: RepoPathBuf,
result: Vec<(RepoPathBuf, HgId, Vec<HgId>, Bytes)>,
depth_remaining: i32,
}
impl State {
fn work(&mut self, hgid: HgId, other_nodes: Vec<HgId>) -> Result<()> {
let entry = self.store.get_entry(&self.path, hgid)?;
if self.depth_remaining > 0 {
// TODO: optimize "other_nodes" construction
// We use BTreeMap for convenience only, it is more efficient to use an array since
// the entries are already sorted.
let mut others_map = BTreeMap::new();
for other_hgid in other_nodes.clone() {
let other_entry = self.store.get_entry(&self.path, other_hgid)?;
for other_element_result in other_entry.elements() {
let other_element = other_element_result?;
if other_element.flag == store::Flag::Directory {
others_map
.entry(other_element.component)
.or_insert(vec![])
.push(other_element.hgid);
}
}
}
for element_result in entry.elements() {
let element = element_result?;
if element.flag != store::Flag::Directory {
continue;
}
let mut others = others_map
.remove(&element.component)
.unwrap_or_else(|| vec![]);
if others.contains(&element.hgid) {
continue;
}
others.dedup();
self.path.push(element.component.as_ref());
self.depth_remaining -= 1;
self.work(element.hgid, others)?;
self.depth_remaining += 1;
self.path.pop();
}
}
// NOTE: order in the result set matters for a lot of the integration tests
self.result
.push((self.path.clone(), hgid, other_nodes, entry.to_bytes()));
Ok(())
}
}
if other_nodes.contains(&hgid) {
return Ok(vec![]);
}
let mut state = State {
store: InnerStore::new(store),
path: path.to_owned(),
result: vec![],
depth_remaining: depth - 1,
};
state.work(hgid, other_nodes)?;
Ok(state.result)
}
/// Recursively prefetch the entire subtree under the given Key up to the given depth.
///
/// This serves as a client-driven alternative to the `gettreepack` wire protocol
/// command (wherein the server determines which missing tree nodes to send).
///
/// Determining which missing nodes to fetch on the client side, as this function does,
/// may be faster in some cases since any nodes that are already present on the client
/// will be by definition fast to access, whereas the server would effectively be forced
/// to fetch the desired tree and the base tree from its underlying datastore. This comes
/// at the expense of an increased number of network roundtrips to the server (specifically,
/// O(depth) requests will be sent serially), which may be problematic if there is high
/// network latency between the server and client. As such, this function's performance
/// relative to `gettreepack` is highly dependent on the situation in question.
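// Sketch (assuming `key` names the subtree root):
//   prefetch(store, key, Some(2))?; // hydrate the root plus two more levels
//   // pass None as the depth to walk the entire subtree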
pub fn prefetch(
store: Arc<dyn TreeStore + Send + Sync>,
key: Key,
mut depth: Option<usize>,
) -> Result<()> {
let tree = TreeManifest::durable(store, key.hgid);
let mut dirs = vec![DirLink::from_link(&tree.root, key.path).unwrap()];
while !dirs.is_empty() {
let keys = dirs.iter().filter_map(|d| d.key()).collect::<Vec<_>>();
if !keys.is_empty() {
// Note that the prefetch() function is expected to filter out
// keys that are already present in the client's cache.
tree.store.prefetch(keys)?;
}
dirs = dirs
.into_iter()
.map(|d| Ok(d.list(&tree.store)?.1))
.collect::<Result<Vec<_>>>()?
.into_iter()
.flatten()
.collect();
depth = match depth {
Some(0) => break,
Some(d) => Some(d - 1),
None => None,
};
}
Ok(())
}
#[cfg(test)]
mod tests {
use manifest::testutil::*;
use manifest::FileType;
use store::Element;
use types::hgid::NULL_ID;
use types::testutil::*;
use self::testutil::*;
use super::*;
impl store::Entry {
fn from_elements_hg(elements: Vec<Element>) -> Self {
Self::from_elements(elements, TreeFormat::Hg)
}
}
fn store_element(path: &str, hex: &str, flag: store::Flag) -> store::Element {
store::Element::new(path_component_buf(path), hgid(hex), flag)
}
fn get_hgid(tree: &TreeManifest, path: &RepoPath) -> HgId {
match tree.get_link(path).unwrap().unwrap().as_ref() {
Leaf(file_metadata) => file_metadata.hgid,
Durable(ref entry) => entry.hgid,
Ephemeral(_) => {
panic!("Asked for hgid on path {} but found ephemeral hgid.", path)
}
}
}
#[test]
fn test_insert() {
let mut tree = TreeManifest::ephemeral(Arc::new(TestStore::new()));
tree.insert(repo_path_buf("foo/bar"), make_meta("10"))
.unwrap();
assert_eq!(
tree.get_file(repo_path("foo/bar")).unwrap(),
Some(make_meta("10"))
);
assert_eq!(tree.get_file(repo_path("baz")).unwrap(), None);
tree.insert(repo_path_buf("baz"), make_meta("20")).unwrap();
assert_eq!(
tree.get_file(repo_path("foo/bar")).unwrap(),
Some(make_meta("10"))
);
assert_eq!(
tree.get_file(repo_path("baz")).unwrap(),
Some(make_meta("20"))
);
tree.insert(repo_path_buf("foo/bat"), make_meta("30"))
.unwrap();
assert_eq!(
tree.get_file(repo_path("foo/bat")).unwrap(),
Some(make_meta("30"))
);
assert_eq!(
tree.get_file(repo_path("foo/bar")).unwrap(),
Some(make_meta("10"))
);
assert_eq!(
tree.get_file(repo_path("baz")).unwrap(),
Some(make_meta("20"))
);
assert_eq!(
tree.insert(repo_path_buf("foo/bar/error"), make_meta("40"))
.unwrap_err()
.chain()
.map(|e| format!("{}", e))
.collect::<Vec<_>>(),
vec![
"failure inserting 'foo/bar/error' in manifest",
"\'foo/bar\' is already a file",
],
);
assert_eq!(
tree.insert(repo_path_buf("foo"), make_meta("50"))
.unwrap_err()
.chain()
.map(|e| format!("{}", e))
.collect::<Vec<_>>(),
vec![
"failure inserting 'foo' in manifest",
"file path is already a directory",
],
);
}
#[test]
fn test_durable_link() {
let store = TestStore::new();
let root_entry = store::Entry::from_elements_hg(vec![
store_element("foo", "10", store::Flag::Directory),
store_element("baz", "20", store::Flag::File(FileType::Regular)),
]);
store
.insert(RepoPath::empty(), hgid("1"), root_entry.to_bytes())
.unwrap();
let foo_entry = store::Entry::from_elements_hg(vec![store_element(
"bar",
"11",
store::Flag::File(FileType::Regular),
)]);
store
.insert(repo_path("foo"), hgid("10"), foo_entry.to_bytes())
.unwrap();
let mut tree = TreeManifest::durable(Arc::new(store), hgid("1"));
assert_eq!(
tree.get_file(repo_path("foo/bar")).unwrap(),
Some(make_meta("11"))
);
assert_eq!(
tree.get_file(repo_path("baz")).unwrap(),
Some(make_meta("20"))
);
tree.insert(repo_path_buf("foo/bat"), make_meta("12"))
.unwrap();
assert_eq!(
tree.get_file(repo_path("foo/bat")).unwrap(),
Some(make_meta("12"))
);
assert_eq!(
tree.get_file(repo_path("foo/bar")).unwrap(),
Some(make_meta("11"))
);
assert_eq!(
tree.get_file(repo_path("baz")).unwrap(),
Some(make_meta("20"))
);
}
#[test]
fn test_insert_into_directory() {
let mut tree = TreeManifest::ephemeral(Arc::new(TestStore::new()));
tree.insert(repo_path_buf("foo/bar/baz"), make_meta("10"))
.unwrap();
assert!(
tree.insert(repo_path_buf("foo/bar"), make_meta("20"))
.is_err()
);
assert!(tree.insert(repo_path_buf("foo"), make_meta("30")).is_err());
}
#[test]
fn test_insert_with_file_parent() {
let mut tree = TreeManifest::ephemeral(Arc::new(TestStore::new()));
tree.insert(repo_path_buf("foo"), make_meta("10")).unwrap();
assert!(
tree.insert(repo_path_buf("foo/bar"), make_meta("20"))
.is_err()
);
assert!(
tree.insert(repo_path_buf("foo/bar/baz"), make_meta("30"))
.is_err()
);
}
#[test]
fn test_get_from_directory() {
let mut tree = TreeManifest::ephemeral(Arc::new(TestStore::new()));
tree.insert(repo_path_buf("foo/bar/baz"), make_meta("10"))
.unwrap();
assert_eq!(
tree.get(repo_path("foo/bar")).unwrap(),
Some(FsNodeMetadata::Directory(None))
);
assert_eq!(
tree.get(repo_path("foo")).unwrap(),
Some(FsNodeMetadata::Directory(None))
);
}
#[test]
fn test_get_with_file_parent() {
let mut tree = TreeManifest::ephemeral(Arc::new(TestStore::new()));
tree.insert(repo_path_buf("foo"), make_meta("10")).unwrap();
assert_eq!(tree.get(repo_path("foo/bar")).unwrap(), None);
assert_eq!(tree.get(repo_path("foo/bar/baz")).unwrap(), None);
}
#[test]
fn test_remove_from_ephemeral() {
let mut tree = TreeManifest::ephemeral(Arc::new(TestStore::new()));
tree.insert(repo_path_buf("a1/b1/c1/d1"), make_meta("10"))
.unwrap();
tree.insert(repo_path_buf("a1/b2"), make_meta("20"))
.unwrap();
tree.insert(repo_path_buf("a2/b2/c2"), make_meta("30"))
.unwrap();
assert_eq!(tree.remove(repo_path("a1")).unwrap(), None);
assert_eq!(tree.remove(repo_path("a1/b1")).unwrap(), None);
assert_eq!(tree.remove(repo_path("a1/b1/c1/d1/e1")).unwrap(), None);
assert_eq!(
tree.remove(repo_path("a1/b1/c1/d1")).unwrap(),
Some(make_meta("10"))
);
assert_eq!(tree.remove(repo_path("a3")).unwrap(), None);
assert_eq!(tree.remove(repo_path("a1/b3")).unwrap(), None);
assert_eq!(tree.remove(repo_path("a1/b1/c1/d2")).unwrap(), None);
assert_eq!(tree.remove(repo_path("a1/b1/c1/d1/e1")).unwrap(), None);
assert_eq!(tree.remove(RepoPath::empty()).unwrap(), None);
assert_eq!(tree.get(repo_path("a1/b1/c1/d1")).unwrap(), None);
assert_eq!(tree.get(repo_path("a1/b1/c1")).unwrap(), None);
assert_eq!(
tree.get(repo_path("a1/b2")).unwrap(),
Some(FsNodeMetadata::File(make_meta("20")))
);
assert_eq!(
tree.remove(repo_path("a1/b2")).unwrap(),
Some(make_meta("20"))
);
assert_eq!(tree.get(repo_path("a1")).unwrap(), None);
assert_eq!(
tree.get(repo_path("a2/b2/c2")).unwrap(),
Some(FsNodeMetadata::File(make_meta("30")))
);
assert_eq!(
tree.remove(repo_path("a2/b2/c2")).unwrap(),
Some(make_meta("30"))
);
assert_eq!(tree.get(repo_path("a2")).unwrap(), None);
assert_eq!(
tree.get(RepoPath::empty()).unwrap(),
Some(FsNodeMetadata::Directory(None))
);
}
#[test]
fn test_remove_from_durable() {
let store = TestStore::new();
let root_entry = store::Entry::from_elements_hg(vec![
store_element("a1", "10", store::Flag::Directory),
store_element("a2", "20", store::Flag::File(FileType::Regular)),
]);
let tree_hgid = hgid("1");
store
.insert(RepoPath::empty(), tree_hgid, root_entry.to_bytes())
.unwrap();
let a1_entry = store::Entry::from_elements_hg(vec![
store_element("b1", "11", store::Flag::File(FileType::Regular)),
store_element("b2", "12", store::Flag::File(FileType::Regular)),
]);
store
.insert(repo_path("a1"), hgid("10"), a1_entry.to_bytes())
.unwrap();
let mut tree = TreeManifest::durable(Arc::new(store), tree_hgid);
assert_eq!(
tree.get(RepoPath::empty()).unwrap(),
Some(FsNodeMetadata::Directory(Some(tree_hgid)))
);
assert_eq!(tree.remove(repo_path("a1")).unwrap(), None);
assert_eq!(
tree.remove(repo_path("a1/b1")).unwrap(),
Some(make_meta("11"))
);
assert_eq!(tree.get(repo_path("a1/b1")).unwrap(), None);
assert_eq!(
tree.get(repo_path("a1/b2")).unwrap(),
Some(FsNodeMetadata::File(make_meta("12")))
);
assert_eq!(
tree.remove(repo_path("a1/b2")).unwrap(),
Some(make_meta("12"))
);
assert_eq!(tree.get(repo_path("a1/b2")).unwrap(), None);
assert_eq!(tree.get(repo_path("a1")).unwrap(), None);
assert_eq!(tree.get_link(repo_path("a1")).unwrap(), None);
assert_eq!(
tree.get(repo_path("a2")).unwrap(),
Some(FsNodeMetadata::File(make_meta("20")))
);
assert_eq!(tree.remove(repo_path("a2")).unwrap(), Some(make_meta("20")));
assert_eq!(tree.get(repo_path("a2")).unwrap(), None);
assert_eq!(
tree.get(RepoPath::empty()).unwrap(),
Some(FsNodeMetadata::Directory(None))
);
}
#[test]
fn test_flush() {
let store = Arc::new(TestStore::new());
let mut tree = TreeManifest::ephemeral(store.clone());
tree.insert(repo_path_buf("a1/b1/c1/d1"), make_meta("10"))
.unwrap();
tree.insert(repo_path_buf("a1/b2"), make_meta("20"))
.unwrap();
tree.insert(repo_path_buf("a2/b2/c2"), make_meta("30"))
.unwrap();
let hgid = tree.flush().unwrap();
let tree = TreeManifest::durable(store.clone(), hgid);
assert_eq!(
tree.get_file(repo_path("a1/b1/c1/d1")).unwrap(),
Some(make_meta("10"))
);
assert_eq!(
tree.get_file(repo_path("a1/b2")).unwrap(),
Some(make_meta("20"))
);
assert_eq!(
tree.get_file(repo_path("a2/b2/c2")).unwrap(),
Some(make_meta("30"))
);
assert_eq!(tree.get(repo_path("a2/b1")).unwrap(), None);
}
#[test]
fn test_finalize_with_zero_and_one_parents() {
let store = Arc::new(TestStore::new());
let mut tree = TreeManifest::ephemeral(store.clone());
tree.insert(repo_path_buf("a1/b1/c1/d1"), make_meta("10"))
.unwrap();
tree.insert(repo_path_buf("a1/b2"), make_meta("20"))
.unwrap();
tree.insert(repo_path_buf("a2/b2/c2"), make_meta("30"))
.unwrap();
let tree_changed: Vec<_> = tree.finalize(vec![]).unwrap().collect();
assert_eq!(tree_changed.len(), 6);
assert_eq!(tree_changed[0].0, repo_path_buf("a1/b1/c1"));
assert_eq!(tree_changed[1].0, repo_path_buf("a1/b1"));
assert_eq!(tree_changed[2].0, repo_path_buf("a1"));
assert_eq!(tree_changed[3].0, repo_path_buf("a2/b2"));
assert_eq!(tree_changed[4].0, repo_path_buf("a2"));
assert_eq!(tree_changed[5].0, RepoPathBuf::new());
        // We must write the finalized entries to the store before building the update:
        // depending on the implementation, finalize may query the store for the values
        // returned by the previous finalize call.
use bytes::Bytes;
for (path, hgid, raw, _, _) in tree_changed.iter() {
store
.insert(&path, *hgid, Bytes::copy_from_slice(&raw[..]))
.unwrap();
}
let mut update = tree.clone();
update
.insert(repo_path_buf("a1/b2"), make_meta("40"))
.unwrap();
update.remove(repo_path("a2/b2/c2")).unwrap();
update
.insert(repo_path_buf("a3/b1"), make_meta("50"))
.unwrap();
let update_changed: Vec<_> = update.finalize(vec![&tree]).unwrap().collect();
assert_eq!(update_changed[0].0, repo_path_buf("a1"));
assert_eq!(update_changed[0].3, tree_changed[2].1);
assert_eq!(update_changed[0].4, NULL_ID);
assert_eq!(update_changed[1].0, repo_path_buf("a3"));
assert_eq!(update_changed[1].3, NULL_ID);
assert_eq!(update_changed[1].4, NULL_ID);
assert_eq!(update_changed[2].0, RepoPathBuf::new());
assert_eq!(update_changed[2].3, tree_changed[5].1);
assert_eq!(update_changed[2].4, NULL_ID);
}
#[test]
fn test_finalize_merge() {
let store = Arc::new(TestStore::new());
let mut p1 = TreeManifest::ephemeral(store.clone());
p1.insert(repo_path_buf("a1/b1/c1/d1"), make_meta("10"))
.unwrap();
p1.insert(repo_path_buf("a1/b2"), make_meta("20")).unwrap();
p1.insert(repo_path_buf("a2/b2/c2"), make_meta("30"))
.unwrap();
let _p1_changed = p1.finalize(vec![]).unwrap();
let mut p2 = TreeManifest::ephemeral(store.clone());
p2.insert(repo_path_buf("a1/b2"), make_meta("40")).unwrap();
p2.insert(repo_path_buf("a3/b1"), make_meta("50")).unwrap();
let _p2_changed = p2.finalize(vec![]).unwrap();
let mut tree = p1.clone();
tree.insert(repo_path_buf("a1/b2"), make_meta("40"))
.unwrap();
tree.insert(repo_path_buf("a2/b2/c2"), make_meta("60"))
.unwrap();
tree.insert(repo_path_buf("a3/b1"), make_meta("50"))
.unwrap();
let tree_changed: Vec<_> = tree.finalize(vec![&p1, &p2]).unwrap().collect();
assert_eq!(tree_changed[0].0, repo_path_buf("a1"));
assert_eq!(tree_changed[0].3, get_hgid(&p1, repo_path("a1")));
assert_eq!(tree_changed[0].4, get_hgid(&p2, repo_path("a1")));
assert_eq!(tree_changed[1].0, repo_path_buf("a2/b2"));
assert_eq!(tree_changed[1].3, get_hgid(&p1, repo_path("a2/b2")));
assert_eq!(tree_changed[1].4, NULL_ID);
assert_eq!(tree_changed[2].0, repo_path_buf("a2"));
assert_eq!(tree_changed[3].0, repo_path_buf("a3"));
assert_eq!(tree_changed[3].3, get_hgid(&p2, repo_path("a3")));
assert_eq!(tree_changed[3].4, NULL_ID);
assert_eq!(tree_changed[4].0, RepoPathBuf::new());
assert_eq!(
vec![tree_changed[4].3, tree_changed[4].4],
vec![
get_hgid(&p1, RepoPath::empty()),
get_hgid(&p2, RepoPath::empty()),
]
);
}
#[test]
fn test_finalize_file_to_directory() {
let store = Arc::new(TestStore::new());
let mut tree1 = TreeManifest::ephemeral(store.clone());
tree1.insert(repo_path_buf("a1"), make_meta("10")).unwrap();
let tree1_changed: Vec<_> = tree1.finalize(vec![]).unwrap().collect();
assert_eq!(tree1_changed[0].0, RepoPathBuf::new());
assert_eq!(tree1_changed[0].3, NULL_ID);
let mut tree2 = TreeManifest::ephemeral(store.clone());
tree2
.insert(repo_path_buf("a1/b1"), make_meta("20"))
.unwrap();
let tree2_changed: Vec<_> = tree2.finalize(vec![&tree1]).unwrap().collect();
assert_eq!(tree2_changed[0].0, repo_path_buf("a1"));
assert_eq!(tree2_changed[0].3, NULL_ID);
assert_eq!(tree2_changed[1].0, RepoPathBuf::new());
assert_eq!(tree2_changed[1].3, tree1_changed[0].1);
assert_eq!(tree2_changed[1].4, NULL_ID);
let mut tree3 = TreeManifest::ephemeral(store.clone());
tree3.insert(repo_path_buf("a1"), make_meta("30")).unwrap();
let tree3_changed: Vec<_> = tree3.finalize(vec![&tree2]).unwrap().collect();
assert_eq!(tree3_changed[0].0, RepoPathBuf::new());
assert_eq!(tree3_changed[0].3, tree2_changed[1].1);
assert_eq!(tree3_changed[0].4, NULL_ID);
}
#[test]
fn test_finalize_on_durable() {
let store = Arc::new(TestStore::new());
let mut tree1 = TreeManifest::ephemeral(store.clone());
tree1
.insert(repo_path_buf("a1/b1/c1/d1"), make_meta("10"))
.unwrap();
tree1
.insert(repo_path_buf("a1/b2"), make_meta("20"))
.unwrap();
tree1
.insert(repo_path_buf("a2/b2/c2"), make_meta("30"))
.unwrap();
let _tree1_changed = tree1.finalize(vec![]).unwrap();
let mut tree2 = tree1.clone();
tree2
.insert(repo_path_buf("a1/b2"), make_meta("40"))
.unwrap();
tree2
.insert(repo_path_buf("a2/b2/c2"), make_meta("60"))
.unwrap();
tree2
.insert(repo_path_buf("a3/b1"), make_meta("50"))
.unwrap();
let tree_changed: Vec<_> = tree2.finalize(vec![&tree1]).unwrap().collect();
assert_eq!(
tree2.finalize(vec![&tree1]).unwrap().collect::<Vec<_>>(),
tree_changed,
);
}
#[test]
fn test_finalize_materialization() {
let store = Arc::new(TestStore::new());
let entry_1 = store::Entry::from_elements_hg(vec![
store_element("foo", "10", store::Flag::Directory),
store_element("baz", "20", store::Flag::File(FileType::Regular)),
]);
store
.insert(RepoPath::empty(), hgid("1"), entry_1.to_bytes())
.unwrap();
let parent = TreeManifest::durable(store.clone(), hgid("1"));
let entry_2 = store::Entry::from_elements_hg(vec![
store_element("foo", "10", store::Flag::Directory),
store_element("baz", "21", store::Flag::File(FileType::Regular)),
]);
store
.insert(RepoPath::empty(), hgid("2"), entry_2.to_bytes())
.unwrap();
let mut tree = TreeManifest::durable(store.clone(), hgid("2"));
let _changes: Vec<_> = tree.finalize(vec![&parent]).unwrap().collect();
// expecting the code to not panic
// the panic would be caused by materializing link (foo, 10) which
// doesn't have a store entry
}
#[test]
fn test_cursor_skip_on_root() {
let tree = TreeManifest::ephemeral(Arc::new(TestStore::new()));
let mut cursor = tree.root_cursor();
cursor.skip_subtree();
match cursor.step() {
Step::Success => panic!("should have reached the end of the tree"),
Step::End => {} // success
Step::Err(error) => panic!("{}", error),
}
}
#[test]
fn test_cursor_skip() {
fn step<'a>(cursor: &mut DfsCursor<'a>) {
match cursor.step() {
Step::Success => {}
Step::End => panic!("reached the end too soon"),
Step::Err(error) => panic!("{}", error),
}
}
let mut tree = TreeManifest::ephemeral(Arc::new(TestStore::new()));
tree.insert(repo_path_buf("a1"), make_meta("10")).unwrap();
tree.insert(repo_path_buf("a2/b2"), make_meta("20"))
.unwrap();
tree.insert(repo_path_buf("a3"), make_meta("30")).unwrap();
let mut cursor = tree.root_cursor();
step(&mut cursor);
assert_eq!(cursor.path(), RepoPath::empty());
step(&mut cursor);
assert_eq!(cursor.path(), RepoPath::from_str("a1").unwrap());
// Skip leaf
cursor.skip_subtree();
step(&mut cursor);
assert_eq!(cursor.path(), RepoPath::from_str("a2").unwrap());
// Skip directory
cursor.skip_subtree();
step(&mut cursor);
assert_eq!(cursor.path(), RepoPath::from_str("a3").unwrap());
        // Skip on the element before Step::End
cursor.skip_subtree();
match cursor.step() {
Step::Success => panic!("should have reached the end of the tree"),
Step::End => {} // success
Step::Err(error) => panic!("{}", error),
}
}
#[test]
fn test_debug() {
use std::fmt::Write;
let store = Arc::new(TestStore::new());
let mut tree = TreeManifest::ephemeral(store.clone());
tree.insert(repo_path_buf("a1/b1/c1/d1"), make_meta("10"))
.unwrap();
let _hgid = tree.flush().unwrap();
tree.insert(repo_path_buf("a1/b2"), make_meta("20"))
.unwrap();
tree.insert(repo_path_buf("a2/b2/c2"), make_meta("30"))
.unwrap();
let mut output = String::new();
write!(output, "{:?}", tree).unwrap();
assert_eq!(
output,
"Root (Ephemeral)\n\
| a1 (Ephemeral)\n\
| | b1 (Durable, 4f75b40350c5a77ea27d3287b371016e2d940bab)\n\
| | | c1 (Durable, 4495bc0cc4093ed880fe1eb1489635f3cddcf04d)\n\
| | | | d1 (File, 0000000000000000000000000000000000000010, Regular)\n\
| | b2 (File, 0000000000000000000000000000000000000020, Regular)\n\
| a2 (Ephemeral)\n\
| | b2 (Ephemeral)\n\
| | | c2 (File, 0000000000000000000000000000000000000030, Regular)\n\
"
);
}
#[test]
fn test_compat_subtree_diff() {
let store = Arc::new(TestStore::new());
// add ("", 1), ("foo", 11), ("baz", 21), ("foo/bar", 111)
let root_1_entry = store::Entry::from_elements_hg(vec![
store_element("foo", "11", store::Flag::Directory),
store_element("baz", "21", store::Flag::File(FileType::Regular)),
]);
store
.insert(
RepoPath::empty(),
hgid("1"),
root_1_entry.clone().to_bytes(),
)
.unwrap();
let foo_11_entry = store::Entry::from_elements_hg(vec![store_element(
"bar",
"111",
store::Flag::File(FileType::Regular),
)]);
store
.insert(
repo_path("foo"),
hgid("11"),
foo_11_entry.clone().to_bytes(),
)
.unwrap();
// add ("", 2), ("foo", 12), ("baz", 21), ("foo/bar", 112)
let root_2_entry = store::Entry::from_elements_hg(vec![
store_element("foo", "12", store::Flag::Directory),
store_element("baz", "21", store::Flag::File(FileType::Regular)),
]);
store
.insert(RepoPath::empty(), hgid("2"), root_2_entry.to_bytes())
.unwrap();
let foo_12_entry = store::Entry::from_elements_hg(vec![store_element(
"bar",
"112",
store::Flag::File(FileType::Regular),
)]);
store
.insert(repo_path("foo"), hgid("12"), foo_12_entry.to_bytes())
.unwrap();
assert_eq!(
compat_subtree_diff(
store.clone(),
RepoPath::empty(),
hgid("1"),
vec![hgid("2")],
3
)
.unwrap(),
vec![
(
repo_path_buf("foo"),
hgid("11"),
vec![hgid("12")],
foo_11_entry.clone().to_bytes()
),
(
RepoPathBuf::new(),
hgid("1"),
vec![hgid("2")],
root_1_entry.clone().to_bytes()
),
]
);
assert_eq!(
compat_subtree_diff(
store.clone(),
RepoPath::empty(),
hgid("1"),
vec![hgid("2")],
1
)
.unwrap(),
vec![(
RepoPathBuf::new(),
hgid("1"),
vec![hgid("2")],
root_1_entry.clone().to_bytes()
),]
);
assert_eq!(
compat_subtree_diff(
store.clone(),
repo_path("foo"),
hgid("11"),
vec![hgid("12")],
3
)
.unwrap(),
vec![(
repo_path_buf("foo"),
hgid("11"),
vec![hgid("12")],
foo_11_entry.clone().to_bytes()
),]
);
assert_eq!(
compat_subtree_diff(
store.clone(),
RepoPath::empty(),
hgid("1"),
vec![hgid("1")],
3
)
.unwrap(),
vec![]
);
assert_eq!(
compat_subtree_diff(
store.clone(),
repo_path("foo"),
hgid("11"),
vec![hgid("11")],
3
)
.unwrap(),
vec![]
);
// it is illegal to call compat_subtree_diff with "baz" but we can't validate for it
}
#[test]
fn test_compat_subtree_diff_file_to_directory() {
let store = Arc::new(TestStore::new());
// add ("", 1), ("foo", 11)
let root_1_entry = store::Entry::from_elements_hg(vec![store_element(
"foo",
"11",
store::Flag::File(FileType::Regular),
)]);
store
.insert(
RepoPath::empty(),
hgid("1"),
root_1_entry.clone().to_bytes(),
)
.unwrap();
// add ("", 2), ("foo", 12), ("foo/bar", 121)
let root_2_entry = store::Entry::from_elements_hg(vec![store_element(
"foo",
"12",
store::Flag::Directory,
)]);
store
.insert(
RepoPath::empty(),
hgid("2"),
root_2_entry.clone().to_bytes(),
)
.unwrap();
let foo_12_entry = store::Entry::from_elements_hg(vec![store_element(
"bar",
"121",
store::Flag::File(FileType::Regular),
)]);
store
.insert(
repo_path("foo"),
hgid("12"),
foo_12_entry.clone().to_bytes(),
)
.unwrap();
assert_eq!(
compat_subtree_diff(
store.clone(),
RepoPath::empty(),
hgid("2"),
vec![hgid("1")],
3
)
.unwrap(),
vec![
(
repo_path_buf("foo"),
hgid("12"),
vec![],
foo_12_entry.clone().to_bytes()
),
(
RepoPathBuf::new(),
hgid("2"),
vec![hgid("1")],
root_2_entry.clone().to_bytes()
),
]
);
}
#[test]
fn test_list() {
test_list_format(TreeFormat::Git);
test_list_format(TreeFormat::Hg);
}
fn test_list_format(format: TreeFormat) {
let mut tree = TreeManifest::ephemeral(Arc::new(TestStore::new().with_format(format)));
let c1_meta = make_meta("10");
tree.insert(repo_path_buf("a1/b1/c1"), c1_meta).unwrap();
let b2_meta = make_meta("20");
tree.insert(repo_path_buf("a1/b2"), b2_meta).unwrap();
let _hgid = tree.flush().unwrap();
let c2_meta = make_meta("30");
tree.insert(repo_path_buf("a2/b3/c2"), c2_meta).unwrap();
let b4_meta = make_meta("40");
tree.insert(repo_path_buf("a2/b4"), b4_meta).unwrap();
assert_eq!(tree.list(repo_path("not_found")).unwrap(), List::NotFound);
assert_eq!(tree.list(repo_path("a1/b1/c1")).unwrap(), List::File);
assert_eq!(
tree.list(repo_path("a1/b1")).unwrap(),
List::Directory(vec![(
path_component_buf("c1"),
FsNodeMetadata::File(c1_meta)
)]),
);
assert_eq!(
tree.list(repo_path("a1")).unwrap(),
List::Directory(vec![
(
path_component_buf("b1"),
tree.get(repo_path("a1/b1")).unwrap().unwrap()
),
(path_component_buf("b2"), FsNodeMetadata::File(b2_meta)),
]),
);
assert_eq!(tree.list(repo_path("a2/b3/c2")).unwrap(), List::File);
assert_eq!(
tree.list(repo_path("a2/b3")).unwrap(),
List::Directory(vec![(
path_component_buf("c2"),
FsNodeMetadata::File(c2_meta)
)]),
);
assert_eq!(
tree.list(repo_path("a2")).unwrap(),
List::Directory(vec![
(path_component_buf("b3"), FsNodeMetadata::Directory(None)),
(path_component_buf("b4"), FsNodeMetadata::File(b4_meta)),
]),
);
assert_eq!(
tree.list(RepoPath::empty()).unwrap(),
List::Directory(vec![
(
path_component_buf("a1"),
tree.get(repo_path("a1")).unwrap().unwrap()
),
(path_component_buf("a2"), FsNodeMetadata::Directory(None)),
]),
);
}
}<|fim▁end|>
|
match child {
None => {
must_insert = true;
|
<|file_name|>Nyaa.go<|end_file_name|><|fim▁begin|>package arn
import (
"fmt"
"regexp"
"strings"
)
type nyaaAnimeProvider struct{}<|fim▁hole|>
// Nyaa anime provider (singleton)
var Nyaa = new(nyaaAnimeProvider)
var nyaaInvalidCharsRegex = regexp.MustCompile(`[^[:alnum:]!']`)
var nyaaTVRegex = regexp.MustCompile(` \(?TV\)?`)
// GetLink retrieves the Nyaa title for the given anime
func (nyaa *nyaaAnimeProvider) GetLink(anime *Anime, additionalSearchTerm string) string {
searchTitle := nyaa.GetTitle(anime) + "+" + additionalSearchTerm
searchTitle = strings.Replace(searchTitle, " ", "+", -1)
quality := ""
subs := ""
nyaaSuffix := fmt.Sprintf("?f=0&c=1_2&q=%s+%s+%s&s=seeders&o=desc", searchTitle, quality, subs)
nyaaSuffix = strings.Replace(nyaaSuffix, "++", "+", -1)
return "https://nyaa.si/" + nyaaSuffix
}
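// Illustrative usage sketch, not part of the original file (the variable names and
// the extra search term are invented): building a Nyaa search link for a specific
// release quality.
//
//	link := Nyaa.GetLink(anime, "1080p")
//	// link points at a https://nyaa.si/ search for the anime's canonical title,
//	// restricted to the anime category and sorted by seeders.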
// GetTitle retrieves the Nyaa title for the given anime
func (nyaa *nyaaAnimeProvider) GetTitle(anime *Anime) string {
return nyaa.BuildTitle(anime.Title.Canonical)
}
// BuildTitle tries to create a title for use on Nyaa
func (nyaa *nyaaAnimeProvider) BuildTitle(title string) string {
if title == "" {
return ""
}
title = nyaaInvalidCharsRegex.ReplaceAllString(title, " ")
title = nyaaTVRegex.ReplaceAllString(title, "")
	title = strings.Replace(title, "  ", " ", -1)
title = strings.TrimSpace(title)
return title
}<|fim▁end|>
| |
<|file_name|>test_fan.py<|end_file_name|><|fim▁begin|>"""Tests for the Bond fan device."""
from datetime import timedelta
from typing import Optional
from bond_api import Action, DeviceType, Direction
from homeassistant import core
from homeassistant.components import fan
from homeassistant.components.fan import (
ATTR_DIRECTION,
ATTR_SPEED,
ATTR_SPEED_LIST,
DIRECTION_FORWARD,
DIRECTION_REVERSE,
DOMAIN as FAN_DOMAIN,
SERVICE_SET_DIRECTION,
SERVICE_SET_SPEED,
SPEED_OFF,
)
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.helpers.entity_registry import EntityRegistry
from homeassistant.util import utcnow
from .common import (
help_test_entity_available,
patch_bond_action,
patch_bond_device_state,
setup_platform,
)
from tests.common import async_fire_time_changed
def ceiling_fan(name: str):
"""Create a ceiling fan with given name."""
return {
"name": name,
"type": DeviceType.CEILING_FAN,
"actions": ["SetSpeed", "SetDirection"],
}
async def turn_fan_on(
hass: core.HomeAssistant, fan_id: str, speed: Optional[str] = None
) -> None:
"""Turn the fan on at the specified speed."""
service_data = {ATTR_ENTITY_ID: fan_id}
if speed:
service_data[fan.ATTR_SPEED] = speed
await hass.services.async_call(
FAN_DOMAIN,
SERVICE_TURN_ON,
service_data=service_data,
blocking=True,
)
await hass.async_block_till_done()
async def test_entity_registry(hass: core.HomeAssistant):
"""Tests that the devices are registered in the entity registry."""
await setup_platform(
hass,
FAN_DOMAIN,
ceiling_fan("name-1"),
bond_version={"bondid": "test-hub-id"},
bond_device_id="test-device-id",
)
registry: EntityRegistry = await hass.helpers.entity_registry.async_get_registry()
entity = registry.entities["fan.name_1"]
assert entity.unique_id == "test-hub-id_test-device-id"
async def test_non_standard_speed_list(hass: core.HomeAssistant):
"""Tests that the device is registered with custom speed list if number of supported speeds differs form 3."""
await setup_platform(
hass,
FAN_DOMAIN,
ceiling_fan("name-1"),
bond_device_id="test-device-id",
props={"max_speed": 6},
)
actual_speeds = hass.states.get("fan.name_1").attributes[ATTR_SPEED_LIST]
assert actual_speeds == [
fan.SPEED_OFF,
fan.SPEED_LOW,
fan.SPEED_MEDIUM,
fan.SPEED_HIGH,
]
with patch_bond_device_state():
with patch_bond_action() as mock_set_speed_low:
await turn_fan_on(hass, "fan.name_1", fan.SPEED_LOW)
mock_set_speed_low.assert_called_once_with(
"test-device-id", Action.set_speed(1)
)
with patch_bond_action() as mock_set_speed_medium:
await turn_fan_on(hass, "fan.name_1", fan.SPEED_MEDIUM)
mock_set_speed_medium.assert_called_once_with(
"test-device-id", Action.set_speed(3)
)
with patch_bond_action() as mock_set_speed_high:
await turn_fan_on(hass, "fan.name_1", fan.SPEED_HIGH)
mock_set_speed_high.assert_called_once_with(
"test-device-id", Action.set_speed(6)
)
async def test_fan_speed_with_no_max_speed(hass: core.HomeAssistant):
"""Tests that fans without max speed (increase/decrease controls) map speed to HA standard."""
await setup_platform(
hass,
FAN_DOMAIN,
ceiling_fan("name-1"),
bond_device_id="test-device-id",
props={"no": "max_speed"},
state={"power": 1, "speed": 14},
)
assert hass.states.get("fan.name_1").attributes["speed"] == fan.SPEED_HIGH
async def test_turn_on_fan_with_speed(hass: core.HomeAssistant):
"""Tests that turn on command delegates to set speed API."""
await setup_platform(
hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id"
)
with patch_bond_action() as mock_set_speed, patch_bond_device_state():
await turn_fan_on(hass, "fan.name_1", fan.SPEED_LOW)
mock_set_speed.assert_called_with("test-device-id", Action.set_speed(1))
async def test_turn_on_fan_without_speed(hass: core.HomeAssistant):
"""Tests that turn on command delegates to turn on API."""
await setup_platform(
hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id"
)
with patch_bond_action() as mock_turn_on, patch_bond_device_state():
await turn_fan_on(hass, "fan.name_1")
mock_turn_on.assert_called_with("test-device-id", Action.turn_on())
async def test_turn_on_fan_with_off_speed(hass: core.HomeAssistant):
"""Tests that turn on command delegates to turn off API."""
await setup_platform(
hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id"
)
with patch_bond_action() as mock_turn_off, patch_bond_device_state():
await turn_fan_on(hass, "fan.name_1", fan.SPEED_OFF)
mock_turn_off.assert_called_with("test-device-id", Action.turn_off())
async def test_set_speed_off(hass: core.HomeAssistant):
"""Tests that set_speed(off) command delegates to turn off API."""
await setup_platform(
hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id"
)
with patch_bond_action() as mock_turn_off, patch_bond_device_state():
await hass.services.async_call(
FAN_DOMAIN,
SERVICE_SET_SPEED,
service_data={ATTR_ENTITY_ID: "fan.name_1", ATTR_SPEED: SPEED_OFF},
blocking=True,
)
await hass.async_block_till_done()
mock_turn_off.assert_called_with("test-device-id", Action.turn_off())
async def test_turn_off_fan(hass: core.HomeAssistant):
"""Tests that turn off command delegates to API."""
await setup_platform(
hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id"
)
with patch_bond_action() as mock_turn_off, patch_bond_device_state():
await hass.services.async_call(<|fim▁hole|> )
await hass.async_block_till_done()
mock_turn_off.assert_called_once_with("test-device-id", Action.turn_off())
async def test_update_reports_fan_on(hass: core.HomeAssistant):
"""Tests that update command sets correct state when Bond API reports fan power is on."""
await setup_platform(hass, FAN_DOMAIN, ceiling_fan("name-1"))
with patch_bond_device_state(return_value={"power": 1, "speed": 1}):
async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
await hass.async_block_till_done()
assert hass.states.get("fan.name_1").state == "on"
async def test_update_reports_fan_off(hass: core.HomeAssistant):
"""Tests that update command sets correct state when Bond API reports fan power is off."""
await setup_platform(hass, FAN_DOMAIN, ceiling_fan("name-1"))
with patch_bond_device_state(return_value={"power": 0, "speed": 1}):
async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
await hass.async_block_till_done()
assert hass.states.get("fan.name_1").state == "off"
async def test_update_reports_direction_forward(hass: core.HomeAssistant):
"""Tests that update command sets correct direction when Bond API reports fan direction is forward."""
await setup_platform(hass, FAN_DOMAIN, ceiling_fan("name-1"))
with patch_bond_device_state(return_value={"direction": Direction.FORWARD}):
async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
await hass.async_block_till_done()
assert hass.states.get("fan.name_1").attributes[ATTR_DIRECTION] == DIRECTION_FORWARD
async def test_update_reports_direction_reverse(hass: core.HomeAssistant):
"""Tests that update command sets correct direction when Bond API reports fan direction is reverse."""
await setup_platform(hass, FAN_DOMAIN, ceiling_fan("name-1"))
with patch_bond_device_state(return_value={"direction": Direction.REVERSE}):
async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
await hass.async_block_till_done()
assert hass.states.get("fan.name_1").attributes[ATTR_DIRECTION] == DIRECTION_REVERSE
async def test_set_fan_direction(hass: core.HomeAssistant):
"""Tests that set direction command delegates to API."""
await setup_platform(
hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id"
)
with patch_bond_action() as mock_set_direction, patch_bond_device_state():
await hass.services.async_call(
FAN_DOMAIN,
SERVICE_SET_DIRECTION,
{ATTR_ENTITY_ID: "fan.name_1", ATTR_DIRECTION: DIRECTION_FORWARD},
blocking=True,
)
await hass.async_block_till_done()
mock_set_direction.assert_called_once_with(
"test-device-id", Action.set_direction(Direction.FORWARD)
)
async def test_fan_available(hass: core.HomeAssistant):
"""Tests that available state is updated based on API errors."""
await help_test_entity_available(
hass, FAN_DOMAIN, ceiling_fan("name-1"), "fan.name_1"
)<|fim▁end|>
|
FAN_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "fan.name_1"},
blocking=True,
|
<|file_name|>SummaryParser.java<|end_file_name|><|fim▁begin|>package com.zanghongtu.blog.util;
/**
* 摘要
*/
public class SummaryParser {
private static final int SUMMARY_LENGTH = 256;
/**
     * Strips all HTML markup from the content and truncates
     * the result to at most 256 characters.
     * @param sourceStr the source string
     * @return the truncated summary string
*/
public static String getSummary(String sourceStr) {
if(StringUtils.isBlank(sourceStr)){
return "";
}
sourceStr= removeHTML(sourceStr);
sourceStr = sourceStr.trim().replaceAll("\t", " ").replaceAll("\n", " ")
.replaceAll("\r", " ").replaceAll(" ", " ")
.replaceAll("\\s+", " ");
if(sourceStr.length() > SUMMARY_LENGTH) {
sourceStr = sourceStr.substring(0, SUMMARY_LENGTH);
}
return sourceStr;
}
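    // Illustrative usage sketch, not part of the original class (the input HTML is
    // invented): producing a preview snippet from stored article content.
    //
    //   String html = "<p>First paragraph</p>\n<div>More <b>text</b> here</div>";
    //   String summary = SummaryParser.getSummary(html);
    //   // -> tags and entities stripped, whitespace collapsed, at most 256 characters.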
/**
     * Strips all HTML tags from the given string.
     *<|fim▁hole|> * @param source the source string
     * @return the string with all HTML tags removed
*/
private static String removeHTML(String source) {
if (source == null || source.length() == 0)
return "";
StringBuilder sb = new StringBuilder();
Character IN = '<', OUT = '>';
Character currentState = OUT;
int currentIndex = 0, nextIndex = -1;
for (Character c : source.toCharArray()) {
currentIndex++;
if (currentIndex >= nextIndex)
nextIndex = -1;
if (currentState == OUT && c != OUT && c != IN && c != '\"') {
if (c == '&') {
nextIndex = checkInHTMLCode(source, currentIndex);
if (nextIndex > -1)
nextIndex = currentIndex + nextIndex;
}
if (nextIndex == -1)
sb.append(c);
}
if (c == OUT)
currentState = OUT;
if (c == IN)
currentState = IN;
}
return sb.toString();
}
/**
     * Helper for removeHTML: checks whether an HTML escape sequence (e.g. &amp;) starts at the current position.
*
     * @param source the source string
     * @param start the position at which to start scanning
     * @return the offset just past the escape sequence, or -1 if none is found
*/
private static int checkInHTMLCode(String source, int start) {
int MAX_HTMLCODE_LEN = 10;
int index = 0;
String substr;
if ((source.length() - start - 1) < MAX_HTMLCODE_LEN)
substr = source.substring(start);
else {
substr = source.substring(start, start + MAX_HTMLCODE_LEN);
}
for (Character c : substr.toCharArray()) {
index++;
if (index > 1 && c == ';')
return index + 1;
if (c > 'z' || c < 'a')
return -1;
}
return -1;
}
}<|fim▁end|>
| |
<|file_name|>test_with_rabbitmq.py<|end_file_name|><|fim▁begin|>import os
from pymco.test import ctxt
from . import base
class RabbitMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'connector': 'rabbitmq',
'plugin.rabbitmq.vhost': '/mcollective',
'plugin.rabbitmq.pool.size': '1',
'plugin.rabbitmq.pool.1.host': 'localhost',
'plugin.rabbitmq.pool.1.port': '61613',<|fim▁hole|>
class TestWithRabbitMQMCo22x(base.MCollective22x, RabbitMQTestCase):
'''MCollective integration test case.'''
class TestWithRabbitMQMCo23x(base.MCollective23x, RabbitMQTestCase):
'''MCollective integration test case.'''
class TestWithRabbitMQMCo24x(base.MCollective24x, RabbitMQTestCase):
'''MCollective integration test case.'''
class TestWithRabbitMQSSLMCo23x(base.MCollective23x, RabbitMQTestCase):
"""MCollective integration test case."""
CTXT = {
'connector': 'rabbitmq',
'plugin.rabbitmq.vhost': '/mcollective',
'plugin.rabbitmq.pool.size': '1',
'plugin.rabbitmq.pool.1.host': 'localhost',
'plugin.rabbitmq.pool.1.port': 61612,
'plugin.rabbitmq.pool.1.user': 'mcollective',
'plugin.rabbitmq.pool.1.password': 'marionette',
'plugin.rabbitmq.pool.1.ssl': 'true',
'plugin.rabbitmq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
'fixtures/ca.pem'),
'plugin.rabbitmq.pool.1.ssl.key': os.path.join(
ctxt.ROOT,
'fixtures/activemq_private.pem'),
'plugin.rabbitmq.pool.1.ssl.cert': os.path.join(
ctxt.ROOT,
'fixtures/activemq_cert.pem',
),
}<|fim▁end|>
|
'plugin.rabbitmq.pool.1.user': 'mcollective',
'plugin.rabbitmq.pool.1.password': 'marionette',
}
|
<|file_name|>bg.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
MODX Evolution 1.0.5 = dace793f0e7de11aadc0ecf54e834d93
|
<|file_name|>kafka_to_mysql.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
######################################################################
#
# File: kafka_to_mysql.py
#
# Copyright 2015 TiVo Inc. All Rights Reserved.
#
######################################################################
"""
Usage: kafka_to_mysql.py <kafka_topic> <kafka_broker> <mysql-ip> <mysql-port> <mysql-user> <mysql-password> <mysql_table>
"""
import json
import MySQLdb
from kafka import KafkaClient, KafkaConsumer
import sys
def usage():
print __doc__
sys.exit(1)
def main():
# R0915: "too many statements in function (>50)"
# pylint: disable=R0915
if len(sys.argv) != 8:
print "Wrong number of arguments"
usage()
(kafka_topic, kafka_broker, mysql_host, mysql_port, mysql_user, mysql_password, mysql_table) = sys.argv[1:8]
sql_db = MySQLdb.connect(
host = mysql_host,
port = int(mysql_port),
user = mysql_user,
passwd = mysql_password)
query = sql_db.cursor()
client = KafkaClient(kafka_broker)
consumer = KafkaConsumer(kafka_topic, metadata_broker_list = [kafka_broker],
auto_commit_enable = False,
auto_offset_reset='smallest')
last_offsets = {}
partition_ids = client.get_partition_ids_for_topic(kafka_topic)
for partition in partition_ids:
offsets = consumer.get_partition_offsets(kafka_topic, partition, -1, 1)
print offsets
        # get_partition_offsets returns a tuple of offsets for the requested
        # partition, e.g. (15471,); assert on the expected shape before relying on it.
        # (Pdb) consumer.get_partition_offsets("topicname", 0, -1, 1)
        # (15471)
assert len(offsets) == 1
assert offsets[0] > 0
next_offset = offsets[0]
last_offset = next_offset - 1
last_offsets[partition] = last_offset
finished_partitions = set()
print last_offsets
count = 0
# mapping from primary key tuples, to row data
insert_batch = {}
insert_sql = None
for m in consumer:
if m.partition in finished_partitions:
continue
count += 1
payload = m.value
(first_line, rest) = payload.split("\r\n", 1)
(_notused, header_len, _body_len) = first_line.split(" ")
header_len = int(header_len)
body = rest[header_len:]
primary_key_str = m.key
# import pdb; pdb.set_trace()
primary_keys = json.loads(primary_key_str)
primary_tuples = sorted(primary_keys.items())
sorted_primary_key_names = [ k for (k,v) in primary_tuples ]
sorted_primary_key_values = [ int(v) for (k,v) in primary_tuples ]
if len(body) > 0:
# This is a write
data = json.loads(body)
# date fields have to be turned from a number back into a datetime object
date_fields = ['createDate', 'updateDate']
for d in date_fields:
if d not in data:
continue
val = data[d]
if val is None:
continue
if val == -62170156800000:
# this is hacky and a sign that i'm doing something wrong, I think.<|fim▁hole|> else:
val = val/1000
import datetime;
val = datetime.datetime.utcfromtimestamp(val)
data[d] = val
keys = [ k for (k, v) in sorted(data.items()) ]
values = [ v for (k, v) in sorted(data.items()) ]
keys_wo_primary = [ k for (k, v) in sorted(data.items()) ]
for p in sorted_primary_key_names:
keys_wo_primary.remove(p)
# e.g.
# insert into dbname.tablename (col1, col2) values (%s, %s) on duplicate key update col2 = values(col2)
# assuming that col1 is the primary key
insert_sql = """insert into %s """ % mysql_table
insert_sql += """ (%s) """ % (", ".join(keys))
insert_sql += " values (%s) " % (", ".join(["%s"] * len(values) ))
insert_sql += "on duplicate key update "
insert_sql += ", ".join(["%s = values(%s)" % (k, k) for k in keys_wo_primary ])
insert_batch[tuple(primary_tuples)] = tuple(values)
if len(insert_batch) > 5000:
query.executemany(insert_sql, insert_batch.values())
sql_db.commit()
insert_batch = {}
else:
# This is a delete
if len(insert_batch) > 0 and insert_sql is not None:
# flush all writes before processing any deletes
query.executemany(insert_sql, insert_batch.values())
sql_db.commit()
insert_batch = {}
# get the primary keys, and delete the row
where_clause = ' and '.join([ "%s = %%s" % k for k in sorted_primary_key_names ])
# e.g.
# delete from dbname.tablename where field1 = %s and field2 = %s
delete_sql = """delete from %s where %s""" % (mysql_table, where_clause)
values = tuple(sorted_primary_key_values)
query.execute(delete_sql, values)
sql_db.commit()
        # Stop once every partition has been consumed up to the last offset recorded at startup.
print "Partition %d Offset %d of %d" % (m.partition, m.offset, last_offsets.get(m.partition))
if m.offset >= last_offsets.get(m.partition):
finished_partitions.add(m.partition)
if len(finished_partitions) == len(last_offsets):
# All partitions are done.
break
if len(insert_batch) > 0:
# flush any remaining writes
query.executemany(insert_sql, insert_batch.values())
sql_db.commit()
insert_batch = {}
print "Imported %d messages into mysql" % count
if __name__ == "__main__":
main()<|fim▁end|>
|
val = "0000-00-00 00:00:00"
|