prompt (large_string, length 70-991k) | completion (large_string, length 0-1.02k) |
---|---|
<|file_name|>factory.ts<|end_file_name|><|fim▁begin|>class ErrorFactory {<|fim▁hole|> errorMessages: any;
status: number;
constructor(msg: any) {
super(`${errorName}: ${msg.toString()}`);
if (!(msg instanceof String) && msg instanceof Array) {
this.errorMessages = msg;
}
if (undefined !== status) {
this.status = status;
}
}
}
return NestedError;
}
}
const errorArr = [
{ name: 'NoEmailError', code: 401 },
{ name: 'PasswordMismatchError', code: 401 },
{ name: 'DuplicatedEntryError', code: 409 },
{ name: 'InvalidArgumentError', code: 400 },
{ name: 'InsufficientPermissionError', code: 403 },
{ name: 'NotFoundError', code: 404 },
{ name: 'NotAllowedError', code: 405 },
{ name: 'DuplicatedIssueError', code: 400 },
];
const errors: any = {};
const errorFactory = new ErrorFactory();
errorArr.forEach((error) => {
errors[error.name] = errorFactory.getError(error.name, error.code);
});
export { errors as Errors };<|fim▁end|> |
getError(errorName: string, status: number): any {
class NestedError extends Error { |
<|file_name|>trial.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
///////////////////////////////////////////////////////////
// Permission is hereby granted, free of charge,
// to any person obtaining a copy of
// this software and associated documentation files
// (the "Software"), to deal in the Software without
// restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute,
// sublicense, and/or sell copies of the Software, and
// to permit persons to whom the Software is furnished
// to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice
// shall be included in all copies or substantial portions
// of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL<|fim▁hole|>// DEALINGS IN THE SOFTWARE.
'''
__author__ = 'RobertIan'
__version__ = '0.2.5'
import argparse
import pygame
import picamera
import time
import datetime
import netifaces
import RPi.GPIO as GPIO
import os.path
import sys
import select
import os
class Trial:
def __init__(self, stim, starttime, feedornot):
## initialize display
pygame.display.init()
pygame.mouse.set_visible(False)
self.screen = pygame.display.set_mode((0,0),pygame.FULLSCREEN)
## assign stimulus
self.stimulus = stim
## timing
self.start = float(starttime)
self.tLength = 4*60 #four minute trial
self.feedDelay = 30 #thirty second delay
## GPIO setup
GPIO.setmode(GPIO.BCM)
self.feeder = 17 ##????
self.notfeeder = 5 ##????
self.feederin = 26 ##????
self.notfeederin = 25 ##????
if feedornot == 'feed':
self.feederin = self.feederin
self.feederout = self.feeder
elif feedornot == 'notfeed':
self.feederin = self.notfeederin
self.feederout = self.notfeeder
else:
## currently a print, should be changed to send a message to
#the client
print 'feeder not assigned'
self.safeQuit()
GPIO.setup(self.feederin, GPIO.IN)
GPIO.add_event_detect(self.feederin, GPIO.RISING)
GPIO.setup(self.feederout, GPIO.OUT)
GPIO.output(self.feederout, True)
def checkPiIP(self):
## query IP address from system
self.ip = netifaces.ifaddresses('eth0')[2][0]['addr']
def whatStimulus(self):
## locate stimulus in src folder
self.stim, extension = os.path.splitext(self.stimulus)
if extension == '.png' or extension == '.PNG' or extension == '.jpg' \
or extension == '.JPG':
## still image
try:
self.image = pygame.image.load('/home/pi/ethoStim/individualtesting/src/10.png')
except IOError:
## currently a print, should be changed to send a message to
#the client
print 'are you sure this file exists? check the src folder \
only jpg/JPG, png/PNG formats'
self.safeQuit()
def cameraInit(self):
## adjust camera settings here
self.camera = picamera.PiCamera()
self.camera.resolution = (1920, 1080)
self.camera.framerate = 30
self.camera.autofocus = False
self.camera.awb_mode = 'fluorescent'
def videoFileName(self, species, tround, sl, sex, fishid, day, session,
conditionside):
## adjust video naming convention here
self.vidout = ('data/'+str(self.ip)+'/'+(str(species)+'_'+str(tround)
+'_'+str(sl)+'_'+str(sex) +'_'+str(fishid)+'_'+str(day)+'_'+
str(session)+'_' +str(self.stim)+'_'+str(conditionside)))
def startRecording(self):
self.camera.start_recording(self.vidout+ '.h264') #output video
def stopRecording(self):
self.camera.stop_recording()
def cameraQuit(self):
self.camera.close()
def safeQuit(self):
GPIO.output(self.feeder, True) #reset feeder ????
GPIO.output(self.notfeeder, True) #reset notfeeder ????
GPIO.cleanup() #reset all GPIOs
pygame.quit()
exit()
def mainLoop(self, camera):
## hang until assigned start time
while time.time()<self.start:
print time.time()-self.start
pass
## start timer
self.startT = time.time()
fed = False # feed delay control variable
## start recording
if camera == 'record':
self.startRecording()
elif camera == 'notrecord':
pass
## display stimulus/start main loop
while ((time.time() - self.startT) < self.tLength):
pygame.display.flip()
self.screen.blit(self.image, (250,100)) # location of stimulus
## control feeder delay
try:
if (time.time() - self.startT) > self.feedDelay:
if fed:
pass
elif GPIO.event_detected(self.feederin):
time.sleep(1.0)
GPIO.output(self.feederout,True)
fed = True
else:
GPIO.output(self.feederout, False)
except KeyboardInterrupt:
self.safeQuit()
if __name__ == '__main__':
## load in command line argumenents
ap = argparse.ArgumentParser()
ap.add_argument("-f","--fish", help="ID of fish in tank")
ap.add_argument("-ts", "--trainedStim",help="numerosity stimulus the individual is being trained to, e.g. 12")
ap.add_argument("-ps", "--presentedStim", help="stimulus being presented with this raspberry pi")
ap.add_argument("-d","--day", help="experiment day, e.g. 1-7")
ap.add_argument("-s","--session", help="trial session, e.g. 1-4")
ap.add_argument("-fs","--fedSide", help="side(self.ip feed on/conditioned side")
ap.add_argument("-x","--sex", help="fish sex")
ap.add_argument("-p","--proportion", help="training ratio")
ap.add_argument("-sp", "--species", help="species name")
ap.add_argument("-sl","--fishstandardlength", help="standard length of the")
ap.add_argument("-r","--round", help="training round")
ap.add_argument("-fd", "--feed", help="feed with this stimulus",action="store_true")
ap.add_argument("-c", "--camera",help="do you want to record using this pi?",action="store_true")
ap.add_argument("-m:", "--startTime", help="time since epoch that you want to start your trial")
args = vars(ap.parse_args())
## parse trial details and pass it to the Trial class
if args.["feed"]:
T = Trial(args["presentedStim"], args["startTime"], 'feed')
else:
T = Trial(args["presentedStim"], args["startTime"], 'notfeed')
T.checkPiIP()
T.whatStimulus()
T.videoFileName(args["species"], args["round"], args["fishstandardlength"],
args["sex"], args["fish"], args["day"], args["session"], args["fedSide"])
## initialize camera IF attached to Pi
if args["camera"]:
T.cameraInit()
else:
pass
## start camera recording IF attached to Pi and begin mainloop of Trial
if args["camera"]:
T.mainLoop('record')
else:
T.mainLoop('notrecord')
## stop camera recording IF attached to Pi
if args["camera"]:
T.stopRecording()
else:
pass
## cleanup camera IF attached to Pi
if args["camera"]:
T.cameraQuit()
## cleanup remaining processes and exit
T.safeQuit()<|fim▁end|> | // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER |
<|file_name|>TemporalConvolution.py<|end_file_name|><|fim▁begin|>import math
import torch
from .Module import Module
class TemporalConvolution(Module):
def __init__(self, inputFrameSize, outputFrameSize, kW, dW=1):
super(TemporalConvolution, self).__init__()
self.inputFrameSize = inputFrameSize
self.outputFrameSize = outputFrameSize
self.kW = kW
self.dW = dW
self.weight = torch.Tensor(outputFrameSize, inputFrameSize * kW)
self.bias = torch.Tensor(outputFrameSize)
self.gradWeight = torch.Tensor(outputFrameSize, inputFrameSize * kW)
self.gradBias = torch.Tensor(outputFrameSize)
self.reset()
def reset(self, stdv=None):
if stdv is not None:
stdv = stdv * math.sqrt(3)
else:
stdv = 1. / math.sqrt(self.kW * self.inputFrameSize)
self.weight.uniform_(-stdv, stdv)
self.bias.uniform_(-stdv, stdv)<|fim▁hole|>
def updateOutput(self, input):
self._backend.TemporalConvolution_updateOutput(
self._backend.library_state,
input,
self.output,
self.weight,
self.bias,
self.kW,
self.dW,
self.inputFrameSize,
self.outputFrameSize
)
return self.output
def updateGradInput(self, input, gradOutput):
if self.gradInput is None:
return
self._backend.TemporalConvolution_updateGradInput(
self._backend.library_state,
input,
gradOutput,
self.gradInput,
self.weight,
self.kW,
self.dW
)
return self.gradInput
def accGradParameters(self, input, gradOutput, scale=1):
self._backend.TemporalConvolution_accGradParameters(
self._backend.library_state,
input,
gradOutput,
self.gradWeight,
self.gradBias,
self.kW,
self.dW,
scale
)<|fim▁end|> | |
<|file_name|>autocomplete_match_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/autocomplete/autocomplete_match.h"
#include "base/basictypes.h"
#include "testing/gtest/include/gtest/gtest.h"
TEST(AutocompleteMatchTest, MoreRelevant) {
struct RelevantCases {
int r1;
int r2;
bool expected_result;
} cases[] = {
{ 10, 0, true },
{ 10, -5, true },
{ -5, 10, false },
{ 0, 10, false },
{ -10, -5, false },
{ -5, -10, true },
};<|fim▁hole|> AutocompleteMatch m2(NULL, 0, false, AutocompleteMatch::URL_WHAT_YOU_TYPED);
for (size_t i = 0; i < ARRAYSIZE_UNSAFE(cases); ++i) {
m1.relevance = cases[i].r1;
m2.relevance = cases[i].r2;
EXPECT_EQ(cases[i].expected_result,
AutocompleteMatch::MoreRelevant(m1, m2));
}
}
TEST(AutocompleteMatchTest, MergeClassifications) {
// Merging two empty vectors should result in an empty vector.
EXPECT_EQ(std::string(),
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ACMatchClassifications(),
AutocompleteMatch::ACMatchClassifications())));
// If one vector is empty and the other is "trivial" but non-empty (i.e. (0,
// NONE)), the non-empty vector should be returned.
EXPECT_EQ("0,0",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ClassificationsFromString("0,0"),
AutocompleteMatch::ACMatchClassifications())));
EXPECT_EQ("0,0",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ACMatchClassifications(),
AutocompleteMatch::ClassificationsFromString("0,0"))));
// Ditto if the one-entry vector is non-trivial.
EXPECT_EQ("0,1",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ClassificationsFromString("0,1"),
AutocompleteMatch::ACMatchClassifications())));
EXPECT_EQ("0,1",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ACMatchClassifications(),
AutocompleteMatch::ClassificationsFromString("0,1"))));
// Merge an unstyled one-entry vector with a styled one-entry vector.
EXPECT_EQ("0,1",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ClassificationsFromString("0,0"),
AutocompleteMatch::ClassificationsFromString("0,1"))));
// Test simple cases of overlap.
EXPECT_EQ("0,3," "1,2",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ClassificationsFromString("0,1," "1,0"),
AutocompleteMatch::ClassificationsFromString("0,2"))));
EXPECT_EQ("0,3," "1,2",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ClassificationsFromString("0,2"),
AutocompleteMatch::ClassificationsFromString("0,1," "1,0"))));
// Test the case where both vectors have classifications at the same
// positions.
EXPECT_EQ("0,3",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ClassificationsFromString("0,1," "1,2"),
AutocompleteMatch::ClassificationsFromString("0,2," "1,1"))));
// Test an arbitrary complicated case.
EXPECT_EQ("0,2," "1,0," "2,1," "4,3," "5,7," "6,3," "7,7," "15,1," "17,0",
AutocompleteMatch::ClassificationsToString(
AutocompleteMatch::MergeClassifications(
AutocompleteMatch::ClassificationsFromString(
"0,0," "2,1," "4,3," "7,7," "10,6," "15,0"),
AutocompleteMatch::ClassificationsFromString(
"0,2," "1,0," "5,7," "6,1," "17,0"))));
}<|fim▁end|> |
AutocompleteMatch m1(NULL, 0, false, AutocompleteMatch::URL_WHAT_YOU_TYPED); |
<|file_name|>response.rs<|end_file_name|><|fim▁begin|>use rocket::http::Status;
use rocket::response::status;
use rocket::response::{Response, Responder};
use error::ApiError;
pub struct ApiResponse<T>(pub Status, pub T);
impl<'r, T: Responder<'r>> Responder<'r> for ApiResponse<T> {
fn respond(self) -> Result<Response<'r>, Status> {<|fim▁hole|>
pub type ApiResult<T> = Result<ApiResponse<T>, ApiError>;<|fim▁end|> | status::Custom(self.0, self.1).respond()
}
} |
<|file_name|>karma-standalone.conf.js<|end_file_name|><|fim▁begin|>/* Karma configuration for standalone build */
'use strict';
module.exports = function (config) {
console.log();
console.log('Browser (Standalone) Tests');<|fim▁hole|>
config.set({
basePath: '.',
frameworks: ['mocha'],
files: [
{pattern: 'swagger-tools-standalone.js', watch: false, included: true},
{pattern: 'test-browser.js', watch: false, included: true}
],
client: {
mocha: {
reporter: 'html',
timeout: 5000,
ui: 'bdd'
}
},
plugins: [
'karma-mocha',
'karma-mocha-reporter',
'karma-phantomjs-launcher'
],
browsers: ['PhantomJS'],
reporters: ['mocha'],
colors: true,
autoWatch: false,
singleRun: true
});
};<|fim▁end|> | console.log(); |
<|file_name|>luhn_test.go<|end_file_name|><|fim▁begin|>package luhn
import "testing"
var validTests = []struct {
n string
ok bool
}{<|fim▁hole|> {"8763", true},
{" ", false},
{"", false},
{"2323 2005 7766 3554", true},
}
var addTests = []struct{ raw, luhn string }{
{"123", "1230"},
{"873956", "8739567"},
{"837263756", "8372637564"},
{"2323 2005 7766 355", "2323 2005 7766 3554"},
// bonus Unicode cases
// {"2323·2005·7766·355", "2323·2005·7766·3554"},
// {"123", "1230"},
}
func TestValid(t *testing.T) {
for _, test := range validTests {
if ok := Valid(test.n); ok != test.ok {
t.Fatalf("Valid(%s) = %t, want %t.", test.n, ok, test.ok)
}
}
}
func TestAddCheck(t *testing.T) {
for _, test := range addTests {
if luhn := AddCheck(test.raw); luhn != test.luhn {
t.Fatalf("AddCheck(%s) = %s, want %s.", test.raw, luhn, test.luhn)
}
}
}
func BenchmarkValid(b *testing.B) {
for i := 0; i < b.N; i++ {
Valid("2323 2005 7766 3554")
}
}
func BenchmarkAddCheck(b *testing.B) {
for i := 0; i < b.N; i++ {
AddCheck("2323 2005 7766 355")
}
}<|fim▁end|> | {"738", false},
{"8739567", true},
{"1111", false}, |
<|file_name|>createFileName.ts<|end_file_name|><|fim▁begin|>import { CONFIG_FOLDER } from '../constants';<|fim▁hole|>import { FileType } from '../types';
export function createFileName(type: FileType, name: string, ext: string, suffix?: string): string {
// boost.js
let fileName = name;
// .boost.js
if (type === 'branch') {
fileName = `.${name}`;
}
// .config/boost.js
if (type === 'root-folder') {
fileName = `${CONFIG_FOLDER}/${name}`;
}
// boost.config.js
if (type === 'root-file') {
fileName = name + CONFIG_FOLDER;
}
if (suffix) {
fileName += `.${suffix}`;
}
fileName += `.${ext}`;
return fileName;
}<|fim▁end|> | |
<|file_name|>adt-nullary-enums.rs<|end_file_name|><|fim▁begin|>// Unit test for the "user substitutions" that are annotated on each
// node.
#![allow(warnings)]
use std::cell::Cell;
enum SomeEnum<T> {
SomeVariant(T),
SomeOtherVariant,
}
fn combine<T>(_: T, _: T) { }
fn no_annot() {
let c = 66;
combine(SomeEnum::SomeVariant(Cell::new(&c)), SomeEnum::SomeOtherVariant);
}
fn annot_underscore() {
let c = 66;
combine(SomeEnum::SomeVariant(Cell::new(&c)), SomeEnum::SomeOtherVariant::<Cell<_>>);
}<|fim▁hole|> let c = 66;
combine(SomeEnum::SomeVariant(Cell::new(&c)), SomeEnum::SomeOtherVariant::<Cell<&u32>>);
}
fn annot_reference_static_lifetime() {
let c = 66;
combine(
SomeEnum::SomeVariant(Cell::new(&c)), //~ ERROR
SomeEnum::SomeOtherVariant::<Cell<&'static u32>>,
);
}
fn annot_reference_named_lifetime<'a>(_d: &'a u32) {
let c = 66;
combine(
SomeEnum::SomeVariant(Cell::new(&c)), //~ ERROR
SomeEnum::SomeOtherVariant::<Cell<&'a u32>>,
);
}
fn annot_reference_named_lifetime_ok<'a>(c: &'a u32) {
combine(SomeEnum::SomeVariant(Cell::new(c)), SomeEnum::SomeOtherVariant::<Cell<&'a u32>>);
}
fn annot_reference_named_lifetime_in_closure<'a>(_: &'a u32) {
let _closure = || {
let c = 66;
combine(
SomeEnum::SomeVariant(Cell::new(&c)), //~ ERROR
SomeEnum::SomeOtherVariant::<Cell<&'a u32>>,
);
};
}
fn annot_reference_named_lifetime_in_closure_ok<'a>(c: &'a u32) {
let _closure = || {
combine(
SomeEnum::SomeVariant(Cell::new(c)),
SomeEnum::SomeOtherVariant::<Cell<&'a u32>>,
);
};
}
fn main() { }<|fim▁end|> |
fn annot_reference_any_lifetime() { |
<|file_name|>plan.go<|end_file_name|><|fim▁begin|>package command
import (
"context"
"fmt"
"strings"
"github.com/hashicorp/errwrap"
"github.com/hashicorp/terraform/backend"
"github.com/hashicorp/terraform/config"
"github.com/hashicorp/terraform/config/module"
)
// PlanCommand is a Command implementation that compares a Terraform
// configuration to an actual infrastructure and shows the differences.
type PlanCommand struct {
Meta
}
func (c *PlanCommand) Run(args []string) int {
var destroy, refresh, detailed bool
var outPath string
var moduleDepth int
args, err := c.Meta.process(args, true)
if err != nil {
return 1
}
cmdFlags := c.Meta.flagSet("plan")
cmdFlags.BoolVar(&destroy, "destroy", false, "destroy")
cmdFlags.BoolVar(&refresh, "refresh", true, "refresh")
c.addModuleDepthFlag(cmdFlags, &moduleDepth)
cmdFlags.StringVar(&outPath, "out", "", "path")
cmdFlags.IntVar(
&c.Meta.parallelism, "parallelism", DefaultParallelism, "parallelism")
cmdFlags.StringVar(&c.Meta.statePath, "state", "", "path")
cmdFlags.BoolVar(&detailed, "detailed-exitcode", false, "detailed-exitcode")
cmdFlags.BoolVar(&c.Meta.stateLock, "lock", true, "lock state")
cmdFlags.DurationVar(&c.Meta.stateLockTimeout, "lock-timeout", 0, "lock timeout")
cmdFlags.Usage = func() { c.Ui.Error(c.Help()) }
if err := cmdFlags.Parse(args); err != nil {
return 1
}
configPath, err := ModulePath(cmdFlags.Args())
if err != nil {
c.Ui.Error(err.Error())
return 1
}
// Check for user-supplied plugin path
if c.pluginPath, err = c.loadPluginPath(); err != nil {
c.Ui.Error(fmt.Sprintf("Error loading plugin path: %s", err))
return 1
}
// Check if the path is a plan
plan, err := c.Plan(configPath)
if err != nil {
c.Ui.Error(err.Error())
return 1
}
if plan != nil {
// Disable refreshing no matter what since we only want to show the plan
refresh = false
// Set the config path to empty for backend loading
configPath = ""
}
// Load the module if we don't have one yet (not running from plan)
var mod *module.Tree
if plan == nil {
mod, err = c.Module(configPath)
if err != nil {
err = errwrap.Wrapf("Failed to load root config module: {{err}}", err)
c.showDiagnostics(err)
return 1
}
}
var conf *config.Config
if mod != nil {
conf = mod.Config()
}
// Load the backend
b, err := c.Backend(&BackendOpts{
Config: conf,
Plan: plan,
})
if err != nil {
c.Ui.Error(fmt.Sprintf("Failed to load backend: %s", err))
return 1
}
// Build the operation
opReq := c.Operation()
opReq.Destroy = destroy
opReq.Module = mod
opReq.Plan = plan
opReq.PlanRefresh = refresh
opReq.PlanOutPath = outPath
opReq.Type = backend.OperationTypePlan
// Perform the operation
ctx, ctxCancel := context.WithCancel(context.Background())
defer ctxCancel()
op, err := b.Operation(ctx, opReq)
if err != nil {
c.Ui.Error(fmt.Sprintf("Error starting operation: %s", err))
return 1
}
select {
case <-c.ShutdownCh:
// Cancel our context so we can start gracefully exiting
ctxCancel()
// Notify the user
c.Ui.Output(outputInterrupt)
// Still get the result, since there is still one
select {
case <-c.ShutdownCh:
c.Ui.Error(
"Two interrupts received. Exiting immediately")
return 1
case <-op.Done():
}
case <-op.Done():
if err := op.Err; err != nil {
c.showDiagnostics(err)
return 1
}
}
/*
err = terraform.SetDebugInfo(DefaultDataDir)
if err != nil {
c.Ui.Error(err.Error())
return 1
}
*/
if detailed && !op.PlanEmpty {
return 2
}
return 0
}
func (c *PlanCommand) Help() string {
helpText := `
Usage: terraform plan [options] [DIR-OR-PLAN]
Generates an execution plan for Terraform.
This execution plan can be reviewed prior to running apply to get a
sense for what Terraform will do. Optionally, the plan can be saved to
a Terraform plan file, and apply can take this plan file to execute
this plan exactly.
If a saved plan is passed as an argument, this command will output
the saved plan contents. It will not modify the given plan.
Options:
-destroy If set, a plan will be generated to destroy all resources
managed by the given configuration and state.
-detailed-exitcode Return detailed exit codes when the command exits. This
will change the meaning of exit codes to:
0 - Succeeded, diff is empty (no changes)
1 - Errored
2 - Succeeded, there is a diff
-input=true Ask for input for variables if not directly set.
-lock=true Lock the state file when locking is supported.
-lock-timeout=0s Duration to retry a state lock.
-module-depth=n Specifies the depth of modules to show in the output.
This does not affect the plan itself, only the output
shown. By default, this is -1, which will expand all.
-no-color If specified, output won't contain any color.
-out=path Write a plan file to the given path. This can be used as
input to the "apply" command.
-parallelism=n Limit the number of concurrent operations. Defaults to 10.
-refresh=true Update state prior to checking for differences.
-state=statefile Path to a Terraform state file to use to look
up Terraform-managed resources. By default it will
use the state "terraform.tfstate" if it exists.
-target=resource Resource to target. Operation will be limited to this
resource and its dependencies. This flag can be used
multiple times.
-var 'foo=bar' Set a variable in the Terraform configuration. This
flag can be set multiple times.
<|fim▁hole|>`
return strings.TrimSpace(helpText)
}
func (c *PlanCommand) Synopsis() string {
return "Generate and show an execution plan"
}<|fim▁end|> | -var-file=foo Set variables in the Terraform configuration from
a file. If "terraform.tfvars" or any ".auto.tfvars"
files are present, they will be automatically loaded. |
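As a hedged illustration of how the plan command above might be driven programmatically: building a fully configured Meta (UI, backends, plugin directories) is omitted, and the import path is assumed from the package name; the flags mirror the help text above.

package main

import "github.com/hashicorp/terraform/command"

// runPlan wires up PlanCommand from the file above; -out writes the plan
// to a file and -refresh=false skips the state refresh before diffing.
func runPlan(meta command.Meta) int {
	c := &command.PlanCommand{Meta: meta}
	return c.Run([]string{"-out=tfplan", "-refresh=false"})
}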
<|file_name|>regions-infer-contravariance-due-to-decl.rs<|end_file_name|><|fim▁begin|>// Test that a type which is contravariant with respect to its region
// parameter yields an error when used in a covariant way.
//
// Note: see variance-regions-*.rs for the tests that check that the
// variance inference works in the first place.
use std::marker;
// This is contravariant with respect to 'a, meaning that
// Contravariant<'foo> <: Contravariant<'static> because
// 'foo <= 'static
struct Contravariant<'a> {<|fim▁hole|> s: &'short isize,
l: &'long isize,
_where:Option<&'short &'long ()>) {
// Test whether Contravariant<'short> <: Contravariant<'long>. Since
// 'short <= 'long, this would be true if the Contravariant type were
// covariant with respect to its parameter 'a.
let _: Contravariant<'long> = c; //~ ERROR E0623
}
fn main() {}<|fim▁end|> | marker: marker::PhantomData<&'a()>
}
fn use_<'short,'long>(c: Contravariant<'short>, |
<|file_name|>console.go<|end_file_name|><|fim▁begin|>package main
import (
"bufio"
"fmt"
"io"
"sort"
"strings"
"github.com/mauricioklein/text-search-engine/ranking"
"github.com/mauricioklein/text-search-engine/report"
)
// QuitSentence defines the sentence, read from the
// input stream, to quit the console
const QuitSentence = ":quit"
// Console defines an instance of the
// interactive console
type Console struct {
Processor ranking.Processor
Reporter report.Reporter
InputStream *bufio.Reader
OutputStream *bufio.Writer
ErrorStream *bufio.Writer
}
// NewConsole creates a new instance of Console
func NewConsole(processor ranking.Processor, reporter report.Reporter, inputStream io.Reader, outputStream io.Writer, errStream io.Writer) Console {
inputBuffer := bufio.NewReader(inputStream)
outputBuffer := bufio.NewWriter(outputStream)
errBuffer := bufio.NewWriter(errStream)
return Console{
Processor: processor,
Reporter: reporter,
InputStream: inputBuffer,
OutputStream: outputBuffer,
ErrorStream: errBuffer,
}
}
// Write writes a string to the console's output stream
func (c Console) Write(line string) {
c.OutputStream.Write([]byte(line))
c.OutputStream.Flush()
}
func (c Console) Error(line string) {
c.ErrorStream.Write([]byte(line))
c.ErrorStream.Flush()
}
// Read reads a string from the console's input stream
func (c Console) Read() (string, error) {
rawInput, err := c.InputStream.ReadString('\n')
if err != nil {
return "", err
}
return strings.Replace(rawInput, "\n", "", -1), nil
}
// Flush flushes the content of output stream
func (c Console) Flush() {
c.OutputStream.Flush()<|fim▁hole|>}
// Run executes and controls the IO of
// the console with the user
func (c Console) Run() {
for {
c.Write("search> ")
userInput, err := c.Read()
if err != nil {
c.Error(fmt.Sprintf("Failed to read input: %s", err))
continue
}
if isStopCondition(userInput) {
break
}
// calculate the ranks
ranks := c.Processor.Calculate(userInput)
// order ranks by score/filename
sort.Sort(
sort.Reverse(
ranking.ByScoreAndName(ranks),
),
)
// get only the top 10
ranks = selectTop(ranks, 10)
// print out the results
for _, rank := range ranks {
c.ReportRank(rank)
}
// flush the output stream
c.Flush()
}
}
// ReportRank reports a given result
func (c Console) ReportRank(rr ranking.RankResult) {
c.Reporter.Report(
c.OutputStream,
rr.File.Name(),
rr.Score,
)
}
// selectTop returns the "n" first ranks,
// or the entire ranks if n > len(ranks)
func selectTop(ranks []ranking.RankResult, n int) []ranking.RankResult {
if n <= len(ranks) {
return ranks[:n]
}
return ranks
}
// isStopCondition checks if the input stream contains
// the console's stop sentence
func isStopCondition(userInput string) bool {
return userInput == QuitSentence
}<|fim▁end|> | c.ErrorStream.Flush() |
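A small, hedged sketch of how the console above could be wired up from main: the processor and reporter values are assumed to be constructed elsewhere in this repository, while NewConsole and Run come from the file above.

package main

import (
	"os"

	"github.com/mauricioklein/text-search-engine/ranking"
	"github.com/mauricioklein/text-search-engine/report"
)

// runConsole reads queries from stdin, writes results to stdout and errors
// to stderr; Run blocks until the user types the quit sentence (":quit").
func runConsole(processor ranking.Processor, reporter report.Reporter) {
	console := NewConsole(processor, reporter, os.Stdin, os.Stdout, os.Stderr)
	console.Run()
}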
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3
import sys
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need fine tuning.
includefiles = ['windows/libusb-1.0.dll',
('icons/buzzer.png', 'icons/buzzer.png'),
'README.md',
'LICENSE',
'C:\\Windows\\SysWOW64\\msvcr110.dll']
excludes = []
packages = []
buildOptions = {'packages': packages,
'excludes': excludes,
'include_files':includefiles
}
# GUI applications require a different base on Windows (the default is for a
# console application).
base = None
if sys.platform == "win32":
base = "Win32GUI"
executables = [
Executable('pyPardy.py', base=base),
Executable('pyPardyEdit.py', base=base)
]
setup(
name='pyPardy',
#long_description='',
keywords='game jeopardy',
version='0.2',
author='Christian Wichmann',
author_email='[email protected]',
packages=['data', 'gui'],
url='',
license='LICENSE',
description='Jeopardy(tm) game system',
platforms=['any'],
classifiers=[
'Intended Audience :: End Users/Desktop',<|fim▁hole|> 'Development Status :: 4 - Beta',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Operating System :: OS Independent',
'Natural Language :: English',
'Natural Language :: German',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Games/Entertainment',
],
options=dict(build_exe=buildOptions),
executables=executables, requires=['PyQt4', 'libusb1'],
#data_files=[('libs', 'windows/libusb-1.0.dll'),
# ('icons', 'icons/buzzer.png')],
)<|fim▁end|> | |
<|file_name|>wordcount_debugging.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An example that verifies the counts and includes best practices.
On top of the basic concepts in the wordcount example, this workflow introduces
logging to Cloud Logging, and using assertions in a Dataflow pipeline.
To execute this pipeline locally, specify a local output file or output prefix
on GCS::
--output [YOUR_LOCAL_FILE | gs://YOUR_OUTPUT_PREFIX]
To execute this pipeline using the Google Cloud Dataflow service, specify
pipeline configuration::
--project YOUR_PROJECT_ID
--staging_location gs://YOUR_STAGING_DIRECTORY
--temp_location gs://YOUR_TEMP_DIRECTORY
--job_name YOUR_JOB_NAME
--runner DataflowRunner
and an output prefix on GCS::
--output gs://YOUR_OUTPUT_PREFIX
"""
from __future__ import absolute_import
import argparse
import logging
import re
from past.builtins import unicode
import apache_beam as beam
from apache_beam.io import ReadFromText
from apache_beam.io import WriteToText
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
class FilterTextFn(beam.DoFn):
"""A DoFn that filters for a specific key based on a regular expression."""
def __init__(self, pattern):
super(FilterTextFn, self).__init__()
self.pattern = pattern
# A custom metric can track values in your pipeline as it runs. Those
# values will be available in the monitoring system of the runner used
# to run the pipeline. These metrics below track the number of
# matched and unmatched words.
self.matched_words = Metrics.counter(self.__class__, 'matched_words')
self.umatched_words = Metrics.counter(self.__class__, 'umatched_words')
def process(self, element):
word, _ = element
if re.match(self.pattern, word):
# Log at INFO level each element we match. When executing this pipeline
# using the Dataflow service, these log lines will appear in the Cloud
# Logging UI.
logging.info('Matched %s', word)
self.matched_words.inc()
yield element
else:
# Log at the "DEBUG" level each element that is not matched. Different log
# levels can be used to control the verbosity of logging providing an
# effective mechanism to filter less important information.
# Note currently only "INFO" and higher level logs are emitted to the
# Cloud Logger. This log message will not be visible in the Cloud Logger.
logging.debug('Did not match %s', word)
self.umatched_words.inc()
class CountWords(beam.PTransform):
"""A transform to count the occurrences of each word.<|fim▁hole|> def expand(self, pcoll):
def count_ones(word_ones):
(word, ones) = word_ones
return (word, sum(ones))
return (pcoll
| 'split' >> (beam.FlatMap(lambda x: re.findall(r'[A-Za-z\']+', x))
.with_output_types(unicode))
| 'pair_with_one' >> beam.Map(lambda x: (x, 1))
| 'group' >> beam.GroupByKey()
| 'count' >> beam.Map(count_ones))
def run(argv=None):
"""Runs the debugging wordcount pipeline."""
parser = argparse.ArgumentParser()
parser.add_argument('--input',
dest='input',
default='gs://dataflow-samples/shakespeare/kinglear.txt',
help='Input file to process.')
parser.add_argument('--output',
dest='output',
required=True,
help='Output file to write results to.')
known_args, pipeline_args = parser.parse_known_args(argv)
# We use the save_main_session option because one or more DoFn's in this
# workflow rely on global context (e.g., a module imported at module level).
pipeline_options = PipelineOptions(pipeline_args)
pipeline_options.view_as(SetupOptions).save_main_session = True
with beam.Pipeline(options=pipeline_options) as p:
# Read the text file[pattern] into a PCollection, count the occurrences of
# each word and filter by a list of words.
filtered_words = (
p | 'read' >> ReadFromText(known_args.input)
| CountWords()
| 'FilterText' >> beam.ParDo(FilterTextFn('Flourish|stomach')))
# assert_that is a convenient PTransform that checks a PCollection has an
# expected value. Asserts are best used in unit tests with small data sets
# but is demonstrated here as a teaching tool.
#
# Note assert_that does not provide any output and that successful
# completion of the Pipeline implies that the expectations were met. Learn
# more at https://cloud.google.com/dataflow/pipelines/testing-your-pipeline
# on how to best test your pipeline.
assert_that(
filtered_words, equal_to([('Flourish', 3), ('stomach', 1)]))
# Format the counts into a PCollection of strings and write the output using
# a "Write" transform that has side effects.
# pylint: disable=unused-variable
def format_result(word_count):
(word, count) = word_count
return '%s: %s' % (word, count)
output = (filtered_words
| 'format' >> beam.Map(format_result)
| 'write' >> WriteToText(known_args.output))
if __name__ == '__main__':
# Cloud Logging would contain only logging.INFO and higher level logs logged
# by the root logger. All log statements emitted by the root logger will be
# visible in the Cloud Logging UI. Learn more at
# https://cloud.google.com/logging about the Cloud Logging UI.
#
# You can set the default logging level to a different level when running
# locally.
logging.getLogger().setLevel(logging.INFO)
run()<|fim▁end|> |
A PTransform that converts a PCollection containing lines of text into a
PCollection of (word, count) tuples.
""" |
<|file_name|>operations.go<|end_file_name|><|fim▁begin|>package relay
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// OperationsClient is the client for the API used to manage Azure Relay resources through Azure Resource Manager.
type OperationsClient struct {
BaseClient
}
// NewOperationsClient creates an instance of the OperationsClient client.
func NewOperationsClient(subscriptionID string) OperationsClient {
return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client.<|fim▁hole|> return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// List lists all available Relay REST API operations.
func (client OperationsClient) List(ctx context.Context) (result OperationListResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List")
defer func() {
sc := -1
if result.olr.Response.Response != nil {
sc = result.olr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "relay.OperationsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.olr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "relay.OperationsClient", "List", resp, "Failure sending request")
return
}
result.olr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "relay.OperationsClient", "List", resp, "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) {
const APIVersion = "2017-04-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/providers/Microsoft.Relay/operations"),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client OperationsClient) listNextResults(ctx context.Context, lastResults OperationListResult) (result OperationListResult, err error) {
req, err := lastResults.operationListResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "relay.OperationsClient", "listNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "relay.OperationsClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "relay.OperationsClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client OperationsClient) ListComplete(ctx context.Context) (result OperationListResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.List(ctx)
return
}<|fim▁end|> | func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient { |
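A hedged usage sketch for the client above: the import path is inferred from the 2017-04-01 API version, the authorizer setup is omitted, and the iterator's NotDone/Value/Next methods are assumed to follow the usual autorest pattern generated elsewhere in this package.

package main

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/relay/mgmt/2017-04-01/relay"
)

func listRelayOperations(subscriptionID string) error {
	// NewOperationsClient and ListComplete come from the file above.
	client := relay.NewOperationsClient(subscriptionID)
	// client.Authorizer must be configured before calling the service (omitted).

	iter, err := client.ListComplete(context.Background())
	if err != nil {
		return err
	}
	for iter.NotDone() {
		fmt.Printf("%+v\n", iter.Value())
		if err := iter.Next(); err != nil {
			return err
		}
	}
	return nil
}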
<|file_name|>utilitydialog.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2011-2014 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "utilitydialog.h"
#include "ui_helpmessagedialog.h"
#include "bitcoingui.h"
#include "clientmodel.h"
#include "guiutil.h"
#include "clientversion.h"
#include "init.h"
#include <stdio.h>
#include <QCloseEvent>
#include <QLabel>
#include <QRegExp>
#include <QVBoxLayout>
/** "Help message" or "About" dialog box */
HelpMessageDialog::HelpMessageDialog(QWidget *parent, bool about) :
QDialog(parent),
ui(new Ui::HelpMessageDialog)
{
ui->setupUi(this);
GUIUtil::restoreWindowGeometry("nHelpMessageDialogWindow", this->size(), this);
QString version = tr("Fastcoin Core") + " " + tr("version") + " " + QString::fromStdString(FormatFullVersion());
/* On x86 add a bit specifier to the version so that users can distinguish between
* 32 and 64 bit builds. On other architectures, 32/64 bit may be more ambiguous.
*/
#if defined(__x86_64__)
version += " " + tr("(%1-bit)").arg(64);
#elif defined(__i386__ )
version += " " + tr("(%1-bit)").arg(32);
#endif
if (about)
{
setWindowTitle(tr("About Fastcoin Core"));
/// HTML-format the license message from the core
QString licenseInfo = QString::fromStdString(LicenseInfo());
QString licenseInfoHTML = licenseInfo;
// Make URLs clickable
QRegExp uri("<(.*)>", Qt::CaseSensitive, QRegExp::RegExp2);
uri.setMinimal(true); // use non-greedy matching
licenseInfoHTML.replace(uri, "<a href=\"\\1\">\\1</a>");
// Replace newlines with HTML breaks
licenseInfoHTML.replace("\n\n", "<br><br>");
ui->helpMessageLabel->setTextFormat(Qt::RichText);
ui->scrollArea->setVerticalScrollBarPolicy(Qt::ScrollBarAsNeeded);
text = version + "\n" + licenseInfo;
ui->helpMessageLabel->setText(version + "<br><br>" + licenseInfoHTML);
ui->helpMessageLabel->setWordWrap(true);
} else {
setWindowTitle(tr("Command-line options"));
QString header = tr("Usage:") + "\n" +
" fastcoin-qt [" + tr("command-line options") + "] " + "\n";
QString coreOptions = QString::fromStdString(HelpMessage(HMM_BITCOIN_QT));
QString uiOptions = tr("UI options") + ":\n" +
" -choosedatadir " + tr("Choose data directory on startup (default: 0)") + "\n" +
" -lang=<lang> " + tr("Set language, for example \"de_DE\" (default: system locale)") + "\n" +
" -min " + tr("Start minimized") + "\n" +
" -rootcertificates=<file> " + tr("Set SSL root certificates for payment request (default: -system-)") + "\n" +
" -splash " + tr("Show splash screen on startup (default: 1)");
ui->helpMessageLabel->setFont(GUIUtil::bitcoinAddressFont());
text = version + "\n" + header + "\n" + coreOptions + "\n" + uiOptions;
ui->helpMessageLabel->setText(text);
}
}
HelpMessageDialog::~HelpMessageDialog()
{
GUIUtil::saveWindowGeometry("nHelpMessageDialogWindow", this);
delete ui;
}
void HelpMessageDialog::printToConsole()
{
// On other operating systems, the expected action is to print the message to the console.
fprintf(stdout, "%s\n", qPrintable(text));
}
void HelpMessageDialog::showOrPrint()
{
#if defined(WIN32)
// On Windows, show a message box, as there is no stderr/stdout in windowed applications
exec();
#else
// On other operating systems, print help text to console
printToConsole();
#endif
}
void HelpMessageDialog::on_okButton_accepted()
{
close();
}
/** "Shutdown" window */
ShutdownWindow::ShutdownWindow(QWidget *parent, Qt::WindowFlags f):
QWidget(parent, f)
{
QVBoxLayout *layout = new QVBoxLayout();
layout->addWidget(new QLabel(
tr("Fastcoin Core is shutting down...") + "<br /><br />" +
tr("Do not shut down the computer until this window disappears.")));
setLayout(layout);
}
void ShutdownWindow::showShutdownWindow(BitcoinGUI *window)
{
if (!window)
return;
// Show a simple window indicating shutdown status
QWidget *shutdownWindow = new ShutdownWindow();
// We don't hold a direct pointer to the shutdown window after creation, so use
// Qt::WA_DeleteOnClose to make sure that the window will be deleted eventually.
shutdownWindow->setAttribute(Qt::WA_DeleteOnClose);
shutdownWindow->setWindowTitle(window->windowTitle());
// Center shutdown window at where main window was
const QPoint global = window->mapToGlobal(window->rect().center());
shutdownWindow->move(global.x() - shutdownWindow->width() / 2, global.y() - shutdownWindow->height() / 2);
shutdownWindow->show();
}
void ShutdownWindow::closeEvent(QCloseEvent *event)
{
event->ignore();<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>draft.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from xmodule.modulestore import Location, namedtuple_to_son
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import own_metadata
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore.mongo.base import MongoModuleStore
from pytz import UTC
DRAFT = 'draft'
# Things w/ these categories should never be marked as version='draft'
DIRECT_ONLY_CATEGORIES = ['course', 'chapter', 'sequential', 'about', 'static_tab', 'course_info']
def as_draft(location):
"""
Returns the Location that is the draft for `location`
"""
return Location(location).replace(revision=DRAFT)
def as_published(location):
"""
Returns the Location that is the published version for `location`
"""
return Location(location).replace(revision=None)
def wrap_draft(item):
"""
Sets `item.is_draft` to `True` if the item is a
draft, and `False` otherwise. Sets the item's location to the
non-draft location in either case
"""
setattr(item, 'is_draft', item.location.revision == DRAFT)
item.location = item.location.replace(revision=None)
return item
class DraftModuleStore(MongoModuleStore):
"""
This mixin modifies a modulestore to give it draft semantics.
That is, edits made to units are stored to locations that have the revision DRAFT,
and when reads are made, they first read with revision DRAFT, and then fall back
to the baseline revision only if DRAFT doesn't exist.
This module also includes functionality to promote DRAFT modules (and optionally
their children) to published modules.
"""
def get_item(self, location, depth=0):
"""
Returns an XModuleDescriptor instance for the item at location.
If location.revision is None, returns the item with the most
recent revision
If any segment of the location is None except revision, raises
xmodule.modulestore.exceptions.InsufficientSpecificationError
If no object is found at that location, raises
xmodule.modulestore.exceptions.ItemNotFoundError
location: Something that can be passed to Location
depth (int): An argument that some module stores may use to prefetch
descendents of the queried modules for more efficient results later
in the request. The depth is counted in the number of calls to
get_children() to cache. None indicates to cache all descendents
"""
try:
return wrap_draft(super(DraftModuleStore, self).get_item(as_draft(location), depth=depth))
except ItemNotFoundError:
return wrap_draft(super(DraftModuleStore, self).get_item(location, depth=depth))
def get_instance(self, course_id, location, depth=0):
"""
Get an instance of this location, with policy for course_id applied.
TODO (vshnayder): this may want to live outside the modulestore eventually
"""
try:
return wrap_draft(super(DraftModuleStore, self).get_instance(course_id, as_draft(location), depth=depth))
except ItemNotFoundError:
return wrap_draft(super(DraftModuleStore, self).get_instance(course_id, location, depth=depth))
def get_items(self, location, course_id=None, depth=0):
"""
Returns a list of XModuleDescriptor instances for the items
that match location. Any element of location that is None is treated
as a wildcard that matches any value
location: Something that can be passed to Location
depth: An argument that some module stores may use to prefetch
descendents of the queried modules for more efficient results later
in the request. The depth is counted in the number of calls to
get_children() to cache. None indicates to cache all descendents
"""
draft_loc = as_draft(location)
draft_items = super(DraftModuleStore, self).get_items(draft_loc, course_id=course_id, depth=depth)
items = super(DraftModuleStore, self).get_items(location, course_id=course_id, depth=depth)
draft_locs_found = set(item.location.replace(revision=None) for item in draft_items)
non_draft_items = [
item
for item in items
if (item.location.revision != DRAFT
and item.location.replace(revision=None) not in draft_locs_found)
]
return [wrap_draft(item) for item in draft_items + non_draft_items]
def clone_item(self, source, location):
"""
Clone a new item that is a copy of the item at the location `source`
and writes it to `location`
"""
if Location(location).category in DIRECT_ONLY_CATEGORIES:
raise InvalidVersionError(location)
return wrap_draft(super(DraftModuleStore, self).clone_item(source, as_draft(location)))
def update_item(self, location, data, allow_not_found=False):
"""
Set the data in the item specified by the location to
data
location: Something that can be passed to Location
data: A nested dictionary of problem data
"""
draft_loc = as_draft(location)
try:
draft_item = self.get_item(location)
if not getattr(draft_item, 'is_draft', False):
self.clone_item(location, draft_loc)
except ItemNotFoundError, e:
if not allow_not_found:
raise e
return super(DraftModuleStore, self).update_item(draft_loc, data)
def update_children(self, location, children):
"""
Set the children for the item specified by the location to
children
location: Something that can be passed to Location
children: A list of child item identifiers
"""
draft_loc = as_draft(location)
draft_item = self.get_item(location)
if not getattr(draft_item, 'is_draft', False):
self.clone_item(location, draft_loc)
return super(DraftModuleStore, self).update_children(draft_loc, children)
def update_metadata(self, location, metadata):
"""
Set the metadata for the item specified by the location to
metadata
location: Something that can be passed to Location
metadata: A nested dictionary of module metadata
"""
draft_loc = as_draft(location)
draft_item = self.get_item(location)
if not getattr(draft_item, 'is_draft', False):
self.clone_item(location, draft_loc)
if 'is_draft' in metadata:
del metadata['is_draft']
return super(DraftModuleStore, self).update_metadata(draft_loc, metadata)
def delete_item(self, location, delete_all_versions=False):
"""
Delete an item from this modulestore
location: Something that can be passed to Location
"""
super(DraftModuleStore, self).delete_item(as_draft(location))
if delete_all_versions:
super(DraftModuleStore, self).delete_item(as_published(location))
return
def get_parent_locations(self, location, course_id):
'''Find all locations that are the parents of this location. Needed<|fim▁hole|> for path_to_location().
returns an iterable of things that can be passed to Location.
'''
return super(DraftModuleStore, self).get_parent_locations(location, course_id)
def publish(self, location, published_by_id):
"""
Save a current draft to the underlying modulestore
"""
draft = self.get_item(location)
draft.cms.published_date = datetime.now(UTC)
draft.cms.published_by = published_by_id
super(DraftModuleStore, self).update_item(location, draft._model_data._kvs._data)
super(DraftModuleStore, self).update_children(location, draft._model_data._kvs._children)
super(DraftModuleStore, self).update_metadata(location, own_metadata(draft))
self.delete_item(location)
def unpublish(self, location):
"""
Turn the published version into a draft, removing the published version
"""
if Location(location).category in DIRECT_ONLY_CATEGORIES:
raise InvalidVersionError(location)
super(DraftModuleStore, self).clone_item(location, as_draft(location))
super(DraftModuleStore, self).delete_item(location)
def _query_children_for_cache_children(self, items):
# first get non-draft in a round-trip
queried_children = []
to_process_non_drafts = super(DraftModuleStore, self)._query_children_for_cache_children(items)
to_process_dict = {}
for non_draft in to_process_non_drafts:
to_process_dict[Location(non_draft["_id"])] = non_draft
# now query all draft content in another round-trip
query = {
'_id': {'$in': [namedtuple_to_son(as_draft(Location(item))) for item in items]}
}
to_process_drafts = list(self.collection.find(query))
# now we have to go through all drafts and replace the non-draft
# with the draft. This is because the semantics of the DraftStore is to
# always return the draft - if available
for draft in to_process_drafts:
draft_loc = Location(draft["_id"])
draft_as_non_draft_loc = draft_loc.replace(revision=None)
# does non-draft exist in the collection
# if so, replace it
if draft_as_non_draft_loc in to_process_dict:
to_process_dict[draft_as_non_draft_loc] = draft
# convert the dict - which is used for look ups - back into a list
for key, value in to_process_dict.iteritems():
queried_children.append(value)
return queried_children<|fim▁end|> | |
<|file_name|>main.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import {HAMMER_GESTURE_CONFIG} from '@angular/platform-browser';
import {DemoApp} from './demo-app/demo-app';
import {HTTP_PROVIDERS} from '@angular/http';
import {ROUTER_PROVIDERS} from '@angular/router';
import {MdIconRegistry} from './components/icon/icon-registry';
import {OVERLAY_CONTAINER_TOKEN} from './core/overlay/overlay';
import {MdLiveAnnouncer} from './core/live-announcer/live-announcer';
import {provide} from '@angular/core';
import {createOverlayContainer} from './core/overlay/overlay-container';
import {Renderer} from '@angular/core';
import {MdGestureConfig} from './core/gestures/MdGestureConfig';
import 'rxjs/Rx';
bootstrap(DemoApp, [
ROUTER_PROVIDERS,
MdLiveAnnouncer,
provide(OVERLAY_CONTAINER_TOKEN, {useValue: createOverlayContainer()}),
HTTP_PROVIDERS,
MdIconRegistry,
Renderer,
provide(HAMMER_GESTURE_CONFIG, {useClass: MdGestureConfig})
]);<|fim▁end|> | import {bootstrap} from '@angular/platform-browser-dynamic'; |
<|file_name|>GetStatusAndDocs.java<|end_file_name|><|fim▁begin|>/**
* @copyright Copyright (C) DocuSign, Inc. All rights reserved.
*
* This source code is intended only as a supplement to DocuSign SDK
* and/or on-line documentation.
*
* This sample is designed to demonstrate DocuSign features and is not intended
* for production use. Code and policy for a production application must be
* developed to meet the specific data and security requirements of the
* application.
*
* THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
* KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
* PARTICULAR PURPOSE.
*/
package net.docusign.sample;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Enumeration;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.UUID;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import net.docusign.api_3_0.APIServiceSoap;
import net.docusign.api_3_0.ArrayOfString2;
import net.docusign.api_3_0.EnvelopePDF;
import net.docusign.api_3_0.EnvelopeStatusFilter;
import net.docusign.api_3_0.FilteredEnvelopeStatuses;
import net.docusign.api_3_0.RequestRecipientTokenAuthenticationAssertion;
import net.docusign.api_3_0.RequestRecipientTokenAuthenticationAssertionAuthenticationMethod;
import net.docusign.api_3_0.RequestRecipientTokenClientURLs;
/**
* Servlet implementation class GetStatusAndDocs
*/
public class GetStatusAndDocs extends HttpServlet {
private static final long serialVersionUID = 1L;
/**
* @see HttpServlet#HttpServlet()
*/
public GetStatusAndDocs() {
super();
// TODO Auto-generated constructor stub
}
/**
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
request.getSession().setAttribute(Utils.SESSION_EMBEDTOKEN, "");
HttpSession session = request.getSession();
// Make sure we're logged in
if (session.getAttribute(Utils.SESSION_LOGGEDIN) == null ||
session.getAttribute(Utils.SESSION_LOGGEDIN).equals(false)) {
response.sendRedirect(Utils.CONTROLLER_LOGIN);
}
else {
// Do we have envelope IDs in this session?
if (session.getAttribute(Utils.SESSION_ENVELOPEIDS) != null) {
APIServiceSoap api = Utils.getAPI(request);
// Grab all the envelope IDs in this session
ArrayOfString2 envIDs = new ArrayOfString2();
envIDs.getEnvelopeId().addAll((List<String>) session.getAttribute(Utils.SESSION_ENVELOPEIDS));
// Create a filter so we only retrieve these envelope statuses
EnvelopeStatusFilter filter = new EnvelopeStatusFilter();
filter.setAccountId(session.getAttribute(Utils.SESSION_ACCOUNT_ID).toString());
filter.setEnvelopeIds(envIDs);
try {
// Call requestStatusesEx on these envelopes
FilteredEnvelopeStatuses statuses = api.requestStatusesEx(filter);
session.setAttribute(Utils.SESSION_STATUSES,
statuses.getEnvelopeStatuses().getEnvelopeStatus());
} catch (Exception e) {
}
}
response.sendRedirect(Utils.PAGE_GETSTATUS);
}
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// Get the parameter names
Enumeration paramNames = request.getParameterNames();
// Loop through the parameter names
while (paramNames.hasMoreElements()) {
String paramName = (String)paramNames.nextElement();
if (paramName.startsWith(Utils.NAME_STARTSIGNING)) {
// We want to start this user signing
startSigning(paramName, request);<|fim▁hole|> downloadEnvelope(paramName, request, response);
}
}
}
protected void downloadEnvelope(String param, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String eid = param.split("\\+")[1];
// Request the PDF of the envelope
APIServiceSoap api = Utils.getAPI(request);
EnvelopePDF pdf = api.requestPDF(eid);
// Start download of the resulting PDF
byte[] documentBytes = pdf.getPDFBytes();
response.setHeader("Content-Disposition", "attachment;filename=Envelope.pdf");
response.setContentLength(documentBytes.length);
response.setContentType("application/pdf");
response.getOutputStream().write(documentBytes);
return;
}
protected void startSigning(String param, HttpServletRequest request) throws ServletException, IOException {
// Parse out envelope id, email, username, client user id
String[] params = param.split("\\&");
String eid = "", cid = "", uname = "", email = "";
for (int i = 0; i < params.length; i++) {
String[] pair = params[i].split("\\+");
if(pair[0].equals("SignDocEnvelope")) {
eid = pair[1];
} else if (pair[0].equals("Email")) {
email = pair[1];
} else if (pair[0].equals("UserName")) {
uname = pair[1];
} else if (pair[0].equals("CID")) {
cid = pair[1];
}
}
// Request the token
try {
getToken(request, eid, email, uname, cid);
} catch (DatatypeConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
protected void getToken(HttpServletRequest request, String eid, String email, String username, String CID) throws DatatypeConfigurationException {
String token = null;
// Create the assertion
RequestRecipientTokenAuthenticationAssertion assertion = new RequestRecipientTokenAuthenticationAssertion();
assertion.setAssertionID(UUID.randomUUID().toString());
// wsdl2java translates this to XMLGregorianCalendar
GregorianCalendar gcal = new GregorianCalendar();
gcal.setTime(new Date());
assertion.setAuthenticationInstant(DatatypeFactory.newInstance().newXMLGregorianCalendar(gcal));
assertion.setAuthenticationMethod(RequestRecipientTokenAuthenticationAssertionAuthenticationMethod.PASSWORD);
assertion.setSecurityDomain("DocuSign2010Q1Sample");
// Create the URLs that DocuSign will redirect the iframe to after different events
RequestRecipientTokenClientURLs urls = new RequestRecipientTokenClientURLs();
String urlbase = Utils.getCallbackURL(request, Utils.PAGE_POP);
urls.setOnAccessCodeFailed(urlbase + "?event=AccessCodeFailed&uname=" + username);
urls.setOnCancel(urlbase + "?event=Cancel&uname=" + username);
urls.setOnDecline(urlbase + "?event=Decline&uname=" + username);
urls.setOnException(urlbase + "?event=Exception&uname=" + username);
urls.setOnFaxPending(urlbase + "?event=FaxPending&uname=" + username);
urls.setOnIdCheckFailed(urlbase + "?event=IdCheckFailed&uname=" + username);
urls.setOnSessionTimeout(urlbase + "?event=SessionTimeout&uname=" + username);
urls.setOnTTLExpired(urlbase + "?event=TTLExpired&uname=" + username);
urls.setOnViewingComplete(urlbase + "?event=ViewingComplete&uname=" + username);
urls.setOnSigningComplete(urlbase + "?event=SigningComplete&uname=" + username);
// Get the API service and call RequestRecipientToken for this recipient
APIServiceSoap api = Utils.getAPI(request);
token = api.requestRecipientToken(eid,
CID,
username,
email,
assertion,
urls);
// Set the iframe to the token
request.getSession().setAttribute(Utils.SESSION_EMBEDTOKEN, token);
}
}<|fim▁end|> | response.sendRedirect(Utils.PAGE_GETSTATUS);
} else if (paramName.startsWith(Utils.NAME_DOWNLOAD)) {
// We want to download the specified envelope |
<|file_name|>jquery.app.js<|end_file_name|><|fim▁begin|>/**
* Themes: Velonic Admin theme
*
**/
! function($) {
"use strict";
/**
Sidebar Module
*/
var SideBar = function() {
this.$body = $("body"),
this.$sideBar = $('aside.left-panel'),
this.$navbarToggle = $(".navbar-toggle"),
this.$navbarItem = $("aside.left-panel nav.navigation > ul > li:has(ul) > a")
};
  //initializing
SideBar.prototype.init = function() {
//on toggle side menu bar
var $this = this;
$(document).on('click', '.navbar-toggle', function () {
$this.$sideBar.toggleClass('collapsed');
});
//on menu item clicking
this.$navbarItem.click(function () {
if ($this.$sideBar.hasClass('collapsed') == false || $(window).width() < 768) {
$("aside.left-panel nav.navigation > ul > li > ul").slideUp(300);
$("aside.left-panel nav.navigation > ul > li").removeClass('active');
if (!$(this).next().is(":visible")) {
$(this).next().slideToggle(300, function () {
$("aside.left-panel:not(.collapsed)").getNiceScroll().resize();
});
$(this).closest('li').addClass('active');
}
return false;
}
});
//adding nicescroll to sidebar
if ($.isFunction($.fn.niceScroll)) {
$("aside.left-panel:not(.collapsed)").niceScroll({
cursorcolor: '#8e909a',
cursorborder: '0px solid #fff',
cursoropacitymax: '0.5',
cursorborderradius: '0px'
});
}
},
//exposing the sidebar module
$.SideBar = new SideBar, $.SideBar.Constructor = SideBar
}(window.jQuery),
//portlets
function($) {
"use strict";
/**
Portlet Widget
*/
var Portlet = function() {
this.$body = $("body"),
this.$portletIdentifier = ".portlet",
this.$portletCloser = '.portlet a[data-toggle="remove"]',
this.$portletRefresher = '.portlet a[data-toggle="reload"]'
};
//on init
Portlet.prototype.init = function() {
// Panel closest
var $this = this;<|fim▁hole|> var $portlet_parent = $portlet.parent();
$portlet.remove();
if ($portlet_parent.children().length == 0) {
$portlet_parent.remove();
}
});
// Panel Reload
$(document).on("click",this.$portletRefresher, function (ev) {
ev.preventDefault();
var $portlet = $(this).closest($this.$portletIdentifier);
// This is just a simulation, nothing is going to be reloaded
$portlet.append('<div class="panel-disabled"><div class="loader-1"></div></div>');
var $pd = $portlet.find('.panel-disabled');
setTimeout(function () {
$pd.fadeOut('fast', function () {
$pd.remove();
});
}, 500 + 300 * (Math.random() * 5));
});
},
//
$.Portlet = new Portlet, $.Portlet.Constructor = Portlet
}(window.jQuery),
//main app module
function($) {
"use strict";
var VelonicApp = function() {
this.VERSION = "1.0.0",
this.AUTHOR = "Coderthemes",
this.SUPPORT = "[email protected]",
this.pageScrollElement = "html, body",
this.$body = $("body")
};
//initializing tooltip
VelonicApp.prototype.initTooltipPlugin = function() {
$.fn.tooltip && $('[data-toggle="tooltip"]').tooltip()
},
//initializing popover
VelonicApp.prototype.initPopoverPlugin = function() {
$.fn.popover && $('[data-toggle="popover"]').popover()
},
//initializing nicescroll
VelonicApp.prototype.initNiceScrollPlugin = function() {
//You can change the color of scroll bar here
$.fn.niceScroll && $(".nicescroll").niceScroll({ cursorcolor: '#9d9ea5', cursorborderradius: '0px'});
},
//initializing knob
VelonicApp.prototype.initKnob = function() {
if ($(".knob").length > 0) {
$(".knob").knob();
}
},
  //initializing
VelonicApp.prototype.init = function() {
this.initTooltipPlugin(),
this.initPopoverPlugin(),
this.initNiceScrollPlugin(),
this.initKnob(),
//creating side bar
$.SideBar.init(),
    //creating portlets
$.Portlet.init();
},
$.VelonicApp = new VelonicApp, $.VelonicApp.Constructor = VelonicApp
}(window.jQuery),
//initializing main application module
function($) {
"use strict";
$.VelonicApp.init()
}(window.jQuery);
/* ==============================================
7.WOW plugin triggers animate.css on scroll
=============================================== */
var wow = new WOW(
{
boxClass: 'wow', // animated element css class (default is wow)
animateClass: 'animated', // animation css class (default is animated)
offset: 50, // distance to the element when triggering the animation (default is 0)
mobile: false // trigger animations on mobile devices (true is default)
}
);
wow.init();<|fim▁end|> | $(document).on("click",this.$portletCloser, function (ev) {
ev.preventDefault();
var $portlet = $(this).closest($this.$portletIdentifier); |
<|file_name|>aws_provider_bag_test.py<|end_file_name|><|fim▁begin|>import pytest
from mock import Mock<|fim▁hole|>
class TestOrchestrateServiceBag(object):
@pytest.fixture
def orchestrate_services(self):
return Mock()
def test_orchestrate_service_bag(self, orchestrate_services):
services = AwsProviderServiceBag(orchestrate_services)
assert services.cloudformation_service is not None
assert services.cloudformation_service.client is not None
assert services.cloudformation_service.cloudformation is not None
assert services.ec2_service is not None
assert services.ec2_service.ec2 is not None
assert services.ecr_service is not None
assert services.ecr_service.client is not None
assert services.eks_service is not None
assert services.eks_service.client is not None
assert services.iam_service is not None
assert services.iam_service.client is not None
assert services.iam_service.iam is not None
assert services.sts_service is not None
assert services.sts_service.client is not None<|fim▁end|> |
from sigopt.orchestrate.services.aws_provider_bag import AwsProviderServiceBag |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=utf-8<|fim▁hole|><|fim▁end|> | __author__ = u'Ahmed Şeref GÜNEYSU'
import ui |
<|file_name|>RenderTarget.cpp<|end_file_name|><|fim▁begin|>// Copyright NVIDIA Corporation 2012
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
<|fim▁hole|>// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <dp/ui/RenderTarget.h>
namespace dp
{
namespace ui
{
RenderTarget::~RenderTarget()
{
}
bool RenderTarget::beginRendering()
{
return true;
}
void RenderTarget::endRendering()
{
}
bool RenderTarget::isStereoEnabled() const
{
return false;
}
bool RenderTarget::setStereoTarget( StereoTarget target )
{
return target == LEFT;
}
RenderTarget::StereoTarget RenderTarget::getStereoTarget() const
{
return LEFT;
}
} // namespace ui
} // namespace dp<|fim▁end|> | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|>import numpy as np
from tfs.core.util import run_once_for_each_obj
from tfs.core.initializer import DefaultInit
from tfs.core.loss import DefaultLoss
from tfs.core.regularizers import DefaultRegularizer
from tfs.core.monitor import DefaultMonitor
from tfs.core.optimizer import DefaultOptimizer
from tfs.core.layer import func_table,Layer
from tfs.core.elem import Component
from tfs.core.layer import ops
import pickle
import tensorflow as tf
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.client import device_lib
from sklearn import metrics
# for supporting multi-gpu:
# https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py#L174
#
# we use shared variables on CPU and model distributed on each GPU
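# Illustrative sketch of that tower pattern (an assumption added for clarity, not part
# of the original module): shared variables live on the CPU while each GPU builds its
# own copy of the model, roughly
#   for i in range(num_gpu):
#     with tf.device('/gpu:%d' % i), tf.name_scope('GPU_%d' % i):
#       build_tower(...)  # hypothetical per-tower build step
#       tf.get_variable_scope().reuse_variables()
# which is what Network.build() and Network.compute_gradients() below do per tower.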
from tfs.network.net_struct import NetStructure
#################### Network
# decorators
def with_graph(f):
def with_graph_run(self,*args,**kwargs):
with self.graph.as_default():
return f(self,*args,**kwargs)
  # this is important to make the decorator compatible with run_once_for_each_obj.
with_graph_run.__name__=f.__name__
return with_graph_run
class Network(object):
__hash__=object.__hash__
def __init__(self):
self._init_graph_sess()
self._struct = NetStructure(self)
self._true_out=None
self._in = None
self._out = None
self._loss=None
self.variables = {}
self.initializer = DefaultInit(self)
self.losser = DefaultLoss(self)
self.regularizer =DefaultRegularizer(self)
self.monitor = {}
self.monitor['default']=DefaultMonitor(self)
self._optimizer = DefaultOptimizer(self)
# this must be set when define a network
self.loss_input_layer_name = None<|fim▁hole|> self.grads = None
self._train_op = None
self.num_gpu = 0
self.i_step = 0
self.n_epoch = 0
self._dtype = None
def to_pickle(self):
return [
self.in_shape,
self.loss_input_layer_name,
self.optimizer.to_pickle(),
self.losser.to_pickle(),
self.regularizer.to_pickle()
]
def restore(self,objs):
inshape = objs[0]
self.loss_input_layer_name = objs[1]
self.optimizer = Component.restore(objs[2],self)
self.losser = Component.restore(objs[3],self)
self.regularizer = Component.restore(objs[4],self)
if inshape:
self.build(inshape)
def _init_graph_sess(self):
self._graph = tf.Graph()
with self.graph.as_default():
self._sess = tf.Session()
@property
def optimizer(self):
return self._optimizer
@optimizer.setter
def optimizer(self,opt):
self.grads=None
self._optimizer=opt
def add_monitor(self,name,monitor):
self.monitor[name] = monitor
@staticmethod
def available_devices():
local_device_protos = device_lib.list_local_devices()
return [x for x in local_device_protos]
def __len__(self):
return len(self.net_def)
@property
@deprecated("2017-05-01", "Use `net_def` instead.")
def layers(self):
return self._struct
@property
def nodes(self):
return self._struct
@property
def net_def(self):
return self._struct
def node_to_index(self,l):
return self.net_def.find_index(l)
def node_by_index(self,idx):
return self.net_def[idx]
@deprecated("2017-05-01", "Use `node_by_name` instead.")
def layer_by_name(self,name):
return self.net_def.by_name(name)
def node_by_name(self,name):
return self.net_def.by_name(name)
def __del__(self):
self.sess.close()
def setup(self):
'''Construct the network. '''
raise NotImplementedError('Must be implemented by the subclass.')
def setup_with_def(self,struct_def,in_shape=None):
if isinstance(struct_def,list):
struct_def = NetStructure(self,nodes=struct_def)
self._struct = struct_def.copy_to(self)
if in_shape:
self.build(in_shape)
@property
def graph(self):
return self._graph
@property
def input(self):
return self._in
@property
def output(self):
return self._out
@property
def true_output(self):
return self._true_out
@property
def sess(self):
return self._sess
def _init_in_out_size(self):
if self.num_gpu and self._in is None and self._out is None:
self._in = [None]*self.num_gpu
self._out = [None]*self.num_gpu
self._true_out = [None]*self.num_gpu
self._loss = [None]*self.num_gpu
def tf_graph_str(self):
info=[]
for n in self.graph.as_graph_def().node:
s = '%-20s@%20s'%(n.name,n.device)
if hasattr(n,'tfs_nodename'):
s=s+' --%s'%n.tfs_nodename
info.append(s)
return '\n'.join(info)
@with_graph
@run_once_for_each_obj
def build(self,input_shape,dtype=tf.float32):
    """Build the computational graph.
    input_shape: the shape of the network input tensor.
    """
    self._dtype = dtype
if not self.num_gpu:
self._build(input_shape,dtype)
else:
      # Per-tower gradients are collected later in compute_gradients().
      for i in range(self.num_gpu):
        with tf.device('/gpu:%d' % i):
          with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
            self._build(input_shape,dtype,i)
            tf.get_variable_scope().reuse_variables()
self.build_variables_table()
self._initialize()
self.compute_gradients()
return self.output
def compute_gradients(self):
if self.loss is None:
return
if not self.num_gpu:
self.grads = self.optimizer.compute_gradients(self.loss,self.variables)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
_grad = self.optimizer.compute_gradients(_loss,self.variables.values())
tower_grads.append(_grad)
self.grads = self.average_gradients(tower_grads)
def average_gradients(self,tower_grads):
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = []
for g, _ in grad_and_vars:
expanded_g = tf.expand_dims(g, 0)
grads.append(expanded_g)
grad = tf.concat(axis=0, values=grads)
grad = tf.reduce_mean(grad, 0)
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
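  # Worked illustration (assumption, for clarity only): with two towers and a single
  # variable v, tower_grads is [[(g_gpu0, v)], [(g_gpu1, v)]]; zip(*tower_grads) yields
  # ((g_gpu0, v), (g_gpu1, v)), and the returned list is [(mean of g_gpu0 and g_gpu1, v)].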
# this function is called only in build() under current graph.
def _build(self,input_shape,dtype,idx=None):
self._init_in_out_size()
tmp = tf.placeholder(dtype,input_shape)
if idx is None:
self._in = tmp
else:
self._in[idx] = tmp
for l in self.net_def:
tmp = l.build(tmp,idx)
if idx is None:
self._out = tmp
output_shape=self._out.get_shape().as_list()
output_dtype=self._out.dtype
self._true_out=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss = self._compute_loss(idx)
else:
self._out[idx] = tmp
output_shape=self._out[idx].get_shape().as_list()
output_dtype=self._out[idx].dtype
      self._true_out[idx]=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss[idx] = self._compute_loss(idx)
return self
def _initialize(self):
self.run_initor(self.initializer)
def _compute_loss(self,idx):
loss = self.losser.compute(idx)
if loss is None:
return loss
return loss + self.regularizer.compute()
@property
def loss(self):
return self._loss
def build_variables_table(self):
for l in self.net_def:
for k in l.variables:
v = l.variables[k]
self.variables[v.name] = v
def has_built(self):
if hasattr(self,'_has_run'):
if Network.build.__name__ in self._has_run:
return True
return False
def fit(self,dataset,batch_size,n_epoch,
shuffle_epoch=True,max_step=10000000):
if dataset.train.labels.shape[-1] != self.out_shape[-1]:
dataset = dataset.to_one_hot()
train_set = dataset.train
test_set = dataset.test
train_set.before_iter()
self.i_step = 0
self.n_epoch = 0
while True:
self.i_step += 1
self.n_epoch = train_set.epochs_completed
X,y = train_set.next_batch(batch_size,shuffle=shuffle_epoch)
self.step(X,y,self.i_step)
for v in self.monitor.values():
v.status(train_set,test_set,self.i_step,self.n_epoch)
if self.n_epoch>=n_epoch:
break
if self.i_step >= max_step:
break
return self
@property
def train_op(self):
if self._train_op is None:
self._train_op = self._get_train_op()
return self._train_op
@with_graph
def _get_train_op(self,step=None):
if self.loss is None:
return None
if self.grads is None:
self.compute_gradients()
op = self.optimizer.apply_gradients(self.grads,step)
# initialize the uninitalized variable (the optimizer would introduce
# uninitalized variable)
vars = self.optimizer.variables
self.run(tf.variables_initializer(vars.values()))
return op
def step(self,X,y,step):
self.run(self.train_op,feed_dict={self.input:X,self.true_output:y})
def predict(self,X):
if self.num_gpu==0:
_in = self.input
_out = self.output
else:
_in = self.input[0]
_out = self.output[0]
return self.run(_out,feed_dict={_in:X})
def eval_node_input(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).input
else:
_out = node.input
return self.run(_out,feed_dict={_in:X})
def eval_node(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).output
else:
_out = node.output
return self.run(_out,feed_dict={_in:X})
def function(self,input_tensors,output_tensors):
def _func(input_vals):
feed = {t:v in zip(input_vals,input_tensors)}
return self.run(output_tensors,feed_dict=feed)
return _func
def score(self,datasubset):
y_pred = self.predict(datasubset.data)
y_pred = np.argmax(y_pred,1)
y_true = datasubset.labels
y_true = np.argmax(y_true,1)
return metrics.accuracy_score(y_true,y_pred)
def measure_loss(self,X,y):
if self.num_gpu==0:
_in = self.input
_true_out = self.true_output
_loss = self.loss
else:
_in = self.input[0]
_true_out = self.true_output[0]
_loss = self.loss[0]
return self.run(_loss,feed_dict={_in:X,_true_out:y})
def run(self,eval_list,feed_dict=None):
return self.sess.run(eval_list, feed_dict=feed_dict)
def run_initor(self,initor):
op = initor.compute()
return self.sess.run(op)
def save(self,filename):
self.save_def(filename)
to_save={}
for k,v in self.variables.items():
to_save[k]=self.run(v)
f=open(filename+'.model','wb')
pickle.dump(to_save,f)
f.close()
def save_def(self,filename):
self.net_def.save(filename+'.modeldef')
def load(self,filename):
self._init_graph_sess()
self.load_def(filename)
f=open(filename+'.model','rb')
data_dict=pickle.load(f)
f.close()
if self.has_built():
with self._graph.as_default():
op = self.initializer.op_by_value_table(data_dict)
self.run(op)
def load_def(self,filename):
self.net_def.load(filename+'.modeldef')
@property
def in_shape(self):
if self._in is not None:
if self.num_gpu==0:
return self._in.get_shape().as_list()
else:
return self._in[0].get_shape().as_list()
return None
@property
def dtype(self):
return self._dtype
@property
def out_shape(self):
if self._out is not None:
if self.num_gpu==0:
return self._out.get_shape().as_list()
else:
return self._out[0].get_shape().as_list()
return None
def copy(self):
obj = Network()
obj.loss_input_layer_name = self.loss_input_layer_name
obj.setup_with_def(self.net_def,self.in_shape)
return obj
def __str__(self):
return '\n'.join([str(l) for l in self.nodes])
def print_shape(self):
for l in self.nodes:
print('%-20s %20s %s %-20s'%(
l.name,
l.input.get_shape(),
'->',
l.output.get_shape()))
def subnet(self,begin_index,end_index):
obj = Network()
obj.setup_with_def(self.layers[begin_index:end_index])
return obj
def supported_layers(self):
return func_table.keys()
def conv2d(self,
ksize,
knum,
strides,
activation=ops.relu,
padding='SAME',
group=1,
biased=True,
name=None):
self.net_def.append(
func_table['conv2d'](
self,ksize,knum,strides,activation,padding,group,biased,name
))
return self
def fc(self,
outdim,
activation = ops.relu,
name=None):
self.net_def.append(
func_table['fc'](
self,outdim,activation,name
))
return self
def dropout(self,
keep_prob,
name=None):
self.net_def.append(
func_table['dropout'](
self,keep_prob,name
))
return self
def lrn(self,
radius,
alpha,
beta,
bias=1.0,
name=None):
self.net_def.append(
func_table['lrn'](
self,radius,alpha,beta,bias,name
))
return self
def bn(self,
scale_offset=True,
activation=ops.relu,
name=None):
self.net_def.append(
func_table['bn'](
self,scale_offset,activation,name
))
return self
def softmax(self,
name=None):
self.net_def.append(
func_table['softmax'](
self,name
))
return self
def maxpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['maxpool'](
self,ksize,strides,padding,name
))
return self
def avgpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['avgpool'](
self,ksize,strides,padding,name
))
return self
class CustomNetwork(Network):
"""Automatically called setup and build when construct
"""
def __init__(self):
Network.__init__(self)
self.default_in_shape = None
self.setup()
in_shape = self.default_in_shape
if not in_shape:
raise ValueError("must sepecify the default_in_shape attributes, or pass the shape as an argument when construction")
def setup(self):
raise NotImplementedError("CustomNetwork Must Implement setup Method")
def build(self,inshape=None):
inshape = inshape or self.default_in_shape
return Network.build(self,inshape)<|fim▁end|> |
self._regulization=None |
<|file_name|>de96b7a322c512a247dbf49cad92f67ae6c33fd6.js<|end_file_name|><|fim▁begin|>"use strict";var assert;module.watch(require('assert'),{default(v){assert=v}},0);var unhexArray;module.watch(require('./testutil'),{unhexArray(v){unhexArray=v}},1);var table;module.watch(require('../src/table'),{default(v){table=v}},2);
describe('table.js', function() {
it('should make a ScriptList table', function() {
// https://www.microsoft.com/typography/OTSPEC/chapter2.htm Examples 1 & 2
var expectedData = unhexArray(
'0003 68616E69 0014 6B616E61 0020 6C61746E 002E' + // Example 1 (hani, kana, latn)<|fim▁hole|> '0000 0003 0003 0000 0001 0002' // UrduLangSys
);
assert.deepEqual(new table.ScriptList([
{ tag: 'hani', script: {
defaultLangSys: {
reserved: 0,
reqFeatureIndex: 0xffff,
featureIndexes: [3]
},
langSysRecords: [] } },
{ tag: 'kana', script: {
defaultLangSys: {
reserved: 0,
reqFeatureIndex: 0xffff,
featureIndexes: [3, 4]
},
langSysRecords: [] } },
{ tag: 'latn', script: {
defaultLangSys: {
reserved: 0,
reqFeatureIndex: 0xffff,
featureIndexes: [0, 1, 2]
},
langSysRecords: [{
tag: 'URD ',
langSys: {
reserved: 0,
reqFeatureIndex: 3,
featureIndexes: [0, 1, 2]
}
}]
} },
]).encode(), expectedData);
});
});<|fim▁end|> | '0004 0000 0000 FFFF 0001 0003' + // hani lang sys
'0004 0000 0000 FFFF 0002 0003 0004' + // kana lang sys
'000A 0001 55524420 0016' + // Example 2 for latn
'0000 FFFF 0003 0000 0001 0002' + // DefLangSys |
<|file_name|>youtubech.py<|end_file_name|><|fim▁begin|># coding=UTF-8
from datetime import timedelta
import resource
import time
import urllib
from django.core.exceptions import ObjectDoesNotExist
from snh.models.youtubemodel import *
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
import snhlogger
logger = snhlogger.init_logger(__name__, "youtube.log")
def run_youtube_harvester():
harvester_list = YoutubeHarvester.objects.all()
for harvester in harvester_list:
logger.info(u"The harvester %s is %s" %
(unicode(harvester),
"active" if harvester.is_active else "inactive"))
if harvester.is_active:
run_harvester_v1(harvester)
def sleeper(retry_count):
retry_delay = 1
wait_delay = retry_count*retry_delay
wait_delay = 10 if wait_delay > 10 else wait_delay
time.sleep(wait_delay)
def get_timedelta(dm_time):
ts = datetime.strptime(dm_time,'%Y-%m-%dT%H:%M:%S+0000')
return (datetime.utcnow() - ts).days
def get_existing_user(param):
user = None
try:
user = YTUser.objects.get(**param)
except MultipleObjectsReturned:
user = YTUser.objects.filter(**param)[0]
logger.warning(u"Duplicated user in DB! %s, %s" % (user, user.fid))
except ObjectDoesNotExist:
pass
return user
def update_user(harvester, userid):
snh_user = None
try:
uniuserid = urllib.urlencode({"k":userid.encode('utf-8')}).split("=")[1:][0]
ytuser = harvester.api_call("GetYouTubeUserEntry",{"username":uniuserid})
split_uri = ytuser.id.text.split("/")
fid = split_uri[len(split_uri)-1]
snh_user = get_existing_user({"fid__exact":fid})
if not snh_user:
snh_user = get_existing_user({"username__exact":userid})
if not snh_user:
snh_user = YTUser(
fid=fid,
username=userid,
)
snh_user.save()
logger.info(u"New user created in status_from_search! %s", snh_user)
snh_user.update_from_youtube(ytuser)
except gdata.service.RequestError, e:
msg = u"RequestError on user %s. Trying to update anyway" % (userid)
logger.info(msg)
if e[0]["status"] == 403 or e[0]["status"] == 400:
snh_user = get_existing_user({"username__exact":userid})
if not snh_user:
snh_user = YTUser(
username=userid,
)
snh_user.save()
logger.info(u"New user created in status_from_search! %s", snh_user)
else:
msg = u"RequestError on user %s!!! Force update failed!!!" % (userid)
logger.exception(msg)
except:
msg = u"Cannot update user %s" % (userid)
logger.exception(msg)
return snh_user
def update_users(harvester):
all_users = harvester.ytusers_to_harvest.all()
for snhuser in all_users:
if not snhuser.error_triggered:
uid = snhuser.fid if snhuser.fid else snhuser.username
update_user(harvester, uid)
else:
logger.info(u"Skipping user update: %s(%s) because user has triggered the error flag." % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
usage = resource.getrusage(resource.RUSAGE_SELF)<|fim▁hole|>
split_uri = ytvideo.id.text.split("/")
fid = split_uri[len(split_uri)-1]
snhvideo = None
try:
try:
snhvideo = YTVideo.objects.get(fid__exact=fid)
except ObjectDoesNotExist:
snhvideo = YTVideo(fid=fid, user=snhuser)
snhvideo.save()
snhvideo.update_from_youtube(snhuser, ytvideo)
except:
msg = u"Cannot update video %s" % (unicode(ytvideo.id.text,'UTF-8'))
logger.exception(msg)
return snhvideo
def update_comment(harvester, snhvideo, ytcomment):
author_name = ytcomment.author[0].name.text
snhuser = update_user(harvester, author_name)
split_uri = ytcomment.id.text.split("/")
fid = split_uri[len(split_uri)-1]
try:
try:
snhcomment = YTComment.objects.get(fid__exact=fid)
except ObjectDoesNotExist:
snhcomment = YTComment(fid=fid, video=snhvideo)
snhcomment.save()
snhcomment.update_from_youtube(snhvideo, snhuser, ytcomment)
except:
msg = u"Cannot update comment %s" % (unicode(ytcomment.id.text,'UTF-8'))
logger.exception(msg)
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.debug(u"Commment updated: comid:%s vidid:%s %s Mem:%s MB" % (snhcomment.fid,snhvideo.fid, harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
return snhcomment
def update_all_comment_helper(harvester, snhvideo, comment_list):
for comment in comment_list.entry:
update_comment(harvester, snhvideo, comment)
get_next_comment_uri = comment_list.GetNextLink().href if comment_list.GetNextLink() else None
return get_next_comment_uri
def update_all_comment(harvester,snhvideo):
comment_list = harvester.api_call("GetYouTubeVideoCommentFeed",{"video_id":snhvideo.fid})
get_next_comment_uri = update_all_comment_helper(harvester, snhvideo, comment_list)
while get_next_comment_uri:
comment_list = harvester.api_call("GetYouTubeVideoCommentFeed",{"uri":get_next_comment_uri})
get_next_comment_uri = update_all_comment_helper(harvester, snhvideo, comment_list)
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.info(u"Comment harvest completed for this video: %s %s Mem:%s MB" % (snhvideo.fid, harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def update_all_videos(harvester):
all_users = harvester.ytusers_to_harvest.all()
for snhuser in all_users:
out_of_window = False
if not snhuser.error_triggered:
logger.info(u"Will update user: %s(%s)" % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
get_vid_url = 'http://gdata.youtube.com/feeds/api/users/%s/uploads?' % snhuser.username
while get_vid_url and not out_of_window:
video_list = harvester.api_call("GetYouTubeVideoFeed",{"uri":get_vid_url})
for video in video_list.entry:
published = datetime.strptime(video.published.text,'%Y-%m-%dT%H:%M:%S.000Z')
if published < harvester.harvest_window_to:
snhvideo = update_video(snhuser, video)
update_all_comment(harvester, snhvideo)
if published < harvester.harvest_window_from:
out_of_window = True
break
if not out_of_window:
get_vid_url = video_list.GetNextLink().href if video_list.GetNextLink() else None
else:
logger.info(u"Skipping user update: %s(%s) because user has triggered the error flag." % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.info(u"Video harvest completed %s Mem:%s MB" % (harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def run_harvester_v1(harvester):
harvester.start_new_harvest()
try:
start = time.time()
update_users(harvester)
update_all_videos(harvester)
logger.info(u"Results computation complete in %ss" % (time.time() - start))
except:
logger.exception(u"EXCEPTION: %s" % harvester)
finally:
usage = resource.getrusage(resource.RUSAGE_SELF)
harvester.end_current_harvest()
logger.info(u"End: %s Stats:%s Mem:%s MB" % (harvester,unicode(harvester.get_stats()),unicode(getattr(usage, "ru_maxrss")/(1024.0))))<|fim▁end|> | logger.info(u"User harvest completed %s Mem:%s MB" % (harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def update_video(snhuser, ytvideo): |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// use opentelemetry::trace::Tracer;
fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
// use tonic as grpc layer here.<|fim▁hole|> // // Traced app logic here...
// });
Ok(())
}<|fim▁end|> | // If you want to use grpcio. enable `grpc-sys` feature and use with_grpcio function here.
// let tracer = opentelemetry_otlp::new_pipeline().with_tonic().install_simple()?;
// tracer.in_span("doing_work", |cx| { |
<|file_name|>opening.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from hdf5handler import HDF5Handler
<|fim▁hole|>for i in range(100):
handler.put(i, 'numbers')
handler.close()<|fim▁end|> | handler = HDF5Handler('mydata.hdf5')
handler.open()
|
<|file_name|>slist_traits.hpp<|end_file_name|><|fim▁begin|>// Boost string_algo library slist_traits.hpp header file ---------------------------//
// Copyright Pavol Droba 2002-2003.
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org/ for updates, documentation, and revision history.
<|fim▁hole|>#define BOOST_STRING_STD_SLIST_TRAITS_HPP
#include <boost/algorithm/string/config.hpp>
#include <boost/algorithm/string/yes_no_type.hpp>
#include BOOST_SLIST_HEADER
#include <boost/algorithm/string/sequence_traits.hpp>
namespace lslboost {
namespace algorithm {
// SGI's std::slist<> traits -----------------------------------------------//
// stable iterators trait
template<typename T, typename AllocT>
class has_stable_iterators< BOOST_STD_EXTENSION_NAMESPACE::slist<T,AllocT> >
{
public:
#if BOOST_WORKAROUND( __IBMCPP__, <= 600 )
enum { value = true };
#else
BOOST_STATIC_CONSTANT(bool, value=true);
#endif // BOOST_WORKAROUND( __IBMCPP__, <= 600 )
typedef mpl::bool_<has_stable_iterators<T>::value> type;
};
// const time insert trait
template<typename T, typename AllocT>
class has_const_time_insert< BOOST_STD_EXTENSION_NAMESPACE::slist<T,AllocT> >
{
public:
#if BOOST_WORKAROUND( __IBMCPP__, <= 600 )
enum { value = true };
#else
BOOST_STATIC_CONSTANT(bool, value=true);
#endif // BOOST_WORKAROUND( __IBMCPP__, <= 600 )
typedef mpl::bool_<has_const_time_insert<T>::value> type;
};
// const time erase trait
template<typename T, typename AllocT>
class has_const_time_erase< BOOST_STD_EXTENSION_NAMESPACE::slist<T,AllocT> >
{
public:
#if BOOST_WORKAROUND( __IBMCPP__, <= 600 )
enum { value = true };
#else
BOOST_STATIC_CONSTANT(bool, value=true);
#endif // BOOST_WORKAROUND( __IBMCPP__, <= 600 )
typedef mpl::bool_<has_const_time_erase<T>::value> type;
};
} // namespace algorithm
} // namespace lslboost
#endif // BOOST_STRING_STD_LIST_TRAITS_HPP<|fim▁end|> | #ifndef BOOST_STRING_STD_SLIST_TRAITS_HPP |
<|file_name|>test_instance.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
import os
from . import spd
# K temps: [0.0, 100.0, 150.0, 200.0, 225.0, 250.0, 275.0, 300.0, 325.0, 350.0, 375.0, 400.0, 425.0, 450.0, 475.0, 500.0, 525.0, 550.0]
# C temps: [273, 373.0, 423.0, 473.0, 498.0, 523.0, 548.0, 573.0, 598.0, 623.0, 648.0, 673.0, 698.0, 723.0, 748.0, 773.0, 798.0, 823.0]
from . import new_lj_thellier_gui_spd as tgs
cwd = os.getcwd()
main_dir = cwd + '/SPD'
calculate = ['int_n', 'frac', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'mdrat', 'maxdev', 'dpal', 'md', 'tail_drat', 'dtr', 'dac', 'DANG']
#calculate = ['int_n', 'frac', 'f', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'z_md', 'q', 'r_sq', 'coeff_det_sq', 'int_mad', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'int_crm', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'drats', 'cdrat', 'mdrat', 'dck', 'maxdev', 'mdev', 'dpal', 'int_ptrm_tail_n', 'md', 'tail_drat', 'dtr', 'dt', 'ac_n', 'dac', 'gmax']
#calculate = ['int_n', 'int_alpha', 'f', 'k', 'drats', 'int_ptrm_tail_n']
#calculate = ['drats']
gui = tgs.Arai_GUI('/magic_measurements.txt', main_dir)
specimens = list(gui.Data.keys())
example = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', calculate)
example.calculate_all_statistics()
PintPars_example = example
def make_specimens(calculate=calculate):
for stat in calculate:
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', [stat])
spec.reqd_stats()
print('---------')
print(calculate)
def many_specimens(calculate=calculate):
from itertools import combinations
c = combinations(calculate, 2)
for combo in c:
print('combo', combo)
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', combo)
spec.reqd_stats()
print('XXXXXXXXXXXXXXX')
#spec.calculate_all_statistics()
SCAT_spec = spd.PintPars(gui.Data, '0238x6011044', 273., 673.) # 0, 400 <|fim▁hole|>SCAT_spec2 = spd.PintPars(gui.Data, '0238x6011044', 273., 698.) # 0, 425
SCAT_spec.York_Regression()
SCAT_spec2.York_Regression()
#new_spec = spd.PintPars(gui.Data, '0238x5721062', 100. + 273., 525. + 273.)
#new_spec.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('/consistency_tests/Yamamoto_Hushi_2008_magic_measurements.txt', cwd)
#thing2 = spd.PintPars(gui2.Data, 'SW01-01A-2', 100. + 273., 480. + 273.)
#thing2 = PintPars(gui.Data, specimens[0], 473., 623.)
#thing2.calculate_all_statistics()
#thing3 = PintPars(gui.Data, specimens[1], 473., 623.)
#thing3.calculate_all_statistics()
#thing4 = PintPars(gui.Data, specimens[2], 473., 623.)
#thing4.calculate_all_statistics()
#thing5 = PintPars(gui.Data, specimens[3], 473., 623.)
#thing5.calculate_all_statistics()
#thing6 = PintPars(gui.Data, specimens[4], 473., 623.)
#thing6.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('new_magic_measurements.txt')
#gui3 = tgs.Arai_GUI('consistency_tests/Bowles_etal_2006_magic_measurements.txt')
#gui4 = tgs.Arai_GUI('consistency_tests/Donadini_etal_2007_magic_measurements.txt')
#gui5 = tgs.Arai_GUI('consistency_tests/Krasa_2000_magic_measurements.txt')
#gui6 = tgs.Arai_GUI('consistency_tests/Muxworthy_etal_2011_magic_measurements.txt')
#gui7 = tgs.Arai_GUI('consistency_tests/Paterson_etal_2010_magic_measurements.txt')
#gui8 = tgs.Arai_GUI('consistency_tests/Selkin_etal_2000_magic_measurements.txt')
#gui10 = tgs.Arai_GUI('consistency_tests/Yamamoto_etal_2003_magic_measurements.txt')<|fim▁end|> | |
<|file_name|>layerwidget_plugin.py<|end_file_name|><|fim▁begin|>###############################################################################
# volumina: volume slicing and editing library
#
# Copyright (C) 2011-2014, the ilastik developers
# <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the Lesser GNU General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# See the files LICENSE.lgpl2 and LICENSE.lgpl3 for full text of the
# GNU Lesser General Public License version 2.1 and 3 respectively.
# This information is also available on the ilastik web site at:
# http://ilastik.org/license/
###############################################################################
from PyQt4.QtDesigner import QPyDesignerCustomWidgetPlugin
from PyQt4.QtGui import QPixmap, QIcon, QColor
from volumina.widgets.layerwidget import LayerWidget
from volumina.layerstack import LayerStackModel, Layer
class PyLayerWidgetPlugin(QPyDesignerCustomWidgetPlugin):
def __init__(self, parent = None):
QPyDesignerCustomWidgetPlugin.__init__(self)
self.initialized = False
def initialize(self, core):
if self.initialized:
return
self.initialized = True
def isInitialized(self):
return self.initialized
def createWidget(self, parent):
model = LayerStackModel()
<|fim▁hole|> o1.name = "Fancy Layer"
o1.opacity = 0.5
model.append(o1)
o2 = Layer()
o2.name = "Some other Layer"
o2.opacity = 0.25
model.append(o2)
o3 = Layer()
o3.name = "Invisible Layer"
o3.opacity = 0.15
o3.visible = False
model.append(o3)
o4 = Layer()
o4.name = "Fancy Layer II"
o4.opacity = 0.95
model.append(o4)
o5 = Layer()
o5.name = "Fancy Layer III"
o5.opacity = 0.65
model.append(o5)
view = LayerWidget(parent, model)
view.updateGeometry()
return view
def name(self):
return "LayerWidget"
def group(self):
return "ilastik widgets"
def icon(self):
return QIcon(QPixmap(16,16))
def toolTip(self):
return ""
def whatsThis(self):
return ""
def isContainer(self):
return False
def domXml(self):
return (
'<widget class="LayerWidget" name=\"layerWidget\">\n'
"</widget>\n"
)
def includeFile(self):
return "volumina.widgets.layerwidget"<|fim▁end|> | o1 = Layer() |
<|file_name|>calculatorService.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package services;
import FareCalculator.Calculate;
import java.util.ArrayList;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.PathParam;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.lang.ClassNotFoundException;
import java.net.URI;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.QueryParam;
import javax.xml.bind.JAXBElement;<|fim▁hole|> *
* @author peppa
*/
@Path("/")
public class calculatorService {
@Context
private UriInfo context;
public calculatorService(){
}
@GET
@Path("/calculate")
@Produces({"application/xml"})
public Response fareCalculator(@DefaultValue("TK") @QueryParam("carrier") String carrier,
@DefaultValue("2012-01-01") @QueryParam("date") String date,
@DefaultValue("ADB") @QueryParam("origCode") String origCode,
@DefaultValue("ESB") @QueryParam("destCode") String destCode,
@DefaultValue("Economy") @QueryParam("fareClass") String fareClass) {
Calculate cal = new Calculate();
FaresType fare = cal.fareCalculate(carrier, date, origCode, destCode, fareClass);
return Response.ok(new JAXBElement<FaresType>(new QName("faresType"), FaresType.class, fare)).build();
}
}<|fim▁end|> | import javax.xml.namespace.QName;
import localstorage.FaresType;
/** |
<|file_name|>migrated_0035.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | function f() { /* infinite */ while (true) { } /* bar */ var each; } |
<|file_name|>Registry.java<|end_file_name|><|fim▁begin|>// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.model.tld;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.equalTo;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Maps.toMap;
import static google.registry.config.RegistryConfig.getSingletonCacheRefreshDuration;
import static google.registry.model.common.EntityGroupRoot.getCrossTldKey;
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
import static google.registry.util.CollectionUtils.nullToEmptyImmutableCopy;
import static google.registry.util.DateTimeUtils.END_OF_TIME;
import static google.registry.util.DateTimeUtils.START_OF_TIME;
import static google.registry.util.PreconditionsUtils.checkArgumentNotNull;
import static org.joda.money.CurrencyUnit.USD;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Range;
import com.google.common.net.InternetDomainName;
import com.googlecode.objectify.Key;
import com.googlecode.objectify.annotation.Embed;
import com.googlecode.objectify.annotation.Entity;
import com.googlecode.objectify.annotation.Id;
import com.googlecode.objectify.annotation.Mapify;
import com.googlecode.objectify.annotation.OnSave;
import com.googlecode.objectify.annotation.Parent;
import google.registry.model.Buildable;
import google.registry.model.CreateAutoTimestamp;
import google.registry.model.ImmutableObject;
import google.registry.model.UnsafeSerializable;
import google.registry.model.annotations.InCrossTld;
import google.registry.model.annotations.ReportedOn;
import google.registry.model.common.EntityGroupRoot;
import google.registry.model.common.TimedTransitionProperty;
import google.registry.model.common.TimedTransitionProperty.TimedTransition;
import google.registry.model.domain.fee.BaseFee.FeeType;
import google.registry.model.domain.fee.Fee;
import google.registry.model.replay.DatastoreAndSqlEntity;
import google.registry.model.tld.label.PremiumList;
import google.registry.model.tld.label.ReservedList;
import google.registry.persistence.VKey;
import google.registry.persistence.converter.JodaMoneyType;
import google.registry.util.Idn;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import javax.persistence.Column;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.PostLoad;
import javax.persistence.Transient;
import org.hibernate.annotations.Columns;
import org.hibernate.annotations.Type;
import org.joda.money.CurrencyUnit;
import org.joda.money.Money;
import org.joda.time.DateTime;
import org.joda.time.Duration;
/** Persisted per-TLD configuration data. */
@ReportedOn
@Entity
@javax.persistence.Entity(name = "Tld")
@InCrossTld
public class Registry extends ImmutableObject
implements Buildable, DatastoreAndSqlEntity, UnsafeSerializable {
@Parent @Transient Key<EntityGroupRoot> parent = getCrossTldKey();
/**
* The canonical string representation of the TLD associated with this {@link Registry}, which is
* the standard ASCII for regular TLDs and punycoded ASCII for IDN TLDs.
*/
@Id
@javax.persistence.Id
@Column(name = "tld_name", nullable = false)
String tldStrId;
/**
* A duplicate of {@link #tldStrId}, to simplify BigQuery reporting since the id field becomes
* {@code __key__.name} rather than being exported as a named field.
*/
@Transient String tldStr;
/** Sets the Datastore specific field, tldStr, when the entity is loaded from Cloud SQL */
@PostLoad
void postLoad() {
tldStr = tldStrId;
}
/** The suffix that identifies roids as belonging to this specific tld, e.g. -HOW for .how. */
String roidSuffix;
/** Default values for all the relevant TLD parameters. */
public static final TldState DEFAULT_TLD_STATE = TldState.PREDELEGATION;
public static final boolean DEFAULT_ESCROW_ENABLED = false;
public static final boolean DEFAULT_DNS_PAUSED = false;
public static final Duration DEFAULT_ADD_GRACE_PERIOD = Duration.standardDays(5);
public static final Duration DEFAULT_AUTO_RENEW_GRACE_PERIOD = Duration.standardDays(45);
public static final Duration DEFAULT_REDEMPTION_GRACE_PERIOD = Duration.standardDays(30);
public static final Duration DEFAULT_RENEW_GRACE_PERIOD = Duration.standardDays(5);
public static final Duration DEFAULT_TRANSFER_GRACE_PERIOD = Duration.standardDays(5);
public static final Duration DEFAULT_AUTOMATIC_TRANSFER_LENGTH = Duration.standardDays(5);
public static final Duration DEFAULT_PENDING_DELETE_LENGTH = Duration.standardDays(5);
public static final Duration DEFAULT_ANCHOR_TENANT_ADD_GRACE_PERIOD = Duration.standardDays(30);
public static final CurrencyUnit DEFAULT_CURRENCY = USD;
public static final Money DEFAULT_CREATE_BILLING_COST = Money.of(USD, 8);
public static final Money DEFAULT_EAP_BILLING_COST = Money.of(USD, 0);
public static final Money DEFAULT_RENEW_BILLING_COST = Money.of(USD, 8);
public static final Money DEFAULT_RESTORE_BILLING_COST = Money.of(USD, 100);
public static final Money DEFAULT_SERVER_STATUS_CHANGE_BILLING_COST = Money.of(USD, 20);
public static final Money DEFAULT_REGISTRY_LOCK_OR_UNLOCK_BILLING_COST = Money.of(USD, 0);
/** The type of TLD, which determines things like backups and escrow policy. */
public enum TldType {
/** A real, official TLD. */
REAL,
/** A test TLD, for the prober. */
TEST
}
/**
* The states a TLD can be in at any given point in time. The ordering below is the required
* sequence of states (ignoring {@link #PDT} which is a pseudo-state).
*/
public enum TldState {
/** The state of not yet being delegated to this registry in the root zone by IANA. */
PREDELEGATION,
/**
* The state in which only trademark holders can submit a "create" request. It is identical to
* {@link #GENERAL_AVAILABILITY} in all other respects.
*/
START_DATE_SUNRISE,
/**
* A state in which no domain operations are permitted. Generally used between sunrise and
* general availability. This state is special in that it has no ordering constraints and can
* appear after any phase.
*/
QUIET_PERIOD,
/**
* The steady state of a TLD in which all domain names are available via first-come,
* first-serve.
*/
GENERAL_AVAILABILITY,
/** A "fake" state for use in predelegation testing. Acts like {@link #GENERAL_AVAILABILITY}. */
PDT
}
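  // Illustrative note (not from the original source): a typical launch timeline stored in
  // tldStateTransitions follows the ordering above, e.g. START_OF_TIME -> PREDELEGATION,
  // then START_DATE_SUNRISE, then QUIET_PERIOD, then GENERAL_AVAILABILITY, with PDT used
  // only for predelegation testing.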
/**
* A transition to a TLD state at a specific time, for use in a TimedTransitionProperty. Public
* because App Engine's security manager requires this for instantiation via reflection.
*/
@Embed
public static class TldStateTransition extends TimedTransition<TldState> {
/** The TLD state. */
private TldState tldState;
@Override
public TldState getValue() {
return tldState;
}
@Override
protected void setValue(TldState tldState) {
this.tldState = tldState;
}
}
/**
* A transition to a given billing cost at a specific time, for use in a TimedTransitionProperty.
*
* <p>Public because App Engine's security manager requires this for instantiation via reflection.
*/
@Embed
public static class BillingCostTransition extends TimedTransition<Money> {
/** The billing cost value. */
private Money billingCost;
@Override
public Money getValue() {
return billingCost;
}
@Override
protected void setValue(Money billingCost) {
this.billingCost = billingCost;
}
}
/** Returns the registry for a given TLD, throwing if none exists. */
public static Registry get(String tld) {
Registry registry = CACHE.getUnchecked(tld).orElse(null);
if (registry == null) {
throw new RegistryNotFoundException(tld);
}
return registry;
}
/** Returns the registry entities for the given TLD strings, throwing if any don't exist. */
static ImmutableSet<Registry> getAll(Set<String> tlds) {
try {
ImmutableMap<String, Optional<Registry>> registries = CACHE.getAll(tlds);
ImmutableSet<String> missingRegistries =
registries.entrySet().stream()
.filter(e -> !e.getValue().isPresent())
.map(Map.Entry::getKey)
.collect(toImmutableSet());
if (missingRegistries.isEmpty()) {
return registries.values().stream().map(Optional::get).collect(toImmutableSet());
} else {
throw new RegistryNotFoundException(missingRegistries);
}
} catch (ExecutionException e) {
throw new RuntimeException("Unexpected error retrieving TLDs " + tlds, e);
}
}
/**
* Invalidates the cache entry.
*
* <p>This is called automatically when the registry is saved. One should also call it when a
* registry is deleted.
*/
@OnSave
public void invalidateInCache() {
CACHE.invalidate(tldStr);
}
/** A cache that loads the {@link Registry} for a given tld. */
private static final LoadingCache<String, Optional<Registry>> CACHE =
CacheBuilder.newBuilder()
.expireAfterWrite(
java.time.Duration.ofMillis(getSingletonCacheRefreshDuration().getMillis()))
.build(
new CacheLoader<String, Optional<Registry>>() {
@Override
public Optional<Registry> load(final String tld) {
// Enter a transaction-less context briefly; we don't want to enroll every TLD in
// a transaction that might be wrapping this call.
return tm().doTransactionless(() -> tm().loadByKeyIfPresent(createVKey(tld)));
}
@Override
public Map<String, Optional<Registry>> loadAll(Iterable<? extends String> tlds) {
ImmutableMap<String, VKey<Registry>> keysMap =
toMap(ImmutableSet.copyOf(tlds), Registry::createVKey);
Map<VKey<? extends Registry>, Registry> entities =
tm().doTransactionless(() -> tm().loadByKeys(keysMap.values()));
return Maps.transformEntries(
keysMap, (k, v) -> Optional.ofNullable(entities.getOrDefault(v, null)));
}
});
public static VKey<Registry> createVKey(String tld) {
return VKey.create(Registry.class, tld, Key.create(getCrossTldKey(), Registry.class, tld));
}
public static VKey<Registry> createVKey(Key<Registry> key) {
return createVKey(key.getName());
}
/**
* The name of the pricing engine that this TLD uses.
*
* <p>This must be a valid key for the map of pricing engines injected by {@code @Inject
* Map<String, PricingEngine>}.
*
* <p>Note that it used to be the canonical class name, hence the name of this field, but this
* restriction has since been relaxed and it may now be any unique string.
*/
String pricingEngineClassName;
/**
* The set of name(s) of the {@code DnsWriter} implementations that this TLD uses.
*
* <p>There must be at least one entry in this set.
*
* <p>All entries of this list must be valid keys for the map of {@code DnsWriter}s injected by
* {@code @Inject Map<String, DnsWriter>}
*/
@Column(nullable = false)
Set<String> dnsWriters;
/**
* The number of locks we allow at once for {@link google.registry.dns.PublishDnsUpdatesAction}.
*
* <p>This should always be a positive integer- use 1 for TLD-wide locks. All {@link Registry}
* objects have this value default to 1.
*
* <p>WARNING: changing this parameter changes the lock name for subsequent DNS updates, and thus
* invalidates the locking scheme for enqueued DNS publish updates. If the {@link
* google.registry.dns.writer.DnsWriter} you use is not parallel-write tolerant, you must follow
* this procedure to change this value:
*
* <ol>
* <li>Pause the DNS queue via {@link google.registry.tools.UpdateTldCommand}
* <li>Change this number
* <li>Let the Registry caches expire (currently 5 minutes) and drain the DNS publish queue
* <li>Unpause the DNS queue
* </ol>
*
* <p>Failure to do so can result in parallel writes to the {@link
* google.registry.dns.writer.DnsWriter}, which may be dangerous depending on your implementation.
*/
@Column(nullable = false)
int numDnsPublishLocks;
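  // Illustrative note (assumption, for clarity): with numDnsPublishLocks = 4, DNS publish
  // work for this TLD is sharded across four independent locks, so up to four
  // PublishDnsUpdatesAction batches may write concurrently; the default of 1 keeps a
  // single TLD-wide lock.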
/** Updates an unset numDnsPublishLocks (0) to the standard default of 1. */
void setDefaultNumDnsPublishLocks() {
if (numDnsPublishLocks == 0) {
numDnsPublishLocks = 1;
}
}
/**
* The unicode-aware representation of the TLD associated with this {@link Registry}.
*
* <p>This will be equal to {@link #tldStr} for ASCII TLDs, but will be non-ASCII for IDN TLDs. We
* store this in a field so that it will be retained upon import into BigQuery.
*/
@Column(nullable = false)
String tldUnicode;
/**
* Id of the folder in drive used to public (export) information for this TLD.
*
* <p>This is optional; if not configured, then information won't be exported for this TLD.
*/
@Nullable String driveFolderId;
/** The type of the TLD, whether it's real or for testing. */
@Column(nullable = false)
@Enumerated(EnumType.STRING)
TldType tldType = TldType.REAL;
/**
* Whether to enable invoicing for this TLD.
*
* <p>Note that this boolean is the sole determiner of whether invoices should be generated for a
* TLD. This applies to {@link TldType#TEST} TLDs as well.
*/
@Column(nullable = false)
boolean invoicingEnabled = false;
/**
* A property that transitions to different TldStates at different times. Stored as a list of
* TldStateTransition embedded objects using the @Mapify annotation.
*/
@Column(nullable = false)
@Mapify(TimedTransitionProperty.TimeMapper.class)
TimedTransitionProperty<TldState, TldStateTransition> tldStateTransitions =
TimedTransitionProperty.forMapify(DEFAULT_TLD_STATE, TldStateTransition.class);
/** An automatically managed creation timestamp. */
@Column(nullable = false)
CreateAutoTimestamp creationTime = CreateAutoTimestamp.create(null);
/** The set of reserved list names that are applicable to this registry. */
@Column(name = "reserved_list_names")
Set<String> reservedListNames;
/**
* Retrieves an ImmutableSet of all ReservedLists associated with this TLD.
*
* <p>This set contains only the names of the lists, not references to them. Updates to a
* reserved list in Cloud SQL are saved as a new ReservedList entity. When using the ReservedList
* for a registry, the database should be queried for the entity with this name that has the
* largest revision ID.
*/
public ImmutableSet<String> getReservedListNames() {
return nullToEmptyImmutableCopy(reservedListNames);
}
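// Hypothetical lookup sketch for the behavior described above (the real query belongs to the
// ReservedList data access code, not this class): pick the row with the highest revision ID, e.g.
//   SELECT * FROM "ReservedList" WHERE name = :name ORDER BY revision_id DESC LIMIT 1;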
/**
* The name of the {@link PremiumList} for this TLD, if there is one.
*
* <p>This is only the name of the list and not a reference to the list. Updates to the premium
* list in Cloud SQL are saved as a new PremiumList entity. When using the PremiumList for a
* registry, the database should be queried for the entity with this name that has the largest
* revision ID.
*/
@Column(name = "premium_list_name", nullable = true)
String premiumListName;
/** Should RDE upload a nightly escrow deposit for this TLD? */
@Column(nullable = false)
boolean escrowEnabled = DEFAULT_ESCROW_ENABLED;
/** Whether the pull queue that writes to authoritative DNS is paused for this TLD. */
@Column(nullable = false)
boolean dnsPaused = DEFAULT_DNS_PAUSED;
/**
* The length of the add grace period for this TLD.
*
* <p>Domain deletes are free and effective immediately so long as they take place within this
* amount of time following creation.
*/
@Column(nullable = false)
Duration addGracePeriodLength = DEFAULT_ADD_GRACE_PERIOD;
/** The length of the anchor tenant add grace period for this TLD. */
@Column(nullable = false)
Duration anchorTenantAddGracePeriodLength = DEFAULT_ANCHOR_TENANT_ADD_GRACE_PERIOD;
/** The length of the auto renew grace period for this TLD. */
@Column(nullable = false)
Duration autoRenewGracePeriodLength = DEFAULT_AUTO_RENEW_GRACE_PERIOD;
/** The length of the redemption grace period for this TLD. */
@Column(nullable = false)
Duration redemptionGracePeriodLength = DEFAULT_REDEMPTION_GRACE_PERIOD;
/** The length of the renew grace period for this TLD. */
@Column(nullable = false)
Duration renewGracePeriodLength = DEFAULT_RENEW_GRACE_PERIOD;
/** The length of the transfer grace period for this TLD. */
@Column(nullable = false)
Duration transferGracePeriodLength = DEFAULT_TRANSFER_GRACE_PERIOD;
/** The length of time before a transfer is automatically approved for this TLD. */
@Column(nullable = false)
Duration automaticTransferLength = DEFAULT_AUTOMATIC_TRANSFER_LENGTH;
/** The length of time a domain spends in the non-redeemable pending delete phase for this TLD. */
@Column(nullable = false)
Duration pendingDeleteLength = DEFAULT_PENDING_DELETE_LENGTH;
/** The currency unit for all costs associated with this TLD. */
@Column(nullable = false)
CurrencyUnit currency = DEFAULT_CURRENCY;
/** The per-year billing cost for registering a new domain name. */
@Type(type = JodaMoneyType.TYPE_NAME)
@Columns(
columns = {
@Column(name = "create_billing_cost_amount"),
@Column(name = "create_billing_cost_currency")
})
Money createBillingCost = DEFAULT_CREATE_BILLING_COST;
/** The one-time billing cost for restoring a domain name from the redemption grace period. */
@Type(type = JodaMoneyType.TYPE_NAME)
@Columns(
columns = {
@Column(name = "restore_billing_cost_amount"),
@Column(name = "restore_billing_cost_currency")
})
Money restoreBillingCost = DEFAULT_RESTORE_BILLING_COST;
/** The one-time billing cost for changing the server status (i.e. lock). */
@Type(type = JodaMoneyType.TYPE_NAME)
@Columns(
columns = {
@Column(name = "server_status_change_billing_cost_amount"),
@Column(name = "server_status_change_billing_cost_currency")
})
Money serverStatusChangeBillingCost = DEFAULT_SERVER_STATUS_CHANGE_BILLING_COST;
/** The one-time billing cost for a registry lock/unlock action initiated by a registrar. */
@Type(type = JodaMoneyType.TYPE_NAME)
@Columns(
columns = {
@Column(name = "registry_lock_or_unlock_cost_amount"),
@Column(name = "registry_lock_or_unlock_cost_currency")
})
Money registryLockOrUnlockBillingCost = DEFAULT_REGISTRY_LOCK_OR_UNLOCK_BILLING_COST;
/**
* A property that transitions to different renew billing costs at different times. Stored as a
* list of BillingCostTransition embedded objects using the @Mapify annotation.
*
* <p>A given value of this property represents the per-year billing cost for renewing a domain
* name. This cost is also used to compute costs for transfers, since each transfer includes a
* renewal to ensure transfers have a cost.
*/
@Column(nullable = false)
@Mapify(TimedTransitionProperty.TimeMapper.class)
TimedTransitionProperty<Money, BillingCostTransition> renewBillingCostTransitions =
TimedTransitionProperty.forMapify(DEFAULT_RENEW_BILLING_COST, BillingCostTransition.class);
/** A property that tracks the EAP fee schedule (if any) for the TLD. */
@Column(nullable = false)
@Mapify(TimedTransitionProperty.TimeMapper.class)
TimedTransitionProperty<Money, BillingCostTransition> eapFeeSchedule =
TimedTransitionProperty.forMapify(DEFAULT_EAP_BILLING_COST, BillingCostTransition.class);
/** Marksdb LORDN service username (the password is stored in the Keyring). */
String lordnUsername;
/** The end of the claims period (at or after this time, claims no longer applies). */
@Column(nullable = false)
DateTime claimsPeriodEnd = END_OF_TIME;
/** An allow list of registrant contact IDs allowed to be used on domains on this TLD (ignored if empty). */
@Nullable Set<String> allowedRegistrantContactIds;
/** An allow list of hosts allowed to be used on domains on this TLD (ignored if empty). */
@Nullable Set<String> allowedFullyQualifiedHostNames;
public String getTldStr() {
return tldStr;
}
public String getRoidSuffix() {
return roidSuffix;
}
/** Retrieve the actual domain name representing the TLD for which this registry operates. */
public InternetDomainName getTld() {
return InternetDomainName.from(tldStr);
}
/** Retrieve the TLD type (real or test). */
public TldType getTldType() {
return tldType;
}
/**
* Retrieve the TLD state at the given time. Defaults to {@link TldState#PREDELEGATION}.
*
* <p>Note that {@link TldState#PDT} TLDs pretend to be in {@link TldState#GENERAL_AVAILABILITY}.
*/
public TldState getTldState(DateTime now) {
TldState state = tldStateTransitions.getValueAtTime(now);
return TldState.PDT.equals(state) ? TldState.GENERAL_AVAILABILITY : state;
}
/** Retrieve whether this TLD is in predelegation testing. */
public boolean isPdt(DateTime now) {
return TldState.PDT.equals(tldStateTransitions.getValueAtTime(now));
}
public DateTime getCreationTime() {
return creationTime.getTimestamp();
}
public boolean getEscrowEnabled() {
return escrowEnabled;
}
public boolean getDnsPaused() {
return dnsPaused;
}
public String getDriveFolderId() {
return driveFolderId;
}
public Duration getAddGracePeriodLength() {
return addGracePeriodLength;
}
public Duration getAutoRenewGracePeriodLength() {
return autoRenewGracePeriodLength;
}
public Duration getRedemptionGracePeriodLength() {
return redemptionGracePeriodLength;
}
public Duration getRenewGracePeriodLength() {
return renewGracePeriodLength;
}
public Duration getTransferGracePeriodLength() {
return transferGracePeriodLength;
}
public Duration getAutomaticTransferLength() {
return automaticTransferLength;
}
public Duration getPendingDeleteLength() {
return pendingDeleteLength;
}
public Duration getAnchorTenantAddGracePeriodLength() {
return anchorTenantAddGracePeriodLength;
}
public Optional<String> getPremiumListName() {
return Optional.ofNullable(premiumListName);
}
public CurrencyUnit getCurrency() {
return currency;
}
/**
* Use <code>PricingEngineProxy.getDomainCreateCost</code> instead of this to find the cost for a
* domain create.
*/
@VisibleForTesting
public Money getStandardCreateCost() {
return createBillingCost;
}
/**
* Returns the add-on cost of a domain restore (the flat registry-wide fee charged in addition to
* one year of renewal for that name).
*/
public Money getStandardRestoreCost() {
return restoreBillingCost;
}
/**
* Use <code>PricingEngineProxy.getDomainRenewCost</code> instead of this to find the cost for a
* domain renewal, and all derived costs (i.e. autorenews, transfers, and the per-domain part of a
* restore cost).
*/
public Money getStandardRenewCost(DateTime now) {
return renewBillingCostTransitions.getValueAtTime(now);
}
/** Returns the cost of a server status change (i.e. lock). */
public Money getServerStatusChangeCost() {
return serverStatusChangeBillingCost;
}
/** Returns the cost of a registry lock/unlock. */
public Money getRegistryLockOrUnlockBillingCost() {
return registryLockOrUnlockBillingCost;
}
public ImmutableSortedMap<DateTime, TldState> getTldStateTransitions() {
return tldStateTransitions.toValueMap();
}
public ImmutableSortedMap<DateTime, Money> getRenewBillingCostTransitions() {
return renewBillingCostTransitions.toValueMap();
}
/** Returns the EAP fee for the registry at the given time. */
public Fee getEapFeeFor(DateTime now) {
ImmutableSortedMap<DateTime, Money> valueMap = getEapFeeScheduleAsMap();
DateTime periodStart = valueMap.floorKey(now);
DateTime periodEnd = valueMap.ceilingKey(now);
// NOTE: assuming END_OF_TIME would never be reached...
Range<DateTime> validPeriod =
Range.closedOpen(
periodStart != null ? periodStart : START_OF_TIME,
periodEnd != null ? periodEnd : END_OF_TIME);
return Fee.create(
eapFeeSchedule.getValueAtTime(now).getAmount(),
FeeType.EAP,
// An EAP fee does not count as premium -- it's a separate one-time fee, independent of
// whether the domain itself is considered standard or premium based on its renewal price.
false,
validPeriod,
validPeriod.upperEndpoint());
}
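// Worked example for the EAP fee lookup above, using a hypothetical schedule
// {START_OF_TIME -> $0, 2024-01-01 -> $100, 2024-02-01 -> $0}: a lookup at 2024-01-15 returns a
// $100 EAP fee whose valid period is [2024-01-01, 2024-02-01), i.e. it applies until the next
// scheduled transition.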
@VisibleForTesting
public ImmutableSortedMap<DateTime, Money> getEapFeeScheduleAsMap() {
return eapFeeSchedule.toValueMap();
}
public String getLordnUsername() {
return lordnUsername;
}
public DateTime getClaimsPeriodEnd() {
return claimsPeriodEnd;
}
public String getPremiumPricingEngineClassName() {
return pricingEngineClassName;
}
public ImmutableSet<String> getDnsWriters() {
return ImmutableSet.copyOf(dnsWriters);
}
/** Returns the number of simultaneous DNS publish operations we allow at once. */
public int getNumDnsPublishLocks() {
return numDnsPublishLocks;
}
public ImmutableSet<String> getAllowedRegistrantContactIds() {
return nullToEmptyImmutableCopy(allowedRegistrantContactIds);
}
public ImmutableSet<String> getAllowedFullyQualifiedHostNames() {
return nullToEmptyImmutableCopy(allowedFullyQualifiedHostNames);
}
@Override
public Builder asBuilder() {
return new Builder(clone(this));
}
/** A builder for constructing {@link Registry} objects, since they are immutable. */
public static class Builder extends Buildable.Builder<Registry> {
public Builder() {}
private Builder(Registry instance) {
super(instance);
}
public Builder setTldType(TldType tldType) {
getInstance().tldType = tldType;
return this;
}
public Builder setInvoicingEnabled(boolean invoicingEnabled) {
getInstance().invoicingEnabled = invoicingEnabled;
return this;
}
/** Sets the TLD state to transition to the specified states at the specified times. */
public Builder setTldStateTransitions(ImmutableSortedMap<DateTime, TldState> tldStatesMap) {
checkNotNull(tldStatesMap, "TLD states map cannot be null");
// Filter out any entries with QUIET_PERIOD as the value before checking for ordering, since
// that phase is allowed to appear anywhere.<|fim▁hole|> Ordering.natural()
.isStrictlyOrdered(
Iterables.filter(tldStatesMap.values(), not(equalTo(TldState.QUIET_PERIOD)))),
"The TLD states are chronologically out of order");
getInstance().tldStateTransitions =
TimedTransitionProperty.fromValueMap(tldStatesMap, TldStateTransition.class);
return this;
}
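// Usage sketch (dates are hypothetical): a minimal launch schedule might look like
//   setTldStateTransitions(ImmutableSortedMap.of(
//       START_OF_TIME, TldState.PREDELEGATION,
//       DateTime.parse("2024-06-01T00:00:00Z"), TldState.GENERAL_AVAILABILITY));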
public Builder setTldStr(String tldStr) {
checkArgument(tldStr != null, "TLD must not be null");
getInstance().tldStr = tldStr;
return this;
}
public Builder setEscrowEnabled(boolean enabled) {
getInstance().escrowEnabled = enabled;
return this;
}
public Builder setDnsPaused(boolean paused) {
getInstance().dnsPaused = paused;
return this;
}
public Builder setDriveFolderId(String driveFolderId) {
getInstance().driveFolderId = driveFolderId;
return this;
}
public Builder setPremiumPricingEngine(String pricingEngineClass) {
getInstance().pricingEngineClassName = checkArgumentNotNull(pricingEngineClass);
return this;
}
public Builder setDnsWriters(ImmutableSet<String> dnsWriters) {
getInstance().dnsWriters = dnsWriters;
return this;
}
public Builder setNumDnsPublishLocks(int numDnsPublishLocks) {
checkArgument(
numDnsPublishLocks > 0,
"numDnsPublishLocks must be positive when set explicitly (use 1 for TLD-wide locks)");
getInstance().numDnsPublishLocks = numDnsPublishLocks;
return this;
}
public Builder setAddGracePeriodLength(Duration addGracePeriodLength) {
checkArgument(
addGracePeriodLength.isLongerThan(Duration.ZERO),
"addGracePeriodLength must be non-zero");
getInstance().addGracePeriodLength = addGracePeriodLength;
return this;
}
/** Warning! Changing this will affect the billing time of autorenew events in the past. */
public Builder setAutoRenewGracePeriodLength(Duration autoRenewGracePeriodLength) {
checkArgument(
autoRenewGracePeriodLength.isLongerThan(Duration.ZERO),
"autoRenewGracePeriodLength must be non-zero");
getInstance().autoRenewGracePeriodLength = autoRenewGracePeriodLength;
return this;
}
public Builder setRedemptionGracePeriodLength(Duration redemptionGracePeriodLength) {
checkArgument(
redemptionGracePeriodLength.isLongerThan(Duration.ZERO),
"redemptionGracePeriodLength must be non-zero");
getInstance().redemptionGracePeriodLength = redemptionGracePeriodLength;
return this;
}
public Builder setRenewGracePeriodLength(Duration renewGracePeriodLength) {
checkArgument(
renewGracePeriodLength.isLongerThan(Duration.ZERO),
"renewGracePeriodLength must be non-zero");
getInstance().renewGracePeriodLength = renewGracePeriodLength;
return this;
}
public Builder setTransferGracePeriodLength(Duration transferGracePeriodLength) {
checkArgument(
transferGracePeriodLength.isLongerThan(Duration.ZERO),
"transferGracePeriodLength must be non-zero");
getInstance().transferGracePeriodLength = transferGracePeriodLength;
return this;
}
public Builder setAutomaticTransferLength(Duration automaticTransferLength) {
checkArgument(
automaticTransferLength.isLongerThan(Duration.ZERO),
"automaticTransferLength must be non-zero");
getInstance().automaticTransferLength = automaticTransferLength;
return this;
}
public Builder setPendingDeleteLength(Duration pendingDeleteLength) {
checkArgument(
pendingDeleteLength.isLongerThan(Duration.ZERO), "pendingDeleteLength must be non-zero");
getInstance().pendingDeleteLength = pendingDeleteLength;
return this;
}
public Builder setCurrency(CurrencyUnit currency) {
checkArgument(currency != null, "currency must be non-null");
getInstance().currency = currency;
return this;
}
public Builder setCreateBillingCost(Money amount) {
checkArgument(amount.isPositiveOrZero(), "createBillingCost cannot be negative");
getInstance().createBillingCost = amount;
return this;
}
public Builder setReservedListsByName(Set<String> reservedListNames) {
checkArgument(reservedListNames != null, "reservedListNames must not be null");
ImmutableSet.Builder<ReservedList> builder = new ImmutableSet.Builder<>();
for (String reservedListName : reservedListNames) {
// Check for existence of the reserved list and throw an exception if it doesn't exist.
Optional<ReservedList> reservedList = ReservedList.get(reservedListName);
checkArgument(
reservedList.isPresent(),
"Could not find reserved list %s to add to the tld",
reservedListName);
builder.add(reservedList.get());
}
return setReservedLists(builder.build());
}
public Builder setReservedLists(ReservedList... reservedLists) {
return setReservedLists(ImmutableSet.copyOf(reservedLists));
}
public Builder setReservedLists(Set<ReservedList> reservedLists) {
checkArgumentNotNull(reservedLists, "reservedLists must not be null");
ImmutableSet.Builder<String> nameBuilder = new ImmutableSet.Builder<>();
for (ReservedList reservedList : reservedLists) {
nameBuilder.add(reservedList.getName());
}
getInstance().reservedListNames = nameBuilder.build();
return this;
}
public Builder setPremiumList(@Nullable PremiumList premiumList) {
getInstance().premiumListName = (premiumList == null) ? null : premiumList.getName();
return this;
}
public Builder setRestoreBillingCost(Money amount) {
checkArgument(amount.isPositiveOrZero(), "restoreBillingCost cannot be negative");
getInstance().restoreBillingCost = amount;
return this;
}
/**
* Sets the renew billing cost to transition to the specified values at the specified times.
*
* <p>Renew billing cost transitions should only be added at least 5 days (the length of an
* automatic transfer) in advance, to avoid discrepancies between the cost stored with the
* billing event (created when the transfer is requested) and the cost at the time when the
* transfer actually occurs (5 days later).
*/
public Builder setRenewBillingCostTransitions(
ImmutableSortedMap<DateTime, Money> renewCostsMap) {
checkArgumentNotNull(renewCostsMap, "Renew billing costs map cannot be null");
checkArgument(
renewCostsMap.values().stream().allMatch(Money::isPositiveOrZero),
"Renew billing cost cannot be negative");
getInstance().renewBillingCostTransitions =
TimedTransitionProperty.fromValueMap(renewCostsMap, BillingCostTransition.class);
return this;
}
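// Usage sketch (amounts and clock are hypothetical): schedule the new price at least 5 days
// ahead, e.g.
//   setRenewBillingCostTransitions(ImmutableSortedMap.of(
//       START_OF_TIME, Money.of(USD, 10), clock.nowUtc().plusDays(6), Money.of(USD, 12)));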
/** Sets the EAP fee schedule for the TLD. */
public Builder setEapFeeSchedule(ImmutableSortedMap<DateTime, Money> eapFeeSchedule) {
checkArgumentNotNull(eapFeeSchedule, "EAP schedule map cannot be null");
checkArgument(
eapFeeSchedule.values().stream().allMatch(Money::isPositiveOrZero),
"EAP fee cannot be negative");
getInstance().eapFeeSchedule =
TimedTransitionProperty.fromValueMap(eapFeeSchedule, BillingCostTransition.class);
return this;
}
private static final Pattern ROID_SUFFIX_PATTERN = Pattern.compile("^[A-Z0-9_]{1,8}$");
public Builder setRoidSuffix(String roidSuffix) {
checkArgument(
ROID_SUFFIX_PATTERN.matcher(roidSuffix).matches(),
"ROID suffix must be in format %s",
ROID_SUFFIX_PATTERN.pattern());
getInstance().roidSuffix = roidSuffix;
return this;
}
public Builder setServerStatusChangeBillingCost(Money amount) {
checkArgument(
amount.isPositiveOrZero(), "Server status change billing cost cannot be negative");
getInstance().serverStatusChangeBillingCost = amount;
return this;
}
public Builder setRegistryLockOrUnlockBillingCost(Money amount) {
checkArgument(amount.isPositiveOrZero(), "Registry lock/unlock cost cannot be negative");
getInstance().registryLockOrUnlockBillingCost = amount;
return this;
}
public Builder setLordnUsername(String username) {
getInstance().lordnUsername = username;
return this;
}
public Builder setClaimsPeriodEnd(DateTime claimsPeriodEnd) {
getInstance().claimsPeriodEnd = checkArgumentNotNull(claimsPeriodEnd);
return this;
}
public Builder setAllowedRegistrantContactIds(
ImmutableSet<String> allowedRegistrantContactIds) {
getInstance().allowedRegistrantContactIds = allowedRegistrantContactIds;
return this;
}
public Builder setAllowedFullyQualifiedHostNames(
ImmutableSet<String> allowedFullyQualifiedHostNames) {
getInstance().allowedFullyQualifiedHostNames = allowedFullyQualifiedHostNames;
return this;
}
@Override
public Registry build() {
final Registry instance = getInstance();
// Pick up the name of the associated TLD from the instance object.
String tldName = instance.tldStr;
checkArgument(tldName != null, "No registry TLD specified");
// Check for canonical form by converting to an InternetDomainName and then back.
checkArgument(
InternetDomainName.isValid(tldName)
&& tldName.equals(InternetDomainName.from(tldName).toString()),
"Cannot create registry for TLD that is not a valid, canonical domain name");
// Check the validity of all TimedTransitionProperties to ensure that they have values for
// START_OF_TIME. The setters above have already checked this for new values, but also check
// here to catch cases where we loaded an invalid TimedTransitionProperty from Datastore and
// cloned it into a new builder, to block re-building a Registry in an invalid state.
instance.tldStateTransitions.checkValidity();
instance.renewBillingCostTransitions.checkValidity();
instance.eapFeeSchedule.checkValidity();
// All costs must be in the expected currency.
// TODO(b/21854155): When we move PremiumList into Datastore, verify its currency too.
checkArgument(
instance.getStandardCreateCost().getCurrencyUnit().equals(instance.currency),
"Create cost must be in the registry's currency");
checkArgument(
instance.getStandardRestoreCost().getCurrencyUnit().equals(instance.currency),
"Restore cost must be in the registry's currency");
checkArgument(
instance.getServerStatusChangeCost().getCurrencyUnit().equals(instance.currency),
"Server status change cost must be in the registry's currency");
checkArgument(
instance.getRegistryLockOrUnlockBillingCost().getCurrencyUnit().equals(instance.currency),
"Registry lock/unlock cost must be in the registry's currency");
Predicate<Money> currencyCheck =
(Money money) -> money.getCurrencyUnit().equals(instance.currency);
checkArgument(
instance.getRenewBillingCostTransitions().values().stream().allMatch(currencyCheck),
"Renew cost must be in the registry's currency");
checkArgument(
instance.eapFeeSchedule.toValueMap().values().stream().allMatch(currencyCheck),
"All EAP fees must be in the registry's currency");
checkArgumentNotNull(
instance.pricingEngineClassName, "All registries must have a configured pricing engine");
checkArgument(
instance.dnsWriters != null && !instance.dnsWriters.isEmpty(),
"At least one DNS writer must be specified."
+ " VoidDnsWriter can be used if DNS writing isn't desired");
// If not set explicitly, numDnsPublishLocks defaults to 1.
instance.setDefaultNumDnsPublishLocks();
checkArgument(
instance.numDnsPublishLocks > 0,
"Number of DNS publish locks must be positive. Use 1 for TLD-wide locks.");
instance.tldStrId = tldName;
instance.tldUnicode = Idn.toUnicode(tldName);
return super.build();
}
}
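// Builder usage sketch (all values are illustrative; the pricing engine and DNS writer names
// must match whatever is injected in your deployment):
//   Registry registry =
//       new Registry.Builder()
//           .setTldStr("example")
//           .setRoidSuffix("EXAMPLE")
//           .setPremiumPricingEngine("StaticPremiumListPricingEngine")
//           .setDnsWriters(ImmutableSet.of("VoidDnsWriter"))
//           .build();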
/** Exception to throw when no Registry entity is found for given TLD string(s). */
public static class RegistryNotFoundException extends RuntimeException {
RegistryNotFoundException(ImmutableSet<String> tlds) {
super("No registry object(s) found for " + Joiner.on(", ").join(tlds));
}
RegistryNotFoundException(String tld) {
this(ImmutableSet.of(tld));
}
}
}<|fim▁end|> | checkArgument( |
<|file_name|>animal.ts<|end_file_name|><|fim▁begin|>export class Animal {
protected name: string;
<|fim▁hole|>
public say() {
this.out(this.name);
}
protected out(str: string) {
console.info(str);
}
}<|fim▁end|> |
constructor(name: string) {
this.name = name;
}
|
<|file_name|>codegen.rs<|end_file_name|><|fim▁begin|>use prolog_parser::ast::*;
use prolog::allocator::*;
use prolog::arithmetic::*;
use prolog::clause_types::*;
use prolog::fixtures::*;
use prolog::forms::*;
use prolog::indexing::*;
use prolog::instructions::*;
use prolog::iterators::*;
use prolog::machine::machine_indices::*;
use prolog::targets::*;
use std::cell::Cell;
use std::collections::{HashMap};
use std::rc::Rc;
use std::vec::Vec;
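// Compiles parsed Prolog clauses (facts, rules, and queries) into WAM-style instruction
// sequences, using the `TermMarker` allocator to assign temporary and permanent registers.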
pub struct CodeGenerator<TermMarker> {
flags: MachineFlags,
marker: TermMarker,
var_count: HashMap<Rc<Var>, usize>,
non_counted_bt: bool
}
pub struct ConjunctInfo<'a> {
pub perm_vs: VariableFixtures<'a>,
pub num_of_chunks: usize,
pub has_deep_cut: bool,
}
impl<'a> ConjunctInfo<'a>
{
fn new(perm_vs: VariableFixtures<'a>, num_of_chunks: usize, has_deep_cut: bool) -> Self {
ConjunctInfo { perm_vs, num_of_chunks, has_deep_cut }
}
fn allocates(&self) -> bool {
self.perm_vs.size() > 0 || self.num_of_chunks > 1 || self.has_deep_cut
}
fn perm_vars(&self) -> usize {
self.perm_vs.size() + self.perm_var_offset()
}
fn perm_var_offset(&self) -> usize {
self.has_deep_cut as usize
}
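// Rewrites the query instructions of the final goal so that variables which may still point
// into the current stack frame are flagged as unsafe before that frame is deallocated.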
fn mark_unsafe_vars(&self, mut unsafe_var_marker: UnsafeVarMarker, code: &mut Code) {
// target the last goal of the rule for handling unsafe variables. the loop below walks
// backwards over the trailing run of query instructions to find where that last goal begins.
let right_index = if let &Line::Control(_) = code.last().unwrap() {
code.len() - 2
} else {
code.len() - 1
};
let mut index = right_index;
if let Line::Query(_) = &code[right_index] {
while let Line::Query(_) = &code[index] { // index >= 0.
if index == 0 {
break;
} else {
index -= 1;
}
}
if let Line::Query(_) = &code[index] {} else {
index += 1;
}
unsafe_var_marker.record_unsafe_vars(&self.perm_vs);
for line in code.iter_mut() {
if let &mut Line::Query(ref mut query_instr) = line {
unsafe_var_marker.mark_safe_vars(query_instr);
}
}
for index in index .. right_index + 1 {
if let &mut Line::Query(ref mut query_instr) = &mut code[index] {
unsafe_var_marker.mark_unsafe_vars(query_instr);
}
}
}
}
}
impl<'a, TermMarker: Allocator<'a>> CodeGenerator<TermMarker>
{
pub fn new(non_counted_bt: bool, flags: MachineFlags) -> Self {
CodeGenerator { marker: Allocator::new(),
var_count: HashMap::new(),
non_counted_bt,
flags }
}
pub fn take_vars(self) -> AllocVarDict {
self.marker.take_bindings()
}
fn update_var_count<Iter: Iterator<Item=TermRef<'a>>>(&mut self, iter: Iter)
{
for term in iter {
if let TermRef::Var(_, _, var) = term {
let entry = self.var_count.entry(var).or_insert(0);
*entry += 1;
}
}
}
fn get_var_count(&self, var: &'a Var) -> usize {
*self.var_count.get(var).unwrap()
}
fn mark_non_callable(&mut self, name: Rc<Var>, arity: usize, term_loc: GenContext,
vr: &'a Cell<VarReg>, code: &mut Code)
-> RegType
{
match self.marker.bindings().get(&name) {
Some(&VarData::Temp(_, t, _)) if t != 0 => RegType::Temp(t),
Some(&VarData::Perm(p)) if p != 0 => RegType::Perm(p),
_ => {
let mut target = Vec::new();
self.marker.reset_arg(arity);
self.marker.mark_var(name, Level::Shallow, vr, term_loc, &mut target);
if !target.is_empty() {
for query_instr in target {
code.push(Line::Query(query_instr));
}
}
vr.get().norm()
}
}
}
fn add_or_increment_void_instr<Target>(target: &mut Vec<Target>)
where Target: CompilationTarget<'a>
{
if let Some(ref mut instr) = target.last_mut() {
if Target::is_void_instr(&*instr) {
Target::incr_void_instr(instr);
return;
}
}
target.push(Target::to_void(1));
}
fn subterm_to_instr<Target>(&mut self,
subterm: &'a Term,
term_loc: GenContext,
is_exposed: bool,
target: &mut Vec<Target>)
where Target: CompilationTarget<'a>
{
match subterm {
&Term::AnonVar if is_exposed =>
self.marker.mark_anon_var(Level::Deep, term_loc, target),
&Term::AnonVar =>
Self::add_or_increment_void_instr(target),
&Term::Cons(ref cell, _, _) | &Term::Clause(ref cell, _, _, _) => {
self.marker.mark_non_var(Level::Deep, term_loc, cell, target);
target.push(Target::clause_arg_to_instr(cell.get()));
},
&Term::Constant(_, ref constant) =>
target.push(Target::constant_subterm(constant.clone())),
&Term::Var(ref cell, ref var) =>
if is_exposed || self.get_var_count(var) > 1 {
self.marker.mark_var(var.clone(), Level::Deep, cell, term_loc, target);
} else {
Self::add_or_increment_void_instr(target);
}
};
}
fn compile_target<Target, Iter>(&mut self, iter: Iter, term_loc: GenContext, is_exposed: bool)
-> Vec<Target>
where Target: CompilationTarget<'a>, Iter: Iterator<Item=TermRef<'a>>
{
let mut target = Vec::new();
for term in iter {
match term {
TermRef::Clause(lvl, cell, ct, terms) => {
self.marker.mark_non_var(lvl, term_loc, cell, &mut target);
target.push(Target::to_structure(ct, terms.len(), cell.get()));
for subterm in terms {
self.subterm_to_instr(subterm.as_ref(), term_loc, is_exposed, &mut target);
}
},
TermRef::Cons(lvl, cell, head, tail) => {
self.marker.mark_non_var(lvl, term_loc, cell, &mut target);
target.push(Target::to_list(lvl, cell.get()));
self.subterm_to_instr(head, term_loc, is_exposed, &mut target);
self.subterm_to_instr(tail, term_loc, is_exposed, &mut target);
},
TermRef::Constant(lvl @ Level::Shallow, cell, constant) => {
self.marker.mark_non_var(lvl, term_loc, cell, &mut target);
target.push(Target::to_constant(lvl, constant.clone(), cell.get()));
},
TermRef::AnonVar(lvl @ Level::Shallow) =>
if let GenContext::Head = term_loc {
self.marker.advance_arg();
} else {
self.marker.mark_anon_var(lvl, term_loc, &mut target);
},
TermRef::Var(lvl @ Level::Shallow, cell, ref var) if var.as_str() == "!" => {
if self.marker.is_unbound(var.clone()) {
if term_loc != GenContext::Head {
self.marker.mark_reserved_var(var.clone(), lvl, cell, term_loc,
&mut target, perm_v!(1), false);
continue;
}
}
self.marker.mark_var(var.clone(), lvl, cell, term_loc, &mut target);
},
TermRef::Var(lvl @ Level::Shallow, cell, var) =>
self.marker.mark_var(var.clone(), lvl, cell, term_loc, &mut target),
_ => {}
};
}
target
}
fn collect_var_data(&mut self, mut iter: ChunkedIterator<'a>) -> ConjunctInfo<'a>
{
let mut vs = VariableFixtures::new();
while let Some((chunk_num, lt_arity, chunked_terms)) = iter.next() {
for (i, chunked_term) in chunked_terms.iter().enumerate() {
let term_loc = match chunked_term {
&ChunkedTerm::HeadClause(..) => GenContext::Head,
&ChunkedTerm::BodyTerm(_) => if i < chunked_terms.len() - 1 {
GenContext::Mid(chunk_num)
} else {
GenContext::Last(chunk_num)
}
};
self.update_var_count(chunked_term.post_order_iter());
vs.mark_vars_in_chunk(chunked_term.post_order_iter(), lt_arity, term_loc);
}
}
let num_of_chunks = iter.chunk_num;
let has_deep_cut = iter.encountered_deep_cut();
vs.populate_restricting_sets();
vs.set_perm_vals(has_deep_cut);
let vs = self.marker.drain_var_data(vs);
ConjunctInfo::new(vs, num_of_chunks, has_deep_cut)
}
fn add_conditional_call(code: &mut Code, qt: &QueryTerm, pvs: usize)
{
match qt {
&QueryTerm::Jump(ref vars) =>
code.push(jmp_call!(vars.len(), 0, pvs)),
&QueryTerm::Clause(_, ref ct, ref terms, true) =>
code.push(call_clause_by_default!(ct.clone(), terms.len(), pvs)),
&QueryTerm::Clause(_, ref ct, ref terms, false) =>
code.push(call_clause!(ct.clone(), terms.len(), pvs)),
_ => {}
}
}
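// Last-call optimization: flag the final call/jump instruction as a last call and return the
// index at which the caller should insert a Deallocate instruction.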
fn lco(code: &mut Code) -> usize
{
let mut dealloc_index = code.len() - 1;
match code.last_mut() {
Some(&mut Line::Control(ref mut ctrl)) =>
match ctrl {
&mut ControlInstruction::CallClause(_, _, _, ref mut last_call, _) =>
*last_call = true,
&mut ControlInstruction::JmpBy(_, _, _, ref mut last_call) =>
*last_call = true,
&mut ControlInstruction::Proceed => {},
_ => dealloc_index += 1
},
Some(&mut Line::Cut(CutInstruction::Cut(_))) =>
dealloc_index += 1,
_ => {}
};
dealloc_index
}
fn compile_inlined(&mut self, ct: &InlinedClauseType, terms: &'a Vec<Box<Term>>,
term_loc: GenContext, code: &mut Code)
-> Result<(), ParserError>
{
match ct {
&InlinedClauseType::CompareNumber(cmp, ..) => {
if let &Term::Var(ref vr, ref name) = terms[0].as_ref() {
self.mark_non_callable(name.clone(), 2, term_loc, vr, code);
}
if let &Term::Var(ref vr, ref name) = terms[1].as_ref() {
self.mark_non_callable(name.clone(), 2, term_loc, vr, code);
}
let (mut lcode, at_1) = self.call_arith_eval(terms[0].as_ref(), 1)?;
let (mut rcode, at_2) = self.call_arith_eval(terms[1].as_ref(), 2)?;
code.append(&mut lcode);
code.append(&mut rcode);
code.push(compare_number_instr!(cmp,
at_1.unwrap_or(interm!(1)),
at_2.unwrap_or(interm!(2))));
},
&InlinedClauseType::IsAtom(..) =>
match terms[0].as_ref() {
&Term::Constant(_, Constant::Char(_))
| &Term::Constant(_, Constant::EmptyList)
| &Term::Constant(_, Constant::Atom(..)) => {
code.push(succeed!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_atom!(r));
}
_ => {
code.push(fail!());
}
},
&InlinedClauseType::IsAtomic(..) =>
match terms[0].as_ref() {
&Term::AnonVar | &Term::Clause(..) | &Term::Cons(..) => {
code.push(fail!());
},
&Term::Constant(..) => {
code.push(succeed!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_atomic!(r));
}
},
&InlinedClauseType::IsCompound(..) =>
match terms[0].as_ref() {
&Term::Clause(..) | &Term::Cons(..) => {
code.push(succeed!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_compound!(r));
},
_ => {
code.push(fail!());
}
},
&InlinedClauseType::IsRational(..) =>
match terms[0].as_ref() {
&Term::Constant(_, Constant::Rational(_)) => {
code.push(succeed!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_rational!(r));
},
_ => {
code.push(fail!());
}
},
&InlinedClauseType::IsFloat(..) =>
match terms[0].as_ref() {
&Term::Constant(_, Constant::Float(_)) => {
code.push(succeed!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_float!(r));
},
_ => {
code.push(fail!());
}
},
&InlinedClauseType::IsString(..) =>
match terms[0].as_ref() {
&Term::Constant(_, Constant::String(_)) => {
code.push(succeed!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_string!(r));
},
_ => {
code.push(fail!());
}
},
&InlinedClauseType::IsNonVar(..) =>
match terms[0].as_ref() {
&Term::AnonVar => {
code.push(fail!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_nonvar!(r));
},
_ => {
code.push(succeed!());
}
},
&InlinedClauseType::IsInteger(..) =>
match terms[0].as_ref() {
&Term::Constant(_, Constant::CharCode(_))
| &Term::Constant(_, Constant::Integer(_)) => {
code.push(succeed!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_integer!(r));
},
_ => {
code.push(fail!());
},
},
&InlinedClauseType::IsVar(..) =>
match terms[0].as_ref() {
&Term::Constant(..) | &Term::Clause(..) | &Term::Cons(..) => {
code.push(fail!());
},
&Term::AnonVar => {
code.push(succeed!());
},
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_var!(r));
}
},
&InlinedClauseType::IsPartialString(..) =>
match terms[0].as_ref() {
&Term::Var(ref vr, ref name) => {
let r = self.mark_non_callable(name.clone(), 1, term_loc, vr, code);
code.push(is_partial_string!(r));
},
_ => code.push(fail!())
}
}
Ok(())
}
fn call_arith_eval(&self, term: &'a Term, target_int: usize) -> Result<ArithCont, ArithmeticError>
{
let mut evaluator = ArithmeticEvaluator::new(self.marker.bindings(), target_int);
evaluator.eval(term)
}
fn compile_is_call(&mut self, terms: &'a Vec<Box<Term>>, code: &mut Code,
term_loc: GenContext, use_default_call_policy: bool)
-> Result<(), ParserError>
{
let (mut acode, at) = self.call_arith_eval(terms[1].as_ref(), 1)?;
code.append(&mut acode);
Ok(match terms[0].as_ref() {
&Term::Var(ref vr, ref name) => {
let mut target = vec![];
self.marker.reset_arg(2);
self.marker.mark_var(name.clone(), Level::Shallow, vr,
term_loc, &mut target);
if !target.is_empty() {
code.extend(target.into_iter().map(Line::Query));
}
if use_default_call_policy {
code.push(is_call_by_default!(temp_v!(1), at.unwrap_or(interm!(1))))
} else {
code.push(is_call!(temp_v!(1), at.unwrap_or(interm!(1))))
}
},
&Term::Constant(_, ref c @ Constant::Integer(_)) => {
code.push(Line::Query(put_constant!(Level::Shallow, c.clone(), temp_v!(1))));
if use_default_call_policy {
code.push(is_call_by_default!(temp_v!(1), at.unwrap_or(interm!(1))))
} else {
code.push(is_call!(temp_v!(1), at.unwrap_or(interm!(1))))
}
},
&Term::Constant(_, ref c @ Constant::Float(_)) => {<|fim▁hole|>
if use_default_call_policy {
code.push(is_call_by_default!(temp_v!(1), at.unwrap_or(interm!(1))))
} else {
code.push(is_call!(temp_v!(1), at.unwrap_or(interm!(1))))
}
},
&Term::Constant(_, ref c @ Constant::Rational(_)) => {
code.push(Line::Query(put_constant!(Level::Shallow, c.clone(), temp_v!(1))));
if use_default_call_policy {
code.push(is_call_by_default!(temp_v!(1), at.unwrap_or(interm!(1))))
} else {
code.push(is_call!(temp_v!(1), at.unwrap_or(interm!(1))))
}
},
_ => code.push(fail!())
})
}
#[inline]
fn compile_unblocked_cut(&mut self, code: &mut Code, cell: &'a Cell<VarReg>)
{
let r = self.marker.get(Rc::new(String::from("!")));
cell.set(VarReg::Norm(r));
code.push(set_cp!(cell.get().norm()));
}
fn compile_get_level_and_unify(&mut self, code: &mut Code, cell: &'a Cell<VarReg>,
var: Rc<Var>, term_loc: GenContext)
{
let mut target = Vec::new();
self.marker.reset_arg(1);
self.marker.mark_var(var, Level::Shallow, cell, term_loc, &mut target);
if !target.is_empty() {
code.extend(target.into_iter().map(|query_instr| Line::Query(query_instr)));
}
code.push(get_level_and_unify!(cell.get().norm()));
}
fn compile_seq(&mut self, iter: ChunkedIterator<'a>, conjunct_info: &ConjunctInfo<'a>,
code: &mut Code, is_exposed: bool)
-> Result<(), ParserError>
{
for (chunk_num, _, terms) in iter.rule_body_iter() {
for (i, term) in terms.iter().enumerate() {
let term_loc = if i + 1 < terms.len() {
GenContext::Mid(chunk_num)
} else {
GenContext::Last(chunk_num)
};
match *term {
&QueryTerm::GetLevelAndUnify(ref cell, ref var) =>
self.compile_get_level_and_unify(code, cell, var.clone(), term_loc),
&QueryTerm::UnblockedCut(ref cell) =>
self.compile_unblocked_cut(code, cell),
&QueryTerm::BlockedCut =>
code.push(if chunk_num == 0 {
Line::Cut(CutInstruction::NeckCut)
} else {
Line::Cut(CutInstruction::Cut(perm_v!(1)))
}),
&QueryTerm::Clause(_, ClauseType::BuiltIn(BuiltInClauseType::Is(..)),
ref terms, use_default_call_policy)
=> self.compile_is_call(terms, code, term_loc, use_default_call_policy)?,
&QueryTerm::Clause(_, ClauseType::Inlined(ref ct), ref terms, _)
=> self.compile_inlined(ct, terms, term_loc, code)?,
_ => {
let num_perm_vars = if chunk_num == 0 {
conjunct_info.perm_vars()
} else {
conjunct_info.perm_vs.vars_above_threshold(i + 1)
};
self.compile_query_line(term, term_loc, code, num_perm_vars, is_exposed);
},
}
}
self.marker.reset_contents();
}
Ok(())
}
fn compile_seq_prelude(&mut self, conjunct_info: &ConjunctInfo, body: &mut Code)
{
if conjunct_info.allocates() {
let perm_vars = conjunct_info.perm_vars();
body.push(Line::Control(ControlInstruction::Allocate(perm_vars)));
if conjunct_info.has_deep_cut {
body.push(Line::Cut(CutInstruction::GetLevel(perm_v!(1))));
}
}
}
fn compile_cleanup(code: &mut Code, conjunct_info: &ConjunctInfo, toc: &'a QueryTerm)
{
// add a proceed to bookend any trailing cuts.
match toc {
&QueryTerm::BlockedCut | &QueryTerm::UnblockedCut(..) => code.push(proceed!()),
&QueryTerm::Clause(_, ClauseType::Inlined(..), ..) => code.push(proceed!()),
_ => {}
};
// perform last-call optimization (LCO).
let dealloc_index = Self::lco(code);
if conjunct_info.allocates() {
code.insert(dealloc_index, Line::Control(ControlInstruction::Deallocate));
}
}
pub fn compile_rule<'b: 'a>(&mut self, rule: &'b Rule) -> Result<Code, ParserError>
{
let iter = ChunkedIterator::from_rule(rule);
let conjunct_info = self.collect_var_data(iter);
let &Rule { head: (_, ref args, ref p1), ref clauses } = rule;
let mut code = Vec::new();
self.marker.reset_at_head(args);
self.compile_seq_prelude(&conjunct_info, &mut code);
let iter = FactIterator::from_rule_head_clause(args);
let mut fact = self.compile_target(iter, GenContext::Head, false);
let mut unsafe_var_marker = UnsafeVarMarker::new();
if !fact.is_empty() {
unsafe_var_marker = self.mark_unsafe_fact_vars(&mut fact);
for fact_instr in fact {
code.push(Line::Fact(fact_instr));
}
}
let iter = ChunkedIterator::from_rule_body(p1, clauses);
try!(self.compile_seq(iter, &conjunct_info, &mut code, false));
if conjunct_info.allocates() {
conjunct_info.mark_unsafe_vars(unsafe_var_marker, &mut code);
}
Self::compile_cleanup(&mut code, &conjunct_info, clauses.last().unwrap_or(p1));
Ok(code)
}
fn mark_unsafe_fact_vars(&self, fact: &mut CompiledFact) -> UnsafeVarMarker
{
let mut unsafe_vars = HashMap::new();
for var_status in self.marker.bindings().values() {
unsafe_vars.insert(var_status.as_reg_type(), false);
}
for fact_instr in fact.iter_mut() {
match fact_instr {
&mut FactInstruction::UnifyValue(reg) =>
if let Some(found) = unsafe_vars.get_mut(®) {
if !*found {
*found = true;
*fact_instr = FactInstruction::UnifyLocalValue(reg);
}
},
&mut FactInstruction::UnifyVariable(reg) =>
if let Some(found) = unsafe_vars.get_mut(®) {
*found = true;
},
_ => {}
};
}
UnsafeVarMarker { unsafe_vars }
}
pub fn compile_fact<'b: 'a>(&mut self, term: &'b Term) -> Code
{
self.update_var_count(post_order_iter(term));
let mut vs = VariableFixtures::new();
vs.mark_vars_in_chunk(post_order_iter(term), term.arity(), GenContext::Head);
vs.populate_restricting_sets();
self.marker.drain_var_data(vs);
let mut code = Vec::new();
if let &Term::Clause(_, _, ref args, _) = term {
self.marker.reset_at_head(args);
let iter = FactInstruction::iter(term);
let mut compiled_fact = self.compile_target(iter, GenContext::Head, false);
self.mark_unsafe_fact_vars(&mut compiled_fact);
if !compiled_fact.is_empty() {
for fact_instr in compiled_fact {
code.push(Line::Fact(fact_instr));
}
}
}
code.push(proceed!());
code
}
fn compile_query_line(&mut self, term: &'a QueryTerm, term_loc: GenContext,
code: &mut Code, num_perm_vars_left: usize, is_exposed: bool)
{
self.marker.reset_arg(term.arity());
let iter = query_term_post_order_iter(term);
let query = self.compile_target(iter, term_loc, is_exposed);
if !query.is_empty() {
for query_instr in query {
code.push(Line::Query(query_instr));
}
}
Self::add_conditional_call(code, term, num_perm_vars_left);
}
pub fn compile_query(&mut self, query: &'a Vec<QueryTerm>) -> Result<Code, ParserError>
{
let iter = ChunkedIterator::from_term_sequence(query);
let conjunct_info = self.collect_var_data(iter);
let mut code = Vec::new();
self.compile_seq_prelude(&conjunct_info, &mut code);
let iter = ChunkedIterator::from_term_sequence(query);
try!(self.compile_seq(iter, &conjunct_info, &mut code, true));
if conjunct_info.allocates() {
conjunct_info.mark_unsafe_vars(UnsafeVarMarker::new(), &mut code);
}
if let Some(query_term) = query.last() {
Self::compile_cleanup(&mut code, &conjunct_info, query_term);
}
Ok(code)
}
fn split_predicate(clauses: &Vec<PredicateClause>) -> Vec<(usize, usize)>
{
let mut subseqs = Vec::new();
let mut left_index = 0;
for (right_index, clause) in clauses.iter().enumerate() {
match clause.first_arg() {
Some(&Term::Var(_, _)) | Some(&Term::AnonVar) => {
if left_index < right_index {
subseqs.push((left_index, right_index));
}
subseqs.push((right_index, right_index + 1));
left_index = right_index + 1;
},
_ => {}
}
}
if left_index < clauses.len() {
subseqs.push((left_index, clauses.len()));
}
subseqs
}
fn trust_me(&self) -> ChoiceInstruction {
if self.non_counted_bt {
ChoiceInstruction::DefaultTrustMe
} else {
ChoiceInstruction::TrustMe
}
}
fn retry_me_else(&self, offset: usize) -> ChoiceInstruction {
if self.non_counted_bt {
ChoiceInstruction::DefaultRetryMeElse(offset)
} else {
ChoiceInstruction::RetryMeElse(offset)
}
}
fn compile_pred_subseq<'b: 'a>(&mut self, clauses: &'b [PredicateClause])
-> Result<Code, ParserError>
{
let mut code_body = Vec::new();
let mut code_offsets = CodeOffsets::new(self.flags);
let num_clauses = clauses.len();
for (i, clause) in clauses.iter().enumerate() {
self.marker.reset();
let mut clause_code = match clause {
&PredicateClause::Fact(ref fact) =>
self.compile_fact(fact),
&PredicateClause::Rule(ref rule) =>
try!(self.compile_rule(rule))
};
if num_clauses > 1 {
let choice = match i {
0 => ChoiceInstruction::TryMeElse(clause_code.len() + 1),
_ if i == num_clauses - 1 => self.trust_me(),
_ => self.retry_me_else(clause_code.len() + 1)
};
code_body.push(Line::Choice(choice));
}
clause.first_arg().map(|arg| {
let index = code_body.len();
code_offsets.index_term(arg, index);
});
code_body.append(&mut clause_code);
}
let mut code = Vec::new();
code_offsets.add_indices(&mut code, code_body);
Ok(code)
}
pub fn compile_predicate<'b: 'a>(&mut self, clauses: &'b Vec<PredicateClause>)
-> Result<Code, ParserError>
{
let mut code = Vec::new();
let split_pred = Self::split_predicate(&clauses);
let multi_seq = split_pred.len() > 1;
for (l, r) in split_pred {
let mut code_segment = try!(self.compile_pred_subseq(&clauses[l .. r]));
if multi_seq {
let choice = match l {
0 => ChoiceInstruction::TryMeElse(code_segment.len() + 1),
_ if r == clauses.len() => self.trust_me(),
_ => self.retry_me_else(code_segment.len() + 1)
};
code.push(Line::Choice(choice));
}
code.append(&mut code_segment);
}
Ok(code)
}
}<|fim▁end|> | code.push(Line::Query(put_constant!(Level::Shallow, c.clone(), temp_v!(1)))); |
<|file_name|>download.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
import { NavController, NavParams, LoadingController, ToastController } from 'ionic-angular';
import { FilePath, Transfer } from 'ionic-native';
import { File } from '@ionic-native/file';
import { Http, Headers } from '@angular/http';
/*
Generated class for the Download page.
See http://ionicframework.com/docs/v2/components/#navigation for more info on
Ionic pages and navigation.
*/
@Component({
selector: 'page-download',
templateUrl: 'download.html'
})
export class DownloadPage {
items: any;
hostname: string;
filelist: any;
loading: any;
constructor(public navCtrl: NavController, public navParams: NavParams, public http: Http,
public loadingCtrl: LoadingController, public toastCtrl: ToastController, public file: File) {
this.http = http;
this.http.get("assets/server.json")
.subscribe(data =>{
this.items = JSON.parse(data['_body']);//get ip from server.json
this.hostname = this.items.ip; //put ip into hostname
this.http.get(this.hostname + 'download_dir')
.subscribe(data =>{
this.filelist = JSON.parse(data['_body']);
console.log(this.filelist);
})
});
}
presentToast(text) {
let toast = this.toastCtrl.create({
message: text,
duration: 3000,
position: 'bottom'
});
toast.present();
}
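// Streams the selected file from the backend's download endpoint into the device's external
// Download directory, showing a loading spinner for the duration of the transfer.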
download(file){
console.log(file);
const fileTransfer = new Transfer();
var uri= encodeURI(this.hostname + 'download/' + file);
this.loading = this.loadingCtrl.create({
content: 'Downloading...',
});
this.loading.present();
fileTransfer.download(uri, this.file.externalRootDirectory+ '/Download/' + file).then((entry) => {
console.log('download complete: ' + entry.toURL());
this.loading.dismissAll()
this.presentToast('File successfully downloaded.');
}, err => {
console.log(err);<|fim▁hole|> this.presentToast('Error while downloading file.');
});
}
ionViewDidLoad() {
console.log('ionViewDidLoad DownloadPage');
}
}<|fim▁end|> | this.loading.dismissAll() |
<|file_name|>File_Avc_Duplicate.cpp<|end_file_name|><|fim▁begin|>// File__Duplicate - Duplication of some formats
// Copyright (C) 2007-2012 MediaArea.net SARL, [email protected]
//
// This library is free software: you can redistribute it and/or modify it
// under the terms of the GNU Library General Public License as published by
// the Free Software Foundation, either version 2 of the License, or
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Library General Public License for more details.
//
// You should have received a copy of the GNU Library General Public License
// along with this library. If not, see <http://www.gnu.org/licenses/>.
//
//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
//
// Duplication helper for some specific formats
//
//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
//---------------------------------------------------------------------------
// Pre-compilation
#include "MediaInfo/PreComp.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
#include "MediaInfo/Setup.h"
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
#if defined(MEDIAINFO_AVC_YES)
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
#include "MediaInfo/Video/File_Avc.h"
#include "MediaInfo/MediaInfo_Config.h"
#include "MediaInfo/MediaInfo_Config_MediaInfo.h"
#include "ZenLib/ZtringList.h"
#include "ZenLib/File.h"
#include <cstring>
using namespace ZenLib;
using namespace std;
//---------------------------------------------------------------------------
namespace MediaInfoLib
{
//***************************************************************************
// Options
//***************************************************************************
//---------------------------------------------------------------------------
void File_Avc::Option_Manage()
{
#if MEDIAINFO_DUPLICATE
//File__Duplicate configuration
if (File__Duplicate_HasChanged())
{
//Authorization of other streams
Streams[0x07].ShouldDuplicate=true;
}
#endif //MEDIAINFO_DUPLICATE
}
//***************************************************************************
// Set
//***************************************************************************
//---------------------------------------------------------------------------
#if MEDIAINFO_DUPLICATE
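//Parses a duplication command (e.g. "parser=Avc,file:Out.avc,format=Flv"): registers each
//"file:"/"memory:" target with the writer, treats a leading '-' as removal, and records whether
//FLV framing was requested. Returns false when the command is not aimed at this parser.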
bool File_Avc::File__Duplicate_Set (const Ztring &Value)
{
ZtringList List(Value);
//Searching Target
bool IsForUs=false;
std::vector<ZtringList::iterator> Targets_ToAdd;
std::vector<ZtringList::iterator> Targets_ToRemove;
std::vector<ZtringList::iterator> Orders_ToAdd;
std::vector<ZtringList::iterator> Orders_ToRemove;
for (ZtringList::iterator Current=List.begin(); Current<List.end(); ++Current)
{
//Detecting if we want to remove
bool ToRemove=false;
if (Current->find(__T('-'))==0)
{
ToRemove=true;
Current->erase(Current->begin());
}
//Managing targets
if (Current->find(__T("file:"))==0
|| Current->find(__T("memory:"))==0)
(ToRemove?Targets_ToRemove:Targets_ToAdd).push_back(Current);
//Parser name
else if (Current->find(__T("parser=Avc"))==0)
IsForUs=true;
//Managing orders
else
(ToRemove?Orders_ToRemove:Orders_ToAdd).push_back(Current);
}
//For us?
if (!IsForUs)
return false;
//Configuration of initial values
frame_num_Old=(int32u)-1;
Duplicate_Buffer_Size=0;
SPS_PPS_AlreadyDone=false;
FLV=false;
//For each target to add
for (std::vector<ZtringList::iterator>::iterator Target=Targets_ToAdd.begin(); Target<Targets_ToAdd.end(); ++Target)
Writer.Configure(**Target);
//For each order to add
for (std::vector<ZtringList::iterator>::iterator Order=Orders_ToAdd.begin(); Order<Orders_ToAdd.end(); ++Order)
if ((**Order)==__T("format=Flv"))
FLV=true;
return true;
}
#endif //MEDIAINFO_DUPLICATE
//***************************************************************************
// Write
//***************************************************************************
#if MEDIAINFO_DUPLICATE
void File_Avc::File__Duplicate_Write (int64u Element_Code, int32u frame_num)
{
const int8u* ToAdd=Buffer+Buffer_Offset-(size_t)Header_Size+3;
size_t ToAdd_Size=(size_t)(Element_Size+Header_Size-3);
if (!SPS_PPS_AlreadyDone)
{
if (Element_Code==7)
{
std::memcpy(Duplicate_Buffer, ToAdd, ToAdd_Size);
Duplicate_Buffer_Size=ToAdd_Size;
}
else if (Element_Code==8)
{
// Form:
// 8 bytes : PTS
// 8 bytes : DTS
// 8 bytes : Size (without header)
// 1 byte : Type (0=Frame, 1=Header);
// 7 bytes : Reserved
size_t Extra;
if (FLV)
<|fim▁hole|> else
Extra=0; //MPEG-4
int8u Header[32];
int64u2BigEndian(Header+ 0, FrameInfo.PTS);
int64u2BigEndian(Header+ 8, FrameInfo.DTS);
int64u2BigEndian(Header+16, 5+Extra+2+Duplicate_Buffer_Size+1+2+ToAdd_Size); //5+Extra for SPS_SQS header, 2 for SPS size, 1 for PPS count, 2 for PPS size
Header[24]=1;
int56u2BigEndian(Header+25, 0);
Writer.Write(Header, 32);
//SPS_PPS
int8u* SPS_SQS=new int8u[5+Extra];
if (Extra==1)
{
SPS_SQS[0]=0x01; //Profile FLV
SPS_SQS[1]=(!seq_parameter_sets.empty() && seq_parameter_sets[0])?seq_parameter_sets[0]->profile_idc:0x00; //Compatible Profile. TODO: Handling more than 1 seq_parameter_set
SPS_SQS[2]=0x00; //Reserved
}
else
{
SPS_SQS[0]=(!seq_parameter_sets.empty() && seq_parameter_sets[0])?seq_parameter_sets[0]->profile_idc:0x00; //Profile MPEG-4. TODO: Handling more than 1 seq_parameter_set
SPS_SQS[1]=0x00; //Compatible Profile
}
SPS_SQS[2+Extra]=(!seq_parameter_sets.empty() && seq_parameter_sets[0])?seq_parameter_sets[0]->level_idc:0x00; //Level. TODO: Handling more than 1 seq_parameter_set
SPS_SQS[3+Extra]=0xFF; //Reserved + Size of NALU length minus 1
SPS_SQS[4+Extra]=0xE1; //Reserved + seq_parameter_set count
Writer.Write(SPS_SQS, 5+Extra);
//NALU
int8u NALU[2];
NALU[0]=((Duplicate_Buffer_Size)>> 8)&0xFF;
NALU[1]=((Duplicate_Buffer_Size)>> 0)&0xFF;
Writer.Write(NALU, 2);
//SPS
Writer.Write(Duplicate_Buffer, Duplicate_Buffer_Size);
Duplicate_Buffer_Size=0;
//PPS count
SPS_SQS[0]=0x01; //pic_parameter_set count
Writer.Write(SPS_SQS, 1);
delete[] SPS_SQS;
//NALU
NALU[0]=((ToAdd_Size)>> 8)&0xFF;
NALU[1]=((ToAdd_Size)>> 0)&0xFF;
Writer.Write(NALU, 2);
//PPS
Writer.Write(ToAdd, ToAdd_Size);
SPS_PPS_AlreadyDone=true;
}
}
else if (frame_num!=(int32u)-1)
{
if (frame_num!=frame_num_Old && frame_num_Old!=(int32u)-1 && frame_num!=(int32u)-1)
{
// Form:
// 8 bytes : PTS
// 8 bytes : DTS
// 8 bytes : Size (without header)
// 1 byte : Type (0=Frame, 1=Header);
// 7 bytes : Reserved
int8u Header[32];
int64u2BigEndian(Header+ 0, FrameInfo.PTS);
int64u2BigEndian(Header+ 8, FrameInfo.DTS);
int64u2BigEndian(Header+16, Duplicate_Buffer_Size);
Header[24]=0;
int56u2BigEndian(Header+25, 0);
Writer.Write(Header, 32);
Writer.Write(Duplicate_Buffer, Duplicate_Buffer_Size);
Duplicate_Buffer_Size=0;
}
//NALU
int32u2BigEndian(Duplicate_Buffer+Duplicate_Buffer_Size, (int32u)ToAdd_Size); //4 bytes for NALU header
Duplicate_Buffer_Size+=4;
//Frame (partial)
std::memcpy(Duplicate_Buffer+Duplicate_Buffer_Size, ToAdd, ToAdd_Size);
Duplicate_Buffer_Size+=ToAdd_Size;
frame_num_Old=frame_num;
}
}
#endif //MEDIAINFO_DUPLICATE
//***************************************************************************
// Output_Buffer
//***************************************************************************
//---------------------------------------------------------------------------
#if MEDIAINFO_DUPLICATE
size_t File_Avc::Output_Buffer_Get (const String &)
{
return Writer.Output_Buffer_Get();
}
#endif //MEDIAINFO_DUPLICATE
//---------------------------------------------------------------------------
#if MEDIAINFO_DUPLICATE
size_t File_Avc::Output_Buffer_Get (size_t)
{
return Writer.Output_Buffer_Get();
}
#endif //MEDIAINFO_DUPLICATE
} //NameSpace
#endif //MEDIAINFO_AVC_YES<|fim▁end|> | Extra=1; //FLV
|
<|file_name|>interface.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Machinery to initialise interface prototype objects and interface objects.
use dom::bindings::codegen::PrototypeList;
use dom::bindings::conversions::get_dom_class;
use dom::bindings::utils::{get_proto_or_iface_array, Prefable};
use js::error::throw_type_error;
use js::glue::UncheckedUnwrapObject;
use js::jsapi::{Class, ClassExtension, ClassSpec, GetGlobalForObjectCrossCompartment};
use js::jsapi::{HandleObject, HandleValue, JSClass, JSContext, JSFunctionSpec};
use js::jsapi::{JSNative, JSFUN_CONSTRUCTOR, JSPROP_ENUMERATE, JSPROP_PERMANENT, JSPROP_READONLY};
use js::jsapi::{JSPROP_RESOLVING, JSPropertySpec, JSString, JS_AtomizeAndPinString};
use js::jsapi::{JS_DefineProperty, JS_DefineProperty1, JS_DefineProperty2, JS_DefineProperty4};
use js::jsapi::{JS_GetClass, JS_GetFunctionObject, JS_GetPrototype, JS_LinkConstructorAndPrototype};
use js::jsapi::{JS_NewFunction, JS_NewObject, JS_NewObjectWithUniqueType, JS_NewStringCopyN};
use js::jsapi::{MutableHandleObject, MutableHandleValue, ObjectOps, RootedObject, RootedString};
use js::jsapi::{RootedValue, Value};
use js::jsval::{BooleanValue, DoubleValue, Int32Value, JSVal, NullValue, UInt32Value};
use js::rust::{define_methods, define_properties};
use libc;
use std::ptr;
/// Representation of an IDL constant value.
#[derive(Clone)]
pub enum ConstantVal {
/// `long` constant.
IntVal(i32),
/// `unsigned long` constant.
UintVal(u32),
/// `double` constant.
DoubleVal(f64),
/// `boolean` constant.
BoolVal(bool),
/// `null` constant.
NullVal,
}
/// Representation of an IDL constant.
#[derive(Clone)]
pub struct ConstantSpec {
/// name of the constant.
pub name: &'static [u8],
/// value of the constant.
pub value: ConstantVal,
}
impl ConstantSpec {
/// Returns a `JSVal` that represents the value of this `ConstantSpec`.
pub fn get_value(&self) -> JSVal {
match self.value {
ConstantVal::NullVal => NullValue(),
ConstantVal::IntVal(i) => Int32Value(i),
ConstantVal::UintVal(u) => UInt32Value(u),
ConstantVal::DoubleVal(d) => DoubleValue(d),
ConstantVal::BoolVal(b) => BooleanValue(b),
}
}
}
/// A JSNative that cannot be null.
pub type NonNullJSNative =
unsafe extern "C" fn (arg1: *mut JSContext, arg2: libc::c_uint, arg3: *mut JSVal) -> bool;
/// Defines constants on `obj`.
/// Fails on JSAPI failure.
pub fn define_constants(cx: *mut JSContext, obj: HandleObject, constants: &'static [ConstantSpec]) {
for spec in constants {
let value = RootedValue::new(cx, spec.get_value());
unsafe {
assert!(JS_DefineProperty(cx,
obj,
spec.name.as_ptr() as *const libc::c_char,
value.handle(),
JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT,
None,
None));
}
}
}
unsafe extern "C" fn fun_to_string_hook(cx: *mut JSContext,
obj: HandleObject,
_indent: u32)
-> *mut JSString {
let js_class = JS_GetClass(obj.get());
assert!(!js_class.is_null());
let repr = (*(js_class as *const NonCallbackInterfaceObjectClass)).representation;
assert!(!repr.is_empty());
let ret = JS_NewStringCopyN(cx, repr.as_ptr() as *const libc::c_char, repr.len());
assert!(!ret.is_null());
ret
}
/// The class of a non-callback interface object.
#[derive(Copy, Clone)]
pub struct NonCallbackInterfaceObjectClass {
/// The SpiderMonkey Class structure.
pub class: Class,
/// The prototype id of that interface, used in the hasInstance hook.
pub proto_id: PrototypeList::ID,
/// The prototype depth of that interface, used in the hasInstance hook.
pub proto_depth: u16,
/// The string representation of the object.
pub representation: &'static [u8],
}
unsafe impl Sync for NonCallbackInterfaceObjectClass {}
impl NonCallbackInterfaceObjectClass {
/// Create a new `NonCallbackInterfaceObjectClass` structure.
pub const fn new(
constructor_behavior: InterfaceConstructorBehavior,
string_rep: &'static [u8],
proto_id: PrototypeList::ID,
proto_depth: u16)
-> NonCallbackInterfaceObjectClass {
NonCallbackInterfaceObjectClass {
class: Class {
name: b"Function\0" as *const _ as *const libc::c_char,
flags: 0,
addProperty: None,
delProperty: None,
getProperty: None,
setProperty: None,
enumerate: None,
resolve: None,
mayResolve: None,
finalize: None,
call: constructor_behavior.call,
construct: constructor_behavior.construct,
hasInstance: Some(has_instance_hook),
trace: None,
spec: ClassSpec {
createConstructor_: None,
createPrototype_: None,
constructorFunctions_: ptr::null(),
constructorProperties_: ptr::null(),
prototypeFunctions_: ptr::null(),
prototypeProperties_: ptr::null(),
finishInit_: None,
flags: 0,
},
ext: ClassExtension {
isWrappedNative: false,
weakmapKeyDelegateOp: None,
objectMovedOp: None,
},
ops: ObjectOps {
lookupProperty: None,
defineProperty: None,
hasProperty: None,
getProperty: None,
setProperty: None,
getOwnPropertyDescriptor: None,
deleteProperty: None,
watch: None,
unwatch: None,
getElements: None,
enumerate: None,
funToString: Some(fun_to_string_hook),
}
},
proto_id: proto_id,
proto_depth: proto_depth,
representation: string_rep,
}
}
/// cast own reference to `JSClass` reference
pub fn as_jsclass(&self) -> &JSClass {
unsafe {
&*(self as *const _ as *const JSClass)
}
}
}
/// A constructor class hook.
pub type ConstructorClassHook =
unsafe extern "C" fn(cx: *mut JSContext, argc: u32, vp: *mut Value) -> bool;
/// The constructor behavior of a non-callback interface object.
pub struct InterfaceConstructorBehavior {
call: JSNative,
construct: JSNative,
}
impl InterfaceConstructorBehavior {
/// An interface constructor that unconditionally throws a type error.
pub const fn throw() -> InterfaceConstructorBehavior {
InterfaceConstructorBehavior {
call: Some(invalid_constructor),
construct: Some(invalid_constructor),
}
}
/// An interface constructor that calls a native Rust function.
pub const fn call(hook: ConstructorClassHook) -> InterfaceConstructorBehavior {
InterfaceConstructorBehavior {
call: Some(non_new_constructor),
construct: Some(hook),
}
}
}
/// Create and define the interface object of a callback interface.
pub unsafe fn create_callback_interface_object(
cx: *mut JSContext,
receiver: HandleObject,
constants: &'static [Prefable<ConstantSpec>],
name: &'static [u8],
rval: MutableHandleObject) {
assert!(!constants.is_empty());
rval.set(JS_NewObject(cx, ptr::null()));
assert!(!rval.ptr.is_null());
for prefable in constants {
define_constants(cx, rval.handle(), prefable.specs());
}
define_name(cx, rval.handle(), name);
define_on_global_object(cx, receiver, name, rval.handle());
}
/// Create the interface prototype object of a non-callback interface.
pub unsafe fn create_interface_prototype_object(
cx: *mut JSContext,
proto: HandleObject,
class: &'static JSClass,
regular_methods: Option<&'static [Prefable<JSFunctionSpec>]>,
regular_properties: Option<&'static [Prefable<JSPropertySpec>]>,
constants: &'static [Prefable<ConstantSpec>],
rval: MutableHandleObject) {
create_object(cx, proto, class, regular_methods, regular_properties, constants, rval);
}
/// Create and define the interface object of a non-callback interface.
pub unsafe fn create_noncallback_interface_object(
cx: *mut JSContext,
receiver: HandleObject,
proto: HandleObject,
class: &'static NonCallbackInterfaceObjectClass,
static_methods: Option<&'static [Prefable<JSFunctionSpec>]>,
static_properties: Option<&'static [Prefable<JSPropertySpec>]>,
constants: &'static [Prefable<ConstantSpec>],
interface_prototype_object: HandleObject,
name: &'static [u8],
length: u32,
rval: MutableHandleObject) {
create_object(cx,
proto,
class.as_jsclass(),
static_methods,
static_properties,
constants,
rval);
assert!(JS_LinkConstructorAndPrototype(cx, rval.handle(), interface_prototype_object));
define_name(cx, rval.handle(), name);
define_length(cx, rval.handle(), length);
define_on_global_object(cx, receiver, name, rval.handle());
}
/// Create and define the named constructors of a non-callback interface.
pub unsafe fn create_named_constructors(
cx: *mut JSContext,
receiver: HandleObject,
named_constructors: &[(NonNullJSNative, &'static [u8], u32)],
interface_prototype_object: HandleObject) {
let mut constructor = RootedObject::new(cx, ptr::null_mut());
for &(native, name, arity) in named_constructors {
assert!(*name.last().unwrap() == b'\0');
let fun = JS_NewFunction(cx,
Some(native),
arity,
JSFUN_CONSTRUCTOR,
name.as_ptr() as *const libc::c_char);
assert!(!fun.is_null());
constructor.ptr = JS_GetFunctionObject(fun);
assert!(!constructor.ptr.is_null());
assert!(JS_DefineProperty1(cx,
constructor.handle(),
b"prototype\0".as_ptr() as *const libc::c_char,
interface_prototype_object,
JSPROP_PERMANENT | JSPROP_READONLY,
None,
None));
define_on_global_object(cx, receiver, name, constructor.handle());
}
}
/// Hook for instanceof on interface objects.
unsafe extern "C" fn has_instance_hook(cx: *mut JSContext,
obj: HandleObject,
value: MutableHandleValue,
rval: *mut bool) -> bool {
match has_instance(cx, obj, value.handle()) {
Ok(result) => {
*rval = result;
true
}
Err(()) => false,
}
}
/// Return whether a value is an instance of a given prototype.
/// http://heycam.github.io/webidl/#es-interface-hasinstance
unsafe fn has_instance(
cx: *mut JSContext,
interface_object: HandleObject,
value: HandleValue)
-> Result<bool, ()> {
if !value.is_object() {
// Step 1.
return Ok(false);
}
let mut value = RootedObject::new(cx, value.to_object());
let js_class = JS_GetClass(interface_object.get());
let object_class = &*(js_class as *const NonCallbackInterfaceObjectClass);
if let Ok(dom_class) = get_dom_class(UncheckedUnwrapObject(value.ptr, /* stopAtWindowProxy = */ 0)) {
if dom_class.interface_chain[object_class.proto_depth as usize] == object_class.proto_id {
// Step 4.
return Ok(true);
}
}
// Step 2.
let global = GetGlobalForObjectCrossCompartment(interface_object.get());
assert!(!global.is_null());
let proto_or_iface_array = get_proto_or_iface_array(global);
let prototype = RootedObject::new(cx, (*proto_or_iface_array)[object_class.proto_id as usize]);
assert!(!prototype.ptr.is_null());
// Step 3 only concerns legacy callback interface objects (i.e. NodeFilter).
while JS_GetPrototype(cx, value.handle(), value.handle_mut()) {
if value.ptr.is_null() {<|fim▁hole|> } else if value.ptr as *const _ == prototype.ptr {
// Step 5.3.
return Ok(true);
}
}
// JS_GetPrototype threw an exception.
Err(())
}
unsafe fn create_object(
cx: *mut JSContext,
proto: HandleObject,
class: &'static JSClass,
methods: Option<&'static [Prefable<JSFunctionSpec>]>,
properties: Option<&'static [Prefable<JSPropertySpec>]>,
constants: &'static [Prefable<ConstantSpec>],
rval: MutableHandleObject) {
rval.set(JS_NewObjectWithUniqueType(cx, class, proto));
assert!(!rval.ptr.is_null());
if let Some(methods) = methods {
define_prefable_methods(cx, rval.handle(), methods);
}
if let Some(properties) = properties {
define_prefable_properties(cx, rval.handle(), properties);
}
for prefable in constants {
define_constants(cx, rval.handle(), prefable.specs());
}
}
/// Conditionally define methods on an object.
pub unsafe fn define_prefable_methods(cx: *mut JSContext,
obj: HandleObject,
methods: &'static [Prefable<JSFunctionSpec>]) {
for prefable in methods {
define_methods(cx, obj, prefable.specs()).unwrap();
}
}
/// Conditionally define properties on an object.
pub unsafe fn define_prefable_properties(cx: *mut JSContext,
obj: HandleObject,
properties: &'static [Prefable<JSPropertySpec>]) {
for prefable in properties {
define_properties(cx, obj, prefable.specs()).unwrap();
}
}
unsafe fn define_name(cx: *mut JSContext, obj: HandleObject, name: &'static [u8]) {
assert!(*name.last().unwrap() == b'\0');
let name = RootedString::new(
cx, JS_AtomizeAndPinString(cx, name.as_ptr() as *const libc::c_char));
assert!(!name.ptr.is_null());
assert!(JS_DefineProperty2(cx,
obj,
b"name\0".as_ptr() as *const libc::c_char,
name.handle(),
JSPROP_READONLY,
None, None));
}
unsafe fn define_length(cx: *mut JSContext, obj: HandleObject, length: u32) {
assert!(JS_DefineProperty4(cx,
obj,
b"length\0".as_ptr() as *const libc::c_char,
length,
JSPROP_READONLY,
None, None));
}
unsafe fn define_on_global_object(
cx: *mut JSContext,
receiver: HandleObject,
name: &'static [u8],
obj: HandleObject) {
assert!(*name.last().unwrap() == b'\0');
assert!(JS_DefineProperty1(cx,
receiver,
name.as_ptr() as *const libc::c_char,
obj,
JSPROP_RESOLVING,
None, None));
}
unsafe extern "C" fn invalid_constructor(
cx: *mut JSContext,
_argc: libc::c_uint,
_vp: *mut JSVal)
-> bool {
throw_type_error(cx, "Illegal constructor.");
false
}
unsafe extern "C" fn non_new_constructor(
cx: *mut JSContext,
_argc: libc::c_uint,
_vp: *mut JSVal)
-> bool {
throw_type_error(cx, "This constructor needs to be called with `new`.");
false
}<|fim▁end|> | // Step 5.2.
return Ok(false); |
<|file_name|>3a6b2ab00e3e_session_proposal_one.py<|end_file_name|><|fim▁begin|>"""Make session:proposal 1:1.
Revision ID: 3a6b2ab00e3e
Revises: 4dbf686f4380
Create Date: 2013-11-09 13:51:58.343243
"""
# revision identifiers, used by Alembic.
revision = '3a6b2ab00e3e'
down_revision = '4dbf686f4380'<|fim▁hole|>
from alembic import op
def upgrade():
op.create_unique_constraint('session_proposal_id_key', 'session', ['proposal_id'])
def downgrade():
op.drop_constraint('session_proposal_id_key', 'session', 'unique')<|fim▁end|> | |
<|file_name|>performancenavigationtiming.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::PerformanceBinding::DOMHighResTimeStamp;
use crate::dom::bindings::codegen::Bindings::PerformanceNavigationTimingBinding::NavigationType;
use crate::dom::bindings::codegen::Bindings::PerformanceNavigationTimingBinding::PerformanceNavigationTimingMethods;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::document::Document;
use crate::dom::globalscope::GlobalScope;
use crate::dom::performanceresourcetiming::{InitiatorType, PerformanceResourceTiming};
use dom_struct::dom_struct;
#[dom_struct]
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming
/// Only the current document resource is included in the performance timeline;
/// there is only one PerformanceNavigationTiming object in the performance timeline.
pub struct PerformanceNavigationTiming {
// https://w3c.github.io/navigation-timing/#PerformanceResourceTiming
performanceresourcetiming: PerformanceResourceTiming,
navigation_start: u64,
navigation_start_precise: u64,
document: Dom<Document>,
nav_type: NavigationType,
}
impl PerformanceNavigationTiming {
fn new_inherited(
nav_start: u64,
nav_start_precise: u64,
document: &Document,
) -> PerformanceNavigationTiming {
PerformanceNavigationTiming {
performanceresourcetiming: PerformanceResourceTiming::new_inherited(
document.url(),
InitiatorType::Navigation,
None,
nav_start_precise as f64,
),
navigation_start: nav_start,
navigation_start_precise: nav_start_precise,
document: Dom::from_ref(document),
nav_type: NavigationType::Navigate,
}
}
pub fn new(
global: &GlobalScope,
nav_start: u64,
nav_start_precise: u64,
document: &Document,
) -> DomRoot<PerformanceNavigationTiming> {
reflect_dom_object(
Box::new(PerformanceNavigationTiming::new_inherited(
nav_start,
nav_start_precise,
document,
)),
global,
)
}
}
// https://w3c.github.io/navigation-timing/
impl PerformanceNavigationTimingMethods for PerformanceNavigationTiming {
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-unloadeventstart
fn UnloadEventStart(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_unload_event_start() as f64)
}
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-unloadeventend
fn UnloadEventEnd(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_unload_event_end() as f64)
}
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-dominteractive
fn DomInteractive(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_dom_interactive() as f64)
}
<|fim▁hole|> // https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-domcontentloadedeventstart
fn DomContentLoadedEventStart(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_dom_content_loaded_event_start() as f64)
}
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-domcontentloadedeventend
fn DomContentLoadedEventEnd(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_dom_content_loaded_event_end() as f64)
}
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-domcomplete
fn DomComplete(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_dom_complete() as f64)
}
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-loadeventstart
fn LoadEventStart(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_load_event_start() as f64)
}
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-loadeventend
fn LoadEventEnd(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_load_event_end() as f64)
}
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-type
fn Type(&self) -> NavigationType {
self.nav_type.clone()
}
// https://w3c.github.io/navigation-timing/#dom-performancenavigationtiming-redirectcount
fn RedirectCount(&self) -> u16 {
self.document.get_redirect_count()
}
// check-tidy: no specs after this line
// Servo-only timing for when top-level content (not iframes) is complete
fn TopLevelDomComplete(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.document.get_top_level_dom_complete() as f64)
}
}<|fim▁end|> | |
<|file_name|>naive-seg.py<|end_file_name|><|fim▁begin|>import sys
sys.path.insert(0, r'../')
import os
import re
import numpy as np
import gensim
from collections import defaultdict
import argparse
from argparse import ArgumentParser
from utilities import readConll, writevec, writey, traversaltree
from data.dataprocessing import streamtw, streamtwElec
from tdparse import lexicon, streamdata, readTang
import warnings
warnings.simplefilter("error")
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
def segment(tw, target):
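    """Split a tokenised tweet into punctuation-delimited segments that contain the target.

    Returns the matching segments and, for each occurrence of the target, its index within its segment.
    """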
tw = ' '.join(tw)
segs = []
for seg in re.split(r'(?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|\?|\!|\,)\s', tw):
for token in seg.split():
if token == target:
segs.append(seg.split())
break
target_positions = [[i for i, j in enumerate(seg) if j == target] for seg in segs]
new_segs = []
for i in xrange(len(target_positions)):
temp = []
if len(target_positions[i]) > 1:
for j in xrange(len(target_positions[i])):
temp.append(segs[i])
else:
temp.append(segs[i])
new_segs.append(temp)
target_positions = [item for sublist in target_positions for item in sublist]
new_segs = [item for sublist in new_segs for item in sublist]
return new_segs, target_positions
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
class targettw(object):
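    """Build target-dependent tweet features from word2vec and SSWE embeddings plus sentiment lexicons."""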
def __init__(self,w2vf='../resources/wordemb/w2v/c10_w3_s100',
sswef='../resources/wordemb/sswe'):
self.w2v=gensim.models.Word2Vec.load(w2vf)
self.sswe=readTang(sswef)
self.lexicons=lexicon()
def emdsswe(self,i,loc,uni,target):
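        """Return SSWE embedding pieces for token `uni`: full tweet, left context, target, right context,
        and left/right sentiment words, relative to the target at index `loc`."""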
f=np.array([])
l=np.array([])
r=np.array([])
t=np.array([])
ls=np.array([])
rs=np.array([])
#embeddings of fulltw features
f=self.sswe.get(uni,self.sswe['<unk>'])
#embeddings of left context features
if i<loc:
l=self.sswe.get(uni,self.sswe['<unk>'])
if self.lexicons.pos.issuperset(set([uni])):
try:
ls=self.sswe[uni]
except:
pass
if self.lexicons.neg.issuperset(set([uni])):
try:
ls=self.sswe[uni]
except:
pass
#embeddings of target features
elif(i==loc):
t=self.sswe.get(target.replace('_',''),self.sswe['<unk>'])
target2=target.split('_')
for wd in target2:
ti=self.sswe.get(wd,self.sswe['<unk>'])
t=np.concatenate([t,ti])
#embeddings of right context features
else:
r=self.sswe.get(uni,self.sswe['<unk>'])
if self.lexicons.pos.issuperset(set([uni])):
try:
rs=self.sswe[uni]
except:
pass
if self.lexicons.neg.issuperset(set([uni])):
try:
rs=self.sswe[uni]
except:
pass
return [f,l,t,r,ls,rs]
def emdw2v(self,i,loc,uni,target):<|fim▁hole|> ls=np.array([])
rs=np.array([])
try:
f=self.w2v[uni]
except:
pass
#embeddings of left context features
if i<loc:
try:
l=self.w2v[uni]
except:
pass
try:
if self.lexicons.pos.issuperset(set([uni])):
ls=self.w2v[uni]
except:
pass
try:
if self.lexicons.neg.issuperset(set([uni])):
ls=self.w2v[uni]
except:
pass
#embeddings of target feature
elif(i==loc):
try:
t=self.w2v[target.replace('_','')]
except:
pass
target2=target.split('_')
for wd in target2:
try:
ti=self.w2v[wd]
t=np.concatenate([t,ti])
except:
pass
#embeddings of right context features
else:
try:
r=self.w2v[uni]
except:
pass
try:
if self.lexicons.pos.issuperset(set([uni])):
rs=self.w2v[uni]
except:
pass
try:
if self.lexicons.neg.issuperset(set([uni])):
rs=self.w2v[uni]
except:
pass
return [f,l,t,r,ls,rs]
def concattw(self,feature,size,tws,etype,locs,target,emp,emode):
""" Concatenation of different features
"""
fullf=np.array([])
leftf=np.array([])
rightf=np.array([])
tarf=np.array([])
fulltw_maxs=np.array([])
fulltw_mins=np.array([])
fulltw_means=np.array([])
fulltw_prods=np.array([])
left_maxs=np.array([])
left_mins=np.array([])
left_means=np.array([])
left_prods=np.array([])
right_maxs=np.array([])
right_mins=np.array([])
right_means=np.array([])
right_prods=np.array([])
tar_maxs=np.array([])
tar_mins=np.array([])
tar_means=np.array([])
tar_prods=np.array([])
leftsenti_max=np.array([])
rightsenti_max=np.array([])
leftsenti_sum=np.array([])
rightsenti_sum=np.array([])
fulltws=np.array([])
lefts=np.array([])
rights=np.array([])
tars=np.array([])
leftsentis=np.array([])
rightsentis=np.array([])
for a in range(len(locs)):
if emode=='full':
tw = tws
else:
tw = tws[a]
loc = locs[a]
fulltw=np.array([])
left=np.array([])
right=np.array([])
tar=np.array([])
leftsenti=np.array([])
rightsenti=np.array([])
for i,uni in enumerate(tw):
if etype=='w2v':
f,l,t,r,ls,rs=self.emdw2v(i,loc,uni,target)
if etype=='sswe':
f,l,t,r,ls,rs=self.emdsswe(i,loc,uni,target)
fulltw=np.concatenate([fulltw,f])
left=np.concatenate([left,l])
tar=np.concatenate([tar,t])
right=np.concatenate([right,r])
leftsenti=np.concatenate([leftsenti,ls])
rightsenti=np.concatenate([rightsenti,rs])
#padding
if list(left)==[]:
left=np.zeros((2*size,))
if list(right)==[]:
right=np.zeros((2*size,))
if list(fulltw)==[]:
fulltw=np.zeros((2*size,))
if list(tar)==[]:
tar=np.zeros((2*size,))
if len(left)<=size:
left=np.concatenate([left,np.zeros((size,))])
if len(right)<=size:
right=np.concatenate([right,np.zeros((size,))])
if len(fulltw)<=size:
fulltw=np.concatenate([fulltw,np.zeros((size,))])
if len(tar)<=size:
tar=np.concatenate([tar,np.zeros((size,))])
if list(leftsenti)==[]:
leftsenti=np.zeros((size,))
if list(rightsenti)==[]:
rightsenti=np.zeros((size,))
fullf=np.append(fullf,fulltw, axis=0)
leftf=np.append(leftf,left, axis=0)
rightf=np.append(rightf,right, axis=0)
tarf=np.append(tarf,tar, axis=0)
fulltw=fulltw.reshape(len(fulltw)/size,size)
left=left.reshape(len(left)/size,size)
right=right.reshape(len(right)/size,size)
tar=tar.reshape(len(tar)/size,size)
leftsenti=leftsenti.reshape(len(leftsenti)/size,size)
rightsenti=rightsenti.reshape(len(rightsenti)/size,size)
fulltw_maxs=np.append(fulltw_maxs,fulltw.max(axis=0), axis=0)
fulltw_mins=np.append(fulltw_mins,fulltw.min(axis=0), axis=0)
fulltw_means=np.append(fulltw_means,fulltw.mean(axis=0), axis=0)
fulltw_prods=np.append(fulltw_prods,fulltw.prod(axis=0), axis=0)
left_maxs=np.append(left_maxs,left.max(axis=0), axis=0)
left_mins=np.append(left_mins,left.min(axis=0), axis=0)
left_means=np.append(left_means,left.mean(axis=0), axis=0)
if not np.count_nonzero(left):
left_prods=np.append(left_prods,left.prod(axis=0), axis=0)
else:
left_prods=np.append(left_prods,left[~np.all(left == 0, axis=1)].prod(axis=0), axis=0)
right_maxs=np.append(right_maxs,right.max(axis=0), axis=0)
right_mins=np.append(right_mins,right.min(axis=0), axis=0)
right_means=np.append(right_means,right.mean(axis=0), axis=0)
if not np.count_nonzero(right):
right_prods=np.append(right_prods,right.prod(axis=0), axis=0)
else:
right_prods=np.append(right_prods,right[~np.all(right == 0, axis=1)].prod(axis=0), axis=0)
tar_maxs=np.append(tar_maxs,tar.max(axis=0), axis=0)
tar_mins=np.append(tar_mins,tar.min(axis=0), axis=0)
tar_means=np.append(tar_means,tar.mean(axis=0), axis=0)
tar_prods=np.append(tar_prods,tar.prod(axis=0), axis=0)
leftsenti_max=np.append(leftsenti_max,leftsenti.max(axis=0), axis=0)
rightsenti_max=np.append(rightsenti_max,rightsenti.max(axis=0), axis=0)
leftsenti_sum=np.append(leftsenti_sum,leftsenti.sum(axis=0), axis=0)
rightsenti_sum=np.append(rightsenti_sum,rightsenti.sum(axis=0), axis=0)
fullf=fullf.reshape(len(fullf)/size,size)
leftf=leftf.reshape(len(leftf)/size,size)
rightf=rightf.reshape(len(rightf)/size,size)
tarf=tarf.reshape(len(tarf)/size,size)
fulltw_maxs=fulltw_maxs.reshape(len(fulltw_maxs)/size,size)
fulltw_mins=fulltw_mins.reshape(len(fulltw_mins)/size,size)
fulltw_means=fulltw_means.reshape(len(fulltw_means)/size,size)
fulltw_prods=fulltw_prods.reshape(len(fulltw_prods)/size,size)
left_maxs=left_maxs.reshape(len(left_maxs)/size,size)
left_mins=left_mins.reshape(len(left_mins)/size,size)
left_means=left_means.reshape(len(left_means)/size,size)
left_prods=left_prods.reshape(len(left_prods)/size,size)
right_maxs=right_maxs.reshape(len(right_maxs)/size,size)
right_mins=right_mins.reshape(len(right_mins)/size,size)
right_means=right_means.reshape(len(right_means)/size,size)
right_prods=right_prods.reshape(len(right_prods)/size,size)
tar_maxs=tar_maxs.reshape(len(tar_maxs)/size,size)
tar_mins=tar_mins.reshape(len(tar_mins)/size,size)
tar_means=tar_means.reshape(len(tar_means)/size,size)
tar_prods=tar_prods.reshape(len(tar_prods)/size,size)
leftsenti_max=leftsenti_max.reshape(len(leftsenti_max)/size,size)
rightsenti_max=rightsenti_max.reshape(len(rightsenti_max)/size,size)
leftsenti_sum=leftsenti_sum.reshape(len(leftsenti_sum)/size,size)
rightsenti_sum=rightsenti_sum.reshape(len(rightsenti_sum)/size,size)
fulltws=np.concatenate([np.median(fulltw_maxs, axis=0),
np.median(fulltw_mins, axis=0),
np.median(fulltw_means, axis=0),
np.std(fullf, axis=0),
np.median(fulltw_prods, axis=0)
])
lefts=np.concatenate([np.median(left_maxs, axis=0),
np.median(left_mins, axis=0),
np.median(left_means, axis=0),
np.std(leftf, axis=0),
np.median(left_prods, axis=0)
])
rights=np.concatenate([np.median(right_maxs, axis=0),
np.median(right_mins, axis=0),
np.median(right_means, axis=0),
np.std(rightf, axis=0),
np.median(right_prods, axis=0)
])
tars=np.concatenate([np.median(tar_maxs, axis=0),
np.median(tar_mins, axis=0),
np.median(tar_means, axis=0),
np.std(tarf, axis=0),
np.median(tar_prods, axis=0)
])
leftsentis=np.concatenate([np.median(leftsenti_max, axis=0),
np.median(leftsenti_sum, axis=0)])
rightsentis=np.concatenate([np.median(rightsenti_max, axis=0),
np.median(rightsenti_sum, axis=0)])
if emode == 'full':
feature=np.concatenate([feature,lefts])
feature=np.concatenate([feature,rights])
feature=np.concatenate([feature,tars])
feature=np.concatenate([feature,leftsentis])
feature=np.concatenate([feature,rightsentis])
elif emode == 'seg':
feature=np.concatenate([feature,fulltws])
return feature
def lidongfeat(self, dataf):
""" Main function for (Dong et al., 2014) data feature extraction.
"""
size1=len(self.w2v['the'])
size2=len(self.sswe['the'])
data = streamtw(dataf)
y=[]
x=np.array([])
for a, d in enumerate(data):
feature = np.array([])
tw = d[0]
target = d[1]
y.append(d[2])
try:
loc=[i for i, j in enumerate(tw) if j == target]
except Exception as e:
print "Couldn't find the tokenised target!"
print target, tw
segs, target_position = segment(tw, target)
feature=self.concattw(feature,size1,segs,'w2v',target_position,target,False,'seg')
feature=self.concattw(feature,size2,segs,'sswe',target_position,target,False,'seg')
feature=self.concattw(feature,size1,tw,'w2v',loc,target,False,'full')
feature=self.concattw(feature,size2,tw,'sswe',loc,target,False,'full')
x=np.concatenate([x,feature])
x=x.reshape((len(y),len(x)/len(y)))
print x.shape
return(x,y)
def elecfeat(self, dataf):
""" Main function for election data feature extraction.
"""
size1=len(self.w2v['the'])
size2=len(self.sswe['the'])
data = streamtwElec(dataf)
y=[]
x=np.array([])
id = []
for a, d in enumerate(data):
feature = np.array([])
tw = d[0]
target = d[1]
if target == '"long_term_economic"_plans':
target = 'long_term_economic'
y.append(d[2])
id.append(d[3])
whichone = d[4]
locations = [i for i, j in enumerate(tw) if j == target]
if (whichone != 'nan') and (len(locations)> 1):
if whichone >= len(locations):
loc = locations[-1]
else:
loc = locations[whichone]
else:
loc = tw.index(target)
segs, target_position = segment(tw, target)
feature=self.concattw(feature,size1,segs,'w2v',target_position,target,False,'seg')
feature=self.concattw(feature,size2,segs,'sswe',target_position,target,False,'seg')
feature=self.concattw(feature,size1,tw,'w2v',[loc],target,False,'full')
feature=self.concattw(feature,size2,tw,'sswe',[loc],target,False,'full')
x=np.concatenate([x,feature])
x=x.reshape((len(y),len(x)/len(y)))
print x.shape
return(x,y,id)
def main(d):
features=targettw()
print "extracting features for training"
if (d == 'lidong') or (d == 'semeval'):
x_train,y_train=features.lidongfeat('../data/'+d+'/training/')
elif d == 'election':
print 'election training data'
x_train,y_train,id_train=features.elecfeat('../data/'+d+'/training/')
writey('../data/'+d+'/output/id_train',id_train)
writevec('../data/'+d+'/output/training',x_train,y_train)
print "extracting features for testing"
if (d == 'lidong') or (d == 'semeval'):
x_test,y_test=features.lidongfeat('../data/'+d+'/testing/')
elif d == 'election':
print 'election testing data'
x_test,y_test,id_test=features.elecfeat('../data/'+d+'/testing/')
writevec('../data/'+d+'/output/testing',x_test,y_test)
writey('../data/'+d+'/output/y_test',y_test)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--data", dest="d", help="dataset", default='data')
args = parser.parse_args()
main(args.d)<|fim▁end|> | f=np.array([])
l=np.array([])
r=np.array([])
t=np.array([]) |
<|file_name|>supplier.js<|end_file_name|><|fim▁begin|>/******************************************************************************
* supplier.js
*
* Copyright 2016 Marcos Salomão
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* <|fim▁hole|> * See the License for the specific language governing permissions and
* limitations under the License.
*
* @version 1.0
* @author Marcos Salomão ([email protected])
*****************************************************************************/
/**
* Global object for the store's suppliers.
*/
! function($) {
/*
* Inserting the supplier scope.
*/
$.supplier = {};
/*****************************************************************************
* Controller API
*****************************************************************************/
/**
* Methods related to the supplier resource API.
*/
$.supplier.api = {
SERVICE_NAME : '/supplier',
service : function(pathVariable) {
return $.supplier.api.SERVICE_NAME + (pathVariable? '/' + pathVariable : '');
},
/**
* Persists the supplier.
*/
save: function(_data) {
// Execute supplier save endpoint
return $.api.request({
path : $.supplier.api.service(),
method : 'POST',
body : _data,
dialogSuccess : {
title : messages.supplier.save.dialog.title,
message : messages.supplier.save.dialog.success
},
dialogError : {
title : messages.supplier.save.dialog.title,
message : messages.supplier.save.dialog.errormessage
}
}).then(function(response) {
$('form.supplier-form').populate(response.result);
return response;
});
}, // End save()
/**
* Deletes the supplier.
*/
delete: function(_id) {
// Execute supplier delete endpoint
return $.api.request({
path : $.supplier.api.service(_id),
method : 'DELETE',
dialogError : {
title : messages.supplier.delete.dialog.title,
message : messages.supplier.delete.dialog.errormessage
}
});
}, // End delete()
}; // End API
/*****************************************************************************
* View components
*****************************************************************************/
$.supplier.view = {
/**
* Builds the table with the suppliers.
*/
bindTable: function(_data) {
// Build the table
$('table.table-suppliers').dataTable({
service: $.supplier.api.service(),
errorMessage: messages.supplier.list.dialog.errormessage,
columns: [{
field: 'id',
visible: false
}, {
field: 'name',
title: messages.supplier.name,
searchable: true
}, {
title: '',
align: 'center',
searchable: false,
'class': 'col-sm-2',
formatter: $.common.view.tableactionbuttons,
events: {
'click button.delete': function(e, value, row, index) {
$.supplier.api.delete(row.id).then(
function() {
$('table.table-suppliers').bootstrapTable('remove', {
field: 'id',
values: [row.id]
});
});
},
'click button.update': function(e, value, row, index) {
// Populate the form; this must happen before showing the tab,
// otherwise the map is not refreshed
$('form.supplier-form').populate(row);
// show the form tab
$('.nav-tabs a[href="#tab_2"]').tab('show');
}
}
}]
});
}, // End bindTable
/**
* Loads the table with the suppliers.
*/
loadTable: function() {
$.supplier.view.bindTable();
}, // End loadTable
/**
* Load page event.
*/
loadPage : function() {
// Apply i18n
$('span.tab_list').text(messages.supplier.tab.list);
$('span.tab_save').text(messages.supplier.tab.save);
$('h3.supplier_save_title').text(messages.supplier.save.title);
$('span.new-item').text(messages.action.new_item);
$('small.supplier_save_subtitle').text(messages.supplier.save.subtitle);
$('label.name').text(messages.supplier.name);
$('input[name="name"]').attr('placeholder', messages.supplier.form.name.placeholder);
$('label.email').text(messages.supplier.email);
$('input[name="email"]').attr('placeholder', messages.supplier.form.email.placeholder);
$('label.phone').text(messages.supplier.phone);
$('input[name="phone"]').attr('placeholder', messages.supplier.form.phone.placeholder);
$('label.location').text(messages.supplier.location);
$('input[name="location"]').attr('placeholder', messages.supplier.form.location.placeholder);
$('button.save').text(messages.action.save);
// Load the list of suppliers
$.supplier.view.loadTable();
// Create the form validation
$('form.supplier-form').validate({ // initialize the plugin
rules: {
name: {
required: true,
minlength: 3
},
email : {
email: true
}
},
messages: {
name: messages.supplier.form.name.required,
email: messages.supplier.form.email.valid
},
/**
* Action performed when the form is submitted.
*/
submitHandler: function(form, event) {
// do not submit the form
event.preventDefault();
// Convert form to JSON Object
var data = $(form).serializeObject();
// Submit to the endpoint
$.supplier.api.save(data).then(function(_data) {
// Update the list
var row = $('table.table-suppliers').bootstrapTable(
'getRowByUniqueId', _data.id);
// Insert if it does not exist, or update if it is already present
if (row == null) {
$('table.table-suppliers').bootstrapTable('insertRow', {
index: 0,
row: _data
});
} else {
$('table.table-suppliers').bootstrapTable('updateByUniqueId', {
id: _data.id,
row: _data
});
}
});
}
}); // End validate
$('.nav-tabs-custom').on('shown.bs.tab',
function(e) {
if ($(e.target).attr('href') != '#tab_2') return;
$('.map-canvas').maps({
autocomplete : $('input[name="location"]')
});
});
}
};
}(jQuery);<|fim▁end|> | * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
<|file_name|>DigitPipeline.java<|end_file_name|><|fim▁begin|>package xsierra.digitguesser.drawer.pipeline;
import java.awt.image.BufferedImage;
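/** A pipeline stage that guesses which digit has been drawn in an image. */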
public interface DigitPipeline {
/**
* @param image An image that contains a drawn digit
* @return the guessed digit, a number between 0 and 9
*/<|fim▁hole|><|fim▁end|> | byte imageGuessDigit(BufferedImage image);
} |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>(function(){
// Back to Top - by CodyHouse.co
var backTop = document.getElementsByClassName('js-cd-top')[0],
offset = 300, // browser window scroll (in pixels) after which the "back to top" link is shown
offsetOpacity = 1200, //browser window scroll (in pixels) after which the "back to top" link opacity is reduced
scrollDuration = 700,
scrolling = false;
if( backTop ) {<|fim▁hole|> window.addEventListener("scroll", function(event) {
if( !scrolling ) {
scrolling = true;
(!window.requestAnimationFrame) ? setTimeout(checkBackToTop, 250) : window.requestAnimationFrame(checkBackToTop);
}
});
//smooth scroll to top
backTop.addEventListener('click', function(event) {
event.preventDefault();
(!window.requestAnimationFrame) ? window.scrollTo(0, 0) : Util.scrollTo(0, scrollDuration);
});
}
function checkBackToTop() {
var windowTop = window.scrollY || document.documentElement.scrollTop;
( windowTop > offset ) ? Util.addClass(backTop, 'cd-top--is-visible') : Util.removeClass(backTop, 'cd-top--is-visible cd-top--fade-out');
( windowTop > offsetOpacity ) && Util.addClass(backTop, 'cd-top--fade-out');
scrolling = false;
}
})();<|fim▁end|> | //update back to top visibility on scrolling |
<|file_name|>twitter_server.py<|end_file_name|><|fim▁begin|>import tweepy
import os
from flask import Flask, make_response, jsonify
CONSUMER_KEY = os.environ['CATCHMEMEALL_TWITTER_CONSUMER_TOKEN']
CONSUMER_SECRET = os.environ['CATCHMEMEALL_TWITTER_CONSUMER_SECRET']
app = Flask(__name__)
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
api = tweepy.API(auth)
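# Look up a Twitter user and return their avatar (profile image URL) as JSON.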
@app.route('/twitter/avatar/<username>', methods=['GET'])
def get_user_avatar(username):<|fim▁hole|> else:
json_data = {'avatar': user.profile_image_url}
return make_response(jsonify(json_data), 200)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=8888, debug=True)<|fim▁end|> | try:
user = api.get_user(username)
except tweepy.TweepError:
return make_response(jsonify({'error': 'no username %s' % (username)}), 404) |
<|file_name|>bootstrap_interactive.go<|end_file_name|><|fim▁begin|>// Copyright 2016 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package commands
import (
"bufio"
"fmt"
"io"
"sort"
"strings"
"github.com/juju/errors"
jujucloud "github.com/juju/juju/cloud"
"github.com/juju/juju/cmd/juju/common"
"github.com/juju/juju/cmd/juju/interact"
)<|fim▁hole|>// assembleClouds
func assembleClouds() ([]string, error) {
public, _, err := jujucloud.PublicCloudMetadata(jujucloud.JujuPublicCloudsPath())
if err != nil {
return nil, errors.Trace(err)
}
personal, err := jujucloud.PersonalCloudMetadata()
if err != nil {
return nil, errors.Trace(err)
}
return sortClouds(public, common.BuiltInClouds(), personal), nil
}
// queryCloud asks the user to choose a cloud.
func queryCloud(clouds []string, defCloud string, scanner *bufio.Scanner, w io.Writer) (string, error) {
list := strings.Join(clouds, "\n")
if _, err := fmt.Fprint(w, "Clouds\n", list, "\n\n"); err != nil {
return "", errors.Trace(err)
}
// add support for a default (empty) selection.
clouds = append(clouds, "")
verify := interact.MatchOptions(clouds, errors.Errorf("Invalid cloud."))
query := fmt.Sprintf("Select a cloud [%s]: ", defCloud)
cloud, err := interact.QueryVerify([]byte(query), scanner, w, verify)
if err != nil {
return "", errors.Trace(err)
}
if cloud == "" {
return defCloud, nil
}
cloudName, ok := interact.FindMatch(cloud, clouds)
if !ok {
// should be impossible
return "", errors.Errorf("invalid cloud name chosen: %s", cloud)
}
return cloudName, nil
}
// queryRegion asks the user to pick a region of the ones passed in. The first
// region in the list will be the default.
func queryRegion(cloud string, regions []jujucloud.Region, scanner *bufio.Scanner, w io.Writer) (string, error) {
fmt.Fprintf(w, "Regions in %s:\n", cloud)
names := jujucloud.RegionNames(regions)
// add an empty string to allow for a default value. Also gives us an extra
// line return after the list of names.
names = append(names, "")
if _, err := fmt.Fprintln(w, strings.Join(names, "\n")); err != nil {
return "", errors.Trace(err)
}
verify := interact.MatchOptions(names, errors.Errorf("Invalid region."))
defaultRegion := regions[0].Name
query := fmt.Sprintf("Select a region in %s [%s]: ", cloud, defaultRegion)
region, err := interact.QueryVerify([]byte(query), scanner, w, verify)
if err != nil {
return "", errors.Trace(err)
}
if region == "" {
return defaultRegion, nil
}
regionName, ok := interact.FindMatch(region, names)
if !ok {
// should be impossible
return "", errors.Errorf("invalid region name chosen: %s", region)
}
return regionName, nil
}
func defaultControllerName(username, cloudname, region string, cloud *jujucloud.Cloud) string {
name := cloudname
if len(cloud.Regions) > 1 {
name = region
}
if username == "" {
return name
}
return username + "-" + name
}
func queryName(defName string, scanner *bufio.Scanner, w io.Writer) (string, error) {
query := fmt.Sprintf("Enter a name for the Controller [%s]: ", defName)
name, err := interact.QueryVerify([]byte(query), scanner, w, nil)
if err != nil {
return "", errors.Trace(err)
}
if name == "" {
return defName, nil
}
return name, nil
}
func sortClouds(maps ...map[string]jujucloud.Cloud) []string {
var clouds []string
for _, m := range maps {
for name := range m {
clouds = append(clouds, name)
}
}
sort.Strings(clouds)
return clouds
}<|fim▁end|> | |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2016 Elico Corp (www.elico-corp.com).
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Membership Management - POS Membership',
'version': '8.0.1.0.2',
'category': 'Generic Modules',
'depends': [
'account_membership_balance',
'point_of_sale',
'account_accountant',
'account_voucher',
],
'author': 'Elico Corp',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'views/pos_membership.xml',
'views/partner_view.xml'
],
'qweb': [
'static/src/xml/pos.xml'
],
'installable': True,<|fim▁hole|>}<|fim▁end|> | 'application': False, |
<|file_name|>Log.java<|end_file_name|><|fim▁begin|>package nl.jappieklooster;
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Wraps around the java.util.logging.Logger, just to save some typing time. It also makes all the
* logging go through here, so shutting it down is easy.
*
* @author jappie
*/
public class Log {
private static final Logger LOGGER = Logger.getLogger("Logger");
private static final Level LOG_LEVEL = Level.FINER;
static {
LOGGER.setLevel(LOG_LEVEL);
ConsoleHandler handler = new ConsoleHandler();
// PUBLISH this level
handler.setLevel(LOG_LEVEL);
LOGGER.addHandler(handler);
}
// no initialization of this class allowed
private Log() {
}
private static void write(Level severity, String message, Object... params) {
LOGGER.log(severity, message, params);
}
private static void write(Level severity, String message) {
LOGGER.log(severity, message);
}
private static void write(String message) {
write(Level.INFO, message);
}
public static void debug(String message) {
write(Level.FINER, message);
}
public static void verbose(String message) {
write(Level.FINEST, message);
}
public static void write(String message, Object... params) {
write(Level.INFO, message, params);
}
public static void debug(String message, Object... params) {
write(Level.FINER, message, params);
}
public static void verbose(String message, Object... params) {
write(Level.FINEST, message, params);
}
public static void panic(String message, Object... params) {
write(Level.SEVERE, message, params);
}
<|fim▁hole|> write(Level.SEVERE, message);
}
public static void warn(String message, Object... params) {
write(Level.WARNING, message, params);
}
public static void warn(String message) {
write(Level.WARNING, message);
}
}<|fim▁end|> | public static void panic(String message) { |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import datetime
import os
import uuid
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from pinax.invitations.models import JoinInvitation
from reversion import revisions as reversion
from slugify import slugify
from . import signals
from .hooks import hookset
def avatar_upload(instance, filename):
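    """Store uploaded avatars under a random UUID-based filename, keeping the original extension."""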
ext = filename.split(".")[-1]
filename = f"{uuid.uuid4()}.{ext}"
return os.path.join("avatars", filename)
def create_slug(name):
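    """Slugify the name and truncate it to 50 characters for the slug field."""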
return slugify(name)[:50]
class BaseTeam(models.Model):
MEMBER_ACCESS_OPEN = "open"
MEMBER_ACCESS_APPLICATION = "application"
MEMBER_ACCESS_INVITATION = "invitation"
MANAGER_ACCESS_ADD = "add someone"
MANAGER_ACCESS_INVITE = "invite someone"
MEMBER_ACCESS_CHOICES = [
(MEMBER_ACCESS_OPEN, _("open")),
(MEMBER_ACCESS_APPLICATION, _("by application")),
(MEMBER_ACCESS_INVITATION, _("by invitation"))
]
MANAGER_ACCESS_CHOICES = [
(MANAGER_ACCESS_ADD, _("add someone")),
(MANAGER_ACCESS_INVITE, _("invite someone"))
]
member_access = models.CharField(max_length=20, choices=MEMBER_ACCESS_CHOICES, verbose_name=_("member access"))
manager_access = models.CharField(max_length=20, choices=MANAGER_ACCESS_CHOICES, verbose_name=_("manager access"))
class Meta:
abstract = True
verbose_name = _("Base")
verbose_name_plural = _("Bases")
def can_join(self, user):
state = self.state_for(user)
if self.member_access == BaseTeam.MEMBER_ACCESS_OPEN and state is None:
return True
elif state == BaseMembership.STATE_INVITED:
return True
else:
return False
def can_leave(self, user):
# managers can't leave at the moment
role = self.role_for(user)
return role == BaseMembership.ROLE_MEMBER
def can_apply(self, user):
state = self.state_for(user)
return self.member_access == BaseTeam.MEMBER_ACCESS_APPLICATION and state is None
@property
def applicants(self):
return self.memberships.filter(state=BaseMembership.STATE_APPLIED)
@property
def invitees(self):
return self.memberships.filter(state=BaseMembership.STATE_INVITED)
@property
def declines(self):
return self.memberships.filter(state=BaseMembership.STATE_DECLINED)
@property
def rejections(self):
return self.memberships.filter(state=BaseMembership.STATE_REJECTED)
@property
def waitlisted(self):
return self.memberships.filter(state=BaseMembership.STATE_WAITLISTED)
@property
def acceptances(self):
return self.memberships.filter(state__in=[
BaseMembership.STATE_ACCEPTED,
BaseMembership.STATE_AUTO_JOINED]
)
@property
def members(self):
return self.acceptances.filter(role=BaseMembership.ROLE_MEMBER)
@property
def managers(self):
return self.acceptances.filter(role=BaseMembership.ROLE_MANAGER)
@property
def owners(self):
return self.acceptances.filter(role=BaseMembership.ROLE_OWNER)
def is_owner_or_manager(self, user):
return self.acceptances.filter(
role__in=[
BaseMembership.ROLE_OWNER,
BaseMembership.ROLE_MANAGER
],
user=user
).exists()
def is_member(self, user):
return self.members.filter(user=user).exists()
def is_manager(self, user):
return self.managers.filter(user=user).exists()
def is_owner(self, user):
return self.owners.filter(user=user).exists()
def is_on_team(self, user):
return self.acceptances.filter(user=user).exists()
def add_member(self, user, role=None, state=None, by=None):
# we do this, rather than put the BaseMembership constants in declaration
# because BaseMembership is not yet defined
if role is None:
role = BaseMembership.ROLE_MEMBER
if state is None:
state = BaseMembership.STATE_AUTO_JOINED
membership, created = self.memberships.get_or_create(
team=self,
user=user,
defaults={"role": role, "state": state},
)
signals.added_member.send(sender=self, membership=membership, by=by)
return membership
def add_user(self, user, role, by=None):
state = BaseMembership.STATE_AUTO_JOINED
if self.manager_access == BaseTeam.MANAGER_ACCESS_INVITE:
state = BaseMembership.STATE_INVITED
membership, _ = self.memberships.get_or_create(
user=user,
defaults={"role": role, "state": state}
)
signals.added_member.send(sender=self, membership=membership, by=by)
return membership
def invite_user(self, from_user, to_email, role, message=None):
if not JoinInvitation.objects.filter(signup_code__email=to_email).exists():
invite = JoinInvitation.invite(from_user, to_email, message, send=False)
membership, _ = self.memberships.get_or_create(
invite=invite,
defaults={"role": role, "state": BaseMembership.STATE_INVITED}
)
invite.send_invite()
signals.invited_user.send(sender=self, membership=membership, by=from_user)
return membership
def for_user(self, user):
try:
return self.memberships.get(user=user)
except ObjectDoesNotExist:
pass
def state_for(self, user):
membership = self.for_user(user=user)
if membership:
return membership.state
def role_for(self, user):
if hookset.user_is_staff(user):
return Membership.ROLE_MANAGER
membership = self.for_user(user)
if membership:
return membership.role
class SimpleTeam(BaseTeam):
class Meta:
verbose_name = _("Simple Team")
verbose_name_plural = _("Simple Teams")
class Team(BaseTeam):
slug = models.SlugField(unique=True)
name = models.CharField(max_length=100, verbose_name=_("name"))
avatar = models.ImageField(upload_to=avatar_upload, blank=True, verbose_name=_("avatar"))
description = models.TextField(blank=True, verbose_name=_("description"))
creator = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="teams_created", verbose_name=_("creator"), on_delete=models.CASCADE)
created = models.DateTimeField(default=timezone.now, editable=False, verbose_name=_("created"))
class Meta:
verbose_name = _("Team")
verbose_name_plural = _("Teams")
def get_absolute_url(self):
return reverse("pinax_teams:team_detail", args=[self.slug])
def __str__(self):
return self.name
def save(self, *args, **kwargs):
if not self.id:
self.slug = create_slug(self.name)
self.full_clean()
super().save(*args, **kwargs)
class BaseMembership(models.Model):
STATE_APPLIED = "applied"
STATE_INVITED = "invited"
STATE_DECLINED = "declined"
STATE_REJECTED = "rejected"
STATE_ACCEPTED = "accepted"
STATE_WAITLISTED = "waitlisted"
STATE_AUTO_JOINED = "auto-joined"
ROLE_MEMBER = "member"
ROLE_MANAGER = "manager"
ROLE_OWNER = "owner"
STATE_CHOICES = [
(STATE_APPLIED, _("applied")),
(STATE_INVITED, _("invited")),
(STATE_DECLINED, _("declined")),
(STATE_REJECTED, _("rejected")),
(STATE_ACCEPTED, _("accepted")),
(STATE_WAITLISTED, _("waitlisted")),
(STATE_AUTO_JOINED, _("auto joined"))
]
ROLE_CHOICES = [
(ROLE_MEMBER, _("member")),
(ROLE_MANAGER, _("manager")),
(ROLE_OWNER, _("owner"))
]
state = models.CharField(max_length=20, choices=STATE_CHOICES, verbose_name=_("state"))
role = models.CharField(max_length=20, choices=ROLE_CHOICES, default=ROLE_MEMBER, verbose_name=_("role"))
created = models.DateTimeField(default=timezone.now, verbose_name=_("created"))
class Meta:
abstract = True
def is_owner(self):
return self.role == BaseMembership.ROLE_OWNER
def is_manager(self):
return self.role == BaseMembership.ROLE_MANAGER
def is_member(self):
return self.role == BaseMembership.ROLE_MEMBER
def promote(self, by):
role = self.team.role_for(by)
if role in [BaseMembership.ROLE_MANAGER, BaseMembership.ROLE_OWNER]:
if self.role == Membership.ROLE_MEMBER:
self.role = Membership.ROLE_MANAGER
self.save()
signals.promoted_member.send(sender=self, membership=self, by=by)
return True
return False
def demote(self, by):
role = self.team.role_for(by)
if role in [Membership.ROLE_MANAGER, Membership.ROLE_OWNER]:
if self.role == Membership.ROLE_MANAGER:
self.role = Membership.ROLE_MEMBER
self.save()
signals.demoted_member.send(sender=self, membership=self, by=by)
return True
return False
def accept(self, by):
role = self.team.role_for(by)
if role in [Membership.ROLE_MANAGER, Membership.ROLE_OWNER]:
if self.state == Membership.STATE_APPLIED:<|fim▁hole|> signals.accepted_membership.send(sender=self, membership=self)
return True
return False
def reject(self, by):
role = self.team.role_for(by)
if role in [Membership.ROLE_MANAGER, Membership.ROLE_OWNER]:
if self.state == Membership.STATE_APPLIED:
self.state = Membership.STATE_REJECTED
self.save()
signals.rejected_membership.send(sender=self, membership=self)
return True
return False
def joined(self):
self.user = self.invite.to_user
if self.team.manager_access == Team.MANAGER_ACCESS_ADD:
self.state = Membership.STATE_AUTO_JOINED
else:
self.state = Membership.STATE_INVITED
self.save()
def status(self):
if self.user:
return self.get_state_display()
if self.invite:
return self.invite.get_status_display()
return "Unknown"
def resend_invite(self, by=None):
if self.invite is not None:
code = self.invite.signup_code
code.expiry = timezone.now() + datetime.timedelta(days=5)
code.save()
code.send()
signals.resent_invite.send(sender=self, membership=self, by=by)
def remove(self, by=None):
if self.invite is not None:
self.invite.signup_code.delete()
self.invite.delete()
self.delete()
signals.removed_membership.send(sender=Membership, team=self.team, user=self.user, invitee=self.invitee, by=by)
@property
def invitee(self):
return self.user or self.invite.to_user_email()
class SimpleMembership(BaseMembership):
team = models.ForeignKey(SimpleTeam, related_name="memberships", verbose_name=_("team"), on_delete=models.CASCADE)
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="simple_memberships", null=True, blank=True, verbose_name=_("user"), on_delete=models.SET_NULL)
invite = models.ForeignKey(JoinInvitation, related_name="simple_memberships", null=True, blank=True, verbose_name=_("invite"), on_delete=models.SET_NULL)
def __str__(self):
return f"{self.user} in {self.team}"
class Meta:
unique_together = [("team", "user", "invite")]
verbose_name = _("Simple Membership")
verbose_name_plural = _("Simple Memberships")
class Membership(BaseMembership):
team = models.ForeignKey(Team, related_name="memberships", verbose_name=_("team"), on_delete=models.CASCADE)
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="memberships", null=True, blank=True, verbose_name=_("user"), on_delete=models.SET_NULL)
invite = models.ForeignKey(JoinInvitation, related_name="memberships", null=True, blank=True, verbose_name=_("invite"), on_delete=models.SET_NULL)
def __str__(self):
return f"{self.user} in {self.team}"
class Meta:
unique_together = [("team", "user", "invite")]
verbose_name = _("Membership")
verbose_name_plural = _("Memberships")
reversion.register(SimpleMembership)
reversion.register(Membership)<|fim▁end|> | self.state = Membership.STATE_ACCEPTED
self.save() |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for hapi-server-session 5.1
// Project: https://github.com/btmorex/hapi-server-session
// Definitions by: Avery Fay <https://github.com/btmorex>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
import { BinaryLike } from 'crypto';
import { CachePolicyOptions, Plugin, ServerStateCookieOptions } from '@hapi/hapi';
declare module '@hapi/hapi' {
interface Request {
session: any;
}
}
export interface Options {
algorithm?: string;
cache?: CachePolicyOptions<any>;
cookie?: ServerStateCookieOptions;
expiresIn?: number;
key?: BinaryLike;
name?: string;
size?: number;
vhost?: string | string[];
}
export const plugin: Plugin<Options>;<|fim▁hole|>export default plugin;<|fim▁end|> | |
<|file_name|>cholesky.rs<|end_file_name|><|fim▁begin|>use rulinalg::matrix::Matrix;
use rulinalg::matrix::decomposition::{Cholesky, Decomposition};
use test::Bencher;
#[bench]
fn cholesky_decompose_unpack_100x100(b: &mut Bencher) {
let n = 100;
let x = Matrix::<f64>::identity(n);
b.iter(|| {
// Assume that the cost of cloning x is roughly
// negligible in comparison with the cost of the Cholesky decomposition
Cholesky::decompose(x.clone()).expect("Matrix is invertible")
.unpack()
})
}
#[bench]
fn cholesky_decompose_unpack_500x500(b: &mut Bencher) {
let n = 500;
let x = Matrix::<f64>::identity(n);
b.iter(|| {
// Assume that the cost of cloning x is roughly
// negligible in comparison with the cost of the Cholesky decomposition
Cholesky::decompose(x.clone()).expect("Matrix is invertible")
.unpack()
})
}
#[bench]
fn cholesky_100x100(b: &mut Bencher) {
// Benchmark for legacy cholesky(). Remove when
// cholesky() has been removed.
let n = 100;
let x = Matrix::<f64>::identity(n);
b.iter(|| {
x.cholesky().expect("Matrix is invertible")
})
}
#[bench]
fn cholesky_500x500(b: &mut Bencher) {
// Benchmark for legacy cholesky(). Remove when<|fim▁hole|> x.cholesky().expect("Matrix is invertible")
})
}
#[bench]
fn cholesky_solve_1000x1000(b: &mut Bencher) {
let n = 1000;
let x = Matrix::identity(n);
let cholesky = Cholesky::decompose(x).unwrap();
b.iter(|| {
cholesky.solve(vector![0.0; n])
});
}
#[bench]
fn cholesky_solve_100x100(b: &mut Bencher) {
let n = 100;
let x = Matrix::identity(n);
let cholesky = Cholesky::decompose(x).unwrap();
b.iter(|| {
cholesky.solve(vector![0.0; n])
});
}<|fim▁end|> | // cholesky() has been removed.
let n = 500;
let x = Matrix::<f64>::identity(n);
b.iter(|| { |
<|file_name|>FindObjectMetadataFullyQualifiedGetHandler.java<|end_file_name|><|fim▁begin|>package org.vitrivr.cineast.api.rest.handlers.actions.metadata;
import io.javalin.http.Context;
import io.javalin.plugin.openapi.dsl.OpenApiBuilder;
import io.javalin.plugin.openapi.dsl.OpenApiDocumentation;
import java.util.Map;
import org.vitrivr.cineast.api.messages.result.MediaObjectMetadataQueryResult;
import org.vitrivr.cineast.api.rest.OpenApiCompatHelper;
import org.vitrivr.cineast.api.rest.handlers.interfaces.GetRestHandler;
import org.vitrivr.cineast.api.rest.services.MetadataRetrievalService;
/**
* This class handles GET requests with an object id, domain and key and returns all matching metadata descriptors.
* <p>
* <h3>GET</h3>
* This action's resource should have the following structure: {@code find/metadata/of/:id/in/:domain/with/:key}. It then returns all metadata of the object with this id, belonging to that domain, with the specified key.
* </p>
*/
public class FindObjectMetadataFullyQualifiedGetHandler implements
GetRestHandler<MediaObjectMetadataQueryResult> {
public static final String OBJECT_ID_NAME = "id";
public static final String DOMAIN_NAME = "domain";
public static final String KEY_NAME = "key";
public static final String ROUTE = "find/metadata/of/{" + OBJECT_ID_NAME + "}/in/{" + DOMAIN_NAME + "}/with/{" + KEY_NAME + "}";
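// Illustrative request for the route above; the id, domain and key values are hypothetical
// examples, not taken from this codebase:
//   GET find/metadata/of/v_00042/in/technical/with/duration
// would be dispatched to this handler and answered with the matching
// MediaObjectMetadataQueryResult.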
@Override
public MediaObjectMetadataQueryResult doGet(Context ctx) {
final Map<String, String> parameters = ctx.pathParamMap();
final String objectId = parameters.get(OBJECT_ID_NAME);
final String domain = parameters.get(DOMAIN_NAME);
final String key = parameters.get(KEY_NAME);
final MetadataRetrievalService service = new MetadataRetrievalService();
return new MediaObjectMetadataQueryResult("", service.find(objectId, domain, key)
);
}
public OpenApiDocumentation docs() {
return OpenApiBuilder.document()
.operation(op -> {
op.description("The description");
op.summary("Find metadata for specific object id in given domain with given key");
op.addTagsItem(OpenApiCompatHelper.METADATA_OAS_TAG);
op.operationId("findMetaFullyQualified");
})<|fim▁hole|> param.description("The domain name");
})
.pathParam(KEY_NAME, String.class, param -> param.description("Metadata key"))
.json("200", outClass());
}
@Override
public String route() {
return ROUTE;
}
@Override
public Class<MediaObjectMetadataQueryResult> outClass() {
return MediaObjectMetadataQueryResult.class;
}
/* TODO Actually, there is a lot of refactoring potential in this entire package */
}<|fim▁end|> | .pathParam(OBJECT_ID_NAME, String.class, param -> {
param.description("The object id");
})
.pathParam(DOMAIN_NAME, String.class, param -> { |
<|file_name|>run_hardcoded_tests.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
var tests = require("./hardcoded_tests.js");
var runTest = require(".//hardcoded_test_runner.js");
var colors = require("colors");
var parseArgs = require("minimist");
var failures = {};
var num_successes = 0;
var num_failures = 0;
var argv = parseArgs(
process.argv.slice(2),
{string: ["filter"]}
);
var todo = {};
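// Illustrative invocations (the filter value is an assumption, not taken from this repo's docs):
//   node run_hardcoded_tests.js --filter="arrow"
//   node run_hardcoded_tests.js --dumpAst --jsonErrors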
function escape_content(content) {
return content
.replace(/[\\]/g, '\\\\')
.replace(/[\/]/g, '\\/')
.replace(/[\b]/g, '\\b')
.replace(/[\f]/g, '\\f')
.replace(/[\n]/g, '\\n')
.replace(/[\r]/g, '\\r')
.replace(/[\t]/g, '\\t');
}
function test_section(section) {
console.log("===%s===".bold, section);
for (var content in tests[section]) {
test = {
content: content,
spec: tests[section][content]
};
test.dumpAst = argv.dumpAst;
test.jsonErrors = argv.jsonErrors;
test.showDifferences = argv.showDifferences;
var name = escape_content(test.content);
process.stdout.write("RUNNING".yellow + " " + name + "\r");
var result = runTest(test);
if (result.passed) {
console.log('%s: "%s"', 'PASSED'.green, name);
num_successes++;
} else {
console.log('%s: "%s"', 'FAILED'.redBG.white, name);
num_failures++;
failures[section] = failures[section] || {};
failures[section][test.content] = result;
}
}
}
function go() {
if (typeof argv.filter == "string") {
var regex = new RegExp(argv.filter);
for (section in tests) {
if (tests.hasOwnProperty(section)) {
var foundOne = false;
for (test in tests[section]) {
if (test.match(regex)) {
foundOne = true;
} else {
delete tests[section][test];
}
}
if (!foundOne) {
delete tests[section];
}
}
}
} else if (typeof argv.filter != "undefined") {
console.log("Filter must be a string, given %s", typeof argv.filter);
return usage();
}
for (prop in tests) {
if (todo[prop]) {
delete tests[prop];
}
}
if (argv.dumpAst) {
var num_tests = 0;<|fim▁hole|> }
}
if (num_tests > 20) {
console.log(
"Oh summer child, you really don't want to dump the Ast for %d tests. " +
"Try using --filter to run fewer tests",
num_tests
);
return usage();
}
}
for (prop in tests) {
if (tests.hasOwnProperty(prop)) {
test_section(prop);
}
}
console.log("%d/%d tests passed", num_successes, num_successes + num_failures);
if (num_failures > 0) {
console.log("*** %d TESTS FAILED! ***".redBG.white, num_failures);
for (section in failures) {
if (failures.hasOwnProperty(section)) {
console.log("===%s Failures===".bold, section);
for (test in failures[section]) {
if (failures[section].hasOwnProperty(test)) {
var result = failures[section][test];
console.log('Test failure: "%s"'.redBG.white, escape_content(test));
console.log(result.output);
}
}
}
}
process.exit(1);
}
}
function usage() {
console.log("usage: %s [OPTIONS]", process.argv[0]);
console.log("Supported options");
console.log("\t--dumpAst", "Dumps the esprima & flow ASTs before each test");
console.log("\t--filter=regex", "Only run tests that match the regex");
console.log("\t--jsonErrors", "Output errors in json format");
}
if (argv.help) {
usage();
} else {
go();
}<|fim▁end|> | for (prop in tests) {
if (tests.hasOwnProperty(prop)) {
num_tests += tests[prop].length; |
<|file_name|>MidiPlayer.java<|end_file_name|><|fim▁begin|>package uk.ac.brighton.jamss;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.sound.midi.InvalidMidiDataException;
import javax.sound.midi.MidiSystem;
import javax.sound.midi.MidiUnavailableException;
import javax.sound.midi.Sequence;
import javax.sound.midi.Sequencer;
/**
* Creates a sequencer used to play a percussion sequence taken
* from a .midi file.
* @author Nick Walker
*/
class MidiPlayer {
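// Hypothetical usage sketch (the file name and tempo are illustrative assumptions, not part of this class):
//   MidiPlayer player = new MidiPlayer();
//   player.setBPMs(120f);
//   player.play(player.getSequence("percussion.mid"), false);
//   // ... later ...
//   player.stop();
//   player.close();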
// Midi meta event
public static final int END_OF_TRACK_MESSAGE = 47;
private Sequencer sequencer;
public float tempo;
private boolean loop;
public boolean paused;
public void setBPMs(float beatsPerMinute){
tempo = beatsPerMinute;
}
<|fim▁hole|>
/**
* Creates a new MidiPlayer object.
*/
public MidiPlayer() {
try {
sequencer = MidiSystem.getSequencer();
sequencer.open();
//sequencer.addMetaEventListener(this);
} catch (MidiUnavailableException ex) {
sequencer = null;
}
}
/**
* Loads a sequence from the file system. Returns null if an error occurs.
*/
public Sequence getSequence(String filename) {
try {
return getSequence(new FileInputStream(filename));
} catch (IOException ex) {
ex.printStackTrace();
return null;
}
}
/**
* Loads a sequence from an input stream. Returns null if an error occurs.
*/
public Sequence getSequence(InputStream is) {
try {
if (!is.markSupported()) {
is = new BufferedInputStream(is);
}
Sequence s = MidiSystem.getSequence(is);
is.close();
return s;
} catch (InvalidMidiDataException ex) {
ex.printStackTrace();
return null;
} catch (IOException ex) {
ex.printStackTrace();
return null;
}
}
/**
* Plays a sequence, optionally looping. This method returns immediately.
* The sequence is not played if it is invalid.
*/
public void play(Sequence sequence, boolean loop) {
if (sequencer != null && sequence != null && sequencer.isOpen()) {
try {
sequencer.setSequence(sequence);
sequencer.open();
/*if(loop) {
sequencer.setLoopStartPoint(0);
sequencer.setLoopEndPoint(-1);
sequencer.setLoopCount(Sequencer.LOOP_CONTINUOUSLY);
sequencer.setTempoInBPM(tempo);
}*/
sequencer.setTempoInBPM(tempo);
sequencer.start();
this.loop = loop;
} catch (InvalidMidiDataException ex) {
ex.printStackTrace();
} catch (MidiUnavailableException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
/**
* This method is called by the sound system when a meta event occurs. In
* this case, when the end-of-track meta event is received, the sequence is
* restarted if looping is on.
*/
/*public void meta(MetaMessage event) {
if (event.getType() == END_OF_TRACK_MESSAGE) {
if (sequencer != null && sequencer.isOpen() && loop) {
sequencer.setMicrosecondPosition(0);
sequencer.setTempoInBPM(tempo);
sequencer.start();
}
}
}*/
/**
* Stops the sequencer and resets its position to the
* start of the sequence.
*/
public void stop() {
if (sequencer != null && sequencer.isOpen()) {
sequencer.stop();
sequencer.setMicrosecondPosition(0);
}
}
/**
* Closes the sequencer.
*/
public void close() {
if (sequencer != null && sequencer.isOpen()) {
sequencer.close();
}
}
/**
* Gets the sequencer.
*/
public Sequencer getSequencer() {
return sequencer;
}
/**
* Sets the paused state. Music may not immediately pause.
*/
public void setPaused(boolean paused) {
if (this.paused != paused && sequencer != null && sequencer.isOpen()) {
this.paused = paused;
if (paused) {
sequencer.stop();
} else {
sequencer.start();
}
}
}
/**
* Returns the paused state.
*/
public boolean isPaused() {
return paused;
}
}<|fim▁end|> | |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|>{
'name' : 'Signature templates for user emails',
'version' : '1.0.0',
'author' : 'IT-Projects LLC, Ivan Yelizariev',
'license': 'LGPL-3',
'category' : 'Social Network',
'website' : 'https://yelizariev.github.io',
'depends' : ['base'],
'data':[
'res_users_signature_views.xml',
'security/res_users_signature_security.xml',<|fim▁hole|>}<|fim▁end|> | 'security/ir.model.access.csv',
],
'installable': False |
<|file_name|>register.component.spec.ts<|end_file_name|><|fim▁begin|>import { ComponentFixture, TestBed, async, inject, tick, fakeAsync } from '@angular/core/testing';
import { Renderer, ElementRef } from '@angular/core';
import { Observable } from 'rxjs/Rx';
import { JhiLanguageService } from 'ng-jhipster';
import { MockLanguageService } from '../../../helpers/mock-language.service';
import { TravelerWebSiteTestModule } from '../../../test.module';
import { LoginModalService } from '../../../../../../main/webapp/app/shared';
import { Register } from '../../../../../../main/webapp/app/account/register/register.service';
import { RegisterComponent } from '../../../../../../main/webapp/app/account/register/register.component';
describe('Component Tests', () => {
describe('RegisterComponent', () => {
let fixture: ComponentFixture<RegisterComponent>;
let comp: RegisterComponent;
beforeEach(async(() => {
TestBed.configureTestingModule({
imports: [TravelerWebSiteTestModule],
declarations: [RegisterComponent],
providers: [
Register,
{
provide: LoginModalService,
useValue: null
},
{
provide: Renderer,
useValue: null
},
{
provide: ElementRef,
useValue: null
}
]
}).overrideTemplate(RegisterComponent, '')
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(RegisterComponent);
comp = fixture.componentInstance;
comp.ngOnInit();
});
it('should ensure the two passwords entered match', () => {
comp.registerAccount.password = 'password';
comp.confirmPassword = 'non-matching';
comp.register();
expect(comp.doNotMatch).toEqual('ERROR');
});
it('should update success to OK after creating an account',
inject([Register, JhiLanguageService],
fakeAsync((service: Register, mockTranslate: MockLanguageService) => {
spyOn(service, 'save').and.returnValue(Observable.of({}));
comp.registerAccount.password = comp.confirmPassword = 'password';
comp.register();
tick();
expect(service.save).toHaveBeenCalledWith({
password: 'password',
langKey: 'en'
});
expect(comp.success).toEqual(true);
expect(comp.registerAccount.langKey).toEqual('en');
expect(mockTranslate.getCurrentSpy).toHaveBeenCalled();
expect(comp.errorUserExists).toBeNull();
expect(comp.errorEmailExists).toBeNull();
expect(comp.error).toBeNull();
})
)
);
it('should notify of user existence upon 400/login already in use',
inject([Register],
fakeAsync((service: Register) => {
spyOn(service, 'save').and.returnValue(Observable.throw({
status: 400,
_body: 'login already in use'
}));
comp.registerAccount.password = comp.confirmPassword = 'password';
comp.register();
tick();
expect(comp.errorUserExists).toEqual('ERROR');
expect(comp.errorEmailExists).toBeNull();
expect(comp.error).toBeNull();
})<|fim▁hole|> inject([Register],
fakeAsync((service: Register) => {
spyOn(service, 'save').and.returnValue(Observable.throw({
status: 400,
_body: 'email address already in use'
}));
comp.registerAccount.password = comp.confirmPassword = 'password';
comp.register();
tick();
expect(comp.errorEmailExists).toEqual('ERROR');
expect(comp.errorUserExists).toBeNull();
expect(comp.error).toBeNull();
})
)
);
it('should notify of generic error',
inject([Register],
fakeAsync((service: Register) => {
spyOn(service, 'save').and.returnValue(Observable.throw({
status: 503
}));
comp.registerAccount.password = comp.confirmPassword = 'password';
comp.register();
tick();
expect(comp.errorUserExists).toBeNull();
expect(comp.errorEmailExists).toBeNull();
expect(comp.error).toEqual('ERROR');
})
)
);
});
});<|fim▁end|> | )
);
it('should notify of email existence upon 400/email address already in use', |
<|file_name|>bitcoin_sr.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="sr" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Heavycoin</source>
<translation>О Heavycoin-у</translation>
</message>
<message>
<location line="+39"/>
<source><b>Heavycoin</b> version</source>
<translation><b>Heavycoin</b> верзија</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The Heavycoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Адресар</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Кликните два пута да промените адресу и/или етикету</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Прави нову адресу</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Копира изабрану адресу на системски клипборд</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Нова адреса</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Heavycoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Ово су Ваше Heavycoin адресе за примање уплата. Можете да сваком пошиљаоцу дате другачију адресу да би пратили ко је вршио уплате.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Prikaži &QR kod</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Heavycoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Heavycoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Избриши</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Heavycoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Извоз података из адресара</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Зарезом одвојене вредности (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Грешка током извоза</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Није могуће писати у фајл %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Етикета</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Адреса</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(без етикете)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Унесите лозинку</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Нова лозинка</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Поновите нову лозинку</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Унесите нову лозинку за приступ новчанику.<br/>Молимо Вас да лозинка буде <b>10 или више насумице одабраних знакова</b>, или <b>осам или више речи</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Шифровање новчаника</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ова акција захтева лозинку Вашег новчаника да би га откључала.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Откључавање новчаника</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ова акција захтева да унесете лозинку да би дешифловала новчаник.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Дешифровање новчаника</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Промена лозинке</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Унесите стару и нову лозинку за шифровање новчаника.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Одобрите шифровање новчаника</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR BITCOINS</b>!</source>
<translation>Упозорење: Ако се ваш новчаник шифрује а потом изгубите лозинкзу, ви ћете <b>ИЗГУБИТИ СВЕ BITCOIN-Е</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Да ли сте сигурни да желите да се новчаник шифује?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Новчаник је шифрован</translation>
</message>
<message>
<location line="-56"/>
<source>Heavycoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your Heavycoins from being stolen by malware infecting your computer.</source>
<translation>Heavycoin će se sad zatvoriti da bi završio proces enkripcije. Zapamti da enkripcija tvog novčanika ne može u potpunosti da zaštiti tvoje bitcoine da ne budu ukradeni od malawarea koji bi inficirao tvoj kompjuter.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Неуспело шифровање новчаника</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Настала је унутрашња грешка током шифровања новчаника. Ваш новчаник није шифрован.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>Лозинке које сте унели се не подударају.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Неуспело откључавање новчаника</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Лозинка коју сте унели за откључавање новчаника је нетачна.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Неуспело дешифровање новчаника</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Лозинка за приступ новчанику је успешно промењена.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Синхронизација са мрежом у току...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Општи преглед</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Погледајте општи преглед новчаника</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Трансакције</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Претражите историјат трансакција</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Уредите запамћене адресе и њихове етикете</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Прегледајте листу адреса на којима прихватате уплате</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>I&zlaz</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Напустите програм</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Heavycoin</source>
<translation>Прегледајте информације о Heavycoin-у</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>О &Qt-у</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Прегледајте информације о Qt-у</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>П&оставке...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Шифровање новчаника...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Backup новчаника</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>Промени &лозинку...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Heavycoin address</source>
<translation>Пошаљите новац на bitcoin адресу</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Heavycoin</source>
<translation>Изаберите могућности bitcoin-а</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/><|fim▁hole|> <source>Change the passphrase used for wallet encryption</source>
<translation>Мењање лозинке којом се шифрује новчаник</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Heavycoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>новчаник</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About Heavycoin</source>
<translation>&О Heavycoin-у</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Heavycoin addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Heavycoin addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Фајл</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Подешавања</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>П&омоћ</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Трака са картицама</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>Heavycoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Heavycoin network</source>
<translation><numerusform>%n активна веза са Heavycoin мрежом</numerusform><numerusform>%n активне везе са Heavycoin мрежом</numerusform><numerusform>%n активних веза са Heavycoin мрежом</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Ажурно</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Ажурирање у току...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Послана трансакција</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Придошла трансакција</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Datum: %1⏎ Iznos: %2⏎ Tip: %3⏎ Adresa: %4⏎</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Heavycoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Новчаник јс <b>шифрован</b> и тренутно <b>откључан</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Новчаник јс <b>шифрован</b> и тренутно <b>закључан</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Heavycoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Измени адресу</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Етикета</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Адреса</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Унешена адреса "%1" се већ налази у адресару.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Heavycoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Немогуће откључати новчаник.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Heavycoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>верзија</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Korišćenje:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Поставке</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start Heavycoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start Heavycoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Heavycoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the Heavycoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Heavycoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Јединица за приказивање износа:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show Heavycoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Heavycoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Форма</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Heavycoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Непотврђено:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>новчаник</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Недавне трансакције</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start heavycoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Zatraži isplatu</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Iznos:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>&Етикета</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Poruka:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Snimi kao...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Heavycoin-Qt help message to get a list with possible Heavycoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>Heavycoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Heavycoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the Heavycoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Heavycoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Слање новца</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Ukloni sva polja sa transakcijama</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Потврди акцију слања</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>&Пошаљи</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Да ли сте сигурни да желите да пошаљете %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation>и</translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Форма</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Етикета</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Izaberite adresu iz adresara</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Heavycoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Unesite Heavycoin adresu (n.pr. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Heavycoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Heavycoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Heavycoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Unesite Heavycoin adresu (n.pr. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter Heavycoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Heavycoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Otvorite do %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/nepotvrdjeno</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 potvrde</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>datum</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>етикета</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>iznos</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, nije još uvek uspešno emitovan</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>nepoznato</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>detalji transakcije</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Ovaj odeljak pokazuje detaljan opis transakcije</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>datum</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>tip</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Адреса</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>iznos</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Otvoreno do %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline * van mreže (%1 potvrdjenih)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Nepotvrdjeno (%1 of %2 potvrdjenih)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Potvrdjena (%1 potvrdjenih)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Ovaj blok nije primljen od ostalih čvorova (nodova) i verovatno neće biti prihvaćen!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Generisan ali nije prihvaćen</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Primljen sa</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Primljeno od</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Poslat ka</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Isplata samom sebi</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minirano</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status vaše transakcije. Predjite mišem preko ovog polja da bi ste videli broj konfirmacija</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Datum i vreme primljene transakcije.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tip transakcije</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Destinacija i adresa transakcije</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Iznos odbijen ili dodat balansu.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Sve</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Danas</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>ove nedelje</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Ovog meseca</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Prošlog meseca</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Ove godine</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Opseg...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Primljen sa</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Poslat ka</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Vama - samom sebi</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minirano</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Drugi</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Navedite adresu ili naziv koji bi ste potražili</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Min iznos</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>kopiraj adresu</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>kopiraj naziv</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>kopiraj iznos</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>promeni naziv</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Izvezi podatke o transakcijama</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Зарезом одвојене вредности (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Potvrdjen</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>datum</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>tip</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Етикета</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Адреса</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>iznos</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Грешка током извоза</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Није могуће писати у фајл %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Opseg:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>do</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Слање новца</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Heavycoin version</source>
<translation>Heavycoin верзија</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Korišćenje:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or bitcoind</source>
<translation>Pošalji naredbu na -server ili bitcoinid
</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Listaj komande</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Zatraži pomoć za komande</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Opcije</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: bitcoin.conf)</source>
<translation>Potvrdi željeni konfiguracioni fajl (podrazumevani:bitcoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: bitcoind.pid)</source>
<translation>Konkretizuj pid fajl (podrazumevani: bitcoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Gde je konkretni data direktorijum </translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 8333 or testnet: 18333)</source>
<translation>Slušaj konekcije na <port> (default: 8333 or testnet: 18333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Održavaj najviše <n> konekcija po priključku (default: 125)
</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 8332 or testnet: 18332)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Prihvati komandnu liniju i JSON-RPC komande</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Radi u pozadini kao daemon servis i prihvati komande</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Koristi testnu mrežu</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=bitcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Heavycoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Heavycoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Heavycoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Heavycoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Korisničko ime za JSON-RPC konekcije</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Lozinka za JSON-RPC konekcije</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Dozvoli JSON-RPC konekcije sa posebne IP adrese</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Pošalji komande to nodu koji radi na <ip> (default: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Odredi veličinu zaštićenih ključeva na <n> (default: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Ponovo skeniraj lanac blokova za nedostajuće transakcije iz novčanika</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Koristi OpenSSL (https) za JSON-RPC konekcije</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>privatni ključ za Server (podrazumevan: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Prihvatljive cifre (podrazumevano: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Ova poruka Pomoći</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>učitavam adrese....</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Heavycoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Heavycoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Učitavam blok indeksa...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Heavycoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Новчаник се учитава...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Ponovo skeniram...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Završeno učitavanje</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | |
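Most messages in the contexts above still carry <translation type="unfinished"/>, i.e. no Serbian text has been supplied for those source strings yet. As a rough illustration only (the file path below is an assumption, not something taken from this row), a Qt Linguist .ts file like this one can be audited with the Python standard library:

# Minimal sketch: tally translated vs. unfinished messages in a Qt .ts file.
# The path "bitcoin_sr.ts" is hypothetical; point it at any Qt Linguist file.
import xml.etree.ElementTree as ET

tree = ET.parse("bitcoin_sr.ts")
translated = unfinished = 0
for message in tree.getroot().iter("message"):
    translation = message.find("translation")
    if translation is not None and translation.get("type") == "unfinished":
        unfinished += 1
    else:
        translated += 1
print("translated:", translated, "unfinished:", unfinished)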
<|file_name|>uLispParser.py<|end_file_name|><|fim▁begin|>"""
BNF reference: http://theory.lcs.mit.edu/~rivest/sexp.txt
<sexp> :: <string> | <list>
<string> :: <display>? <simple-string> ;
<simple-string> :: <raw> | <token> | <base-64> | <hexadecimal> |
<quoted-string> ;
<display> :: "[" <simple-string> "]" ;
<raw> :: <decimal> ":" <bytes> ;
<decimal> :: <decimal-digit>+ ;
-- decimal numbers should have no unnecessary leading zeros
<bytes> -- any string of bytes, of the indicated length
<token> :: <tokenchar>+ ;
<base-64> :: <decimal>? "|" ( <base-64-char> | <whitespace> )* "|" ;
<hexadecimal> :: "#" ( <hex-digit> | <white-space> )* "#" ;
<quoted-string> :: <decimal>? <quoted-string-body>
<quoted-string-body> :: "\"" <bytes> "\""
<list> :: "(" ( <sexp> | <whitespace> )* ")" ;
<whitespace> :: <whitespace-char>* ;
<token-char> :: <alpha> | <decimal-digit> | <simple-punc> ;
<alpha> :: <upper-case> | <lower-case> | <digit> ;
<lower-case> :: "a" | ... | "z" ;
<upper-case> :: "A" | ... | "Z" ;
<decimal-digit> :: "0" | ... | "9" ;
<hex-digit> :: <decimal-digit> | "A" | ... | "F" | "a" | ... | "f" ;
<simple-punc> :: "-" | "." | "/" | "_" | ":" | "*" | "+" | "=" ;
<whitespace-char> :: " " | "\t" | "\r" | "\n" ;
<base-64-char> :: <alpha> | <decimal-digit> | "+" | "/" | "=" ;
<null> :: "" ;
"""
from pyparsing import *
from base64 import b64decode
import pprint
def verifyLen(s, l, t):
t = t[0]
if t.len is not None:
t1len = len(t[1])
if t1len != t.len:
raise ParseFatalException(s, l, "invalid data of length %d, expected %s" % (t1len, t.len))
return t[1]
# define punctuation literals
LPAR, RPAR, LBRK, RBRK, LBRC, RBRC, VBAR = map(Suppress, "()[]{}|")
decimal = Regex(r'0|[1-9]\d*').setParseAction(lambda t: int(t[0]))
hexadecimal = ("#" + OneOrMore(Word(hexnums)) + "#") \
.setParseAction(lambda t: int("".join(t[1:-1]), 16))
bytes = Word(printables)
raw = Group(decimal("len") + Suppress(":") + bytes).setParseAction(verifyLen)
token = Word(alphanums + "-./_:*+=")
base64_ = Group(
Optional(decimal | hexadecimal, default=None)("len") + VBAR + OneOrMore(Word(alphanums + "+/=")).setParseAction(
lambda t: b64decode("".join(t))) + VBAR).setParseAction(verifyLen)
qString = Group(Optional(decimal, default=None)("len") +
dblQuotedString.setParseAction(removeQuotes)).setParseAction(verifyLen)
simpleString = base64_ | raw | decimal | token | hexadecimal | qString<|fim▁hole|>token = Word(alphanums + "-./_:*+=!<>")
simpleString = real | base64_ | raw | decimal | token | hexadecimal | qString
display = LBRK + simpleString + RBRK
string_ = Optional(display) + simpleString
uLisp_parse = Forward()
sexpList = Group(LPAR + ZeroOrMore(uLisp_parse) + RPAR)
uLisp_parse << ( string_ | sexpList )<|fim▁end|> |
# extended definitions
decimal = Regex(r'-?0|[1-9]\d*').setParseAction(lambda t: int(t[0]))
real = Regex(r"[+-]?\d+\.\d*([eE][+-]?\d+)?").setParseAction(lambda tokens: float(tokens[0])) |
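Assuming the prefix, the filled-in "extended definitions" above, and the suffix are reassembled into a single importable uLispParser.py module, the grammar then accepts nested lists, the widened token alphabet (!, <, >) and floating point literals. A minimal, purely illustrative check follows; the module name and the input expression are assumptions, not part of the row above.

# Illustrative check of the assembled S-expression grammar from above.
from uLispParser import uLisp_parse  # assumes the assembled module is on the path

result = uLisp_parse.parseString("(define (scale x) (* x 2.5))", parseAll=True)
print(result.asList())
# Expected shape, roughly: [['define', ['scale', 'x'], ['*', 'x', 2.5]]]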
<|file_name|>enc_test.cc<|end_file_name|><|fim▁begin|>/*
* This file is part of RHexLib,
*
* Copyright (c) 2001 The University of Michigan, its Regents,
* Fellows, Employees and Agents. All rights reserved, and distributed as
* free software under the following license.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1) Redistributions of source code must retain the above copyright
* notice, this list of conditions, the following disclaimer and the
* file called "CREDITS" which accompanies this distribution.
*
* 2) Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions, the following disclaimer and the file
* called "CREDITS" which accompanies this distribution in the
* documentation and/or other materials provided with the distribution.
*
* 3) Neither the name of the University of Michigan, Ann Arbor or the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*********************************************************************
* $Id: enc_test.cc,v 1.2 2001/07/12 17:14:10 ulucs Exp $
*
* Example program to test the low level EncoderHW interface
*
* Created : Uluc Saranli, 10/16/2000
* Last Modified : Uluc Saranli, 06/27/2001
*
********************************************************************/
// ==========================================================================
// This program tests the encoder interfaces in RHexLib. On execution,
// the program continuously prints the encoder reads for all 6
// axes. The following keyboard commands are implemented:
//
// 'r' : reset all encoders
// 'd' : Disable all encoders
// 'e' : Enable all encoders
//
// All other keys exit the program
//
// Note: This example does not function with the virtual hardware
// because the virtual hardware requires the module manager to be
// active to function.
//
// ==========================================================================
#include <stdio.h>
#include "sysutil.hh"
// If we are running QNX, determine which Hardware we should use.
// Note that the RHEX_HARDWARE environment variable determines this.
#ifdef _QNX4_
#ifdef _MICHIGAN_
#include "MichiganHW.hh"
MichiganHW hw;
#endif
#ifdef _MCGILL_
#include "McGillHW.hh"
McGillHW hw;
#endif<|fim▁hole|>#endif // #ifdef _QNX4_
#ifdef _LINUX_
#include "SimSectHW.hh"
SimSectHW hw;
#endif
int main( void ) {
int axis;
int done = 0;
int inp;
// This is necessary in the absence of a call to MMChooseHardware()
hw.initialize();
// Enable all 6 encoders
for ( axis = 0; axis < 6; axis++ )
hw.encoders->enable( axis );
// Loop until user interrupt
while ( !done ) {
// Read and print all the encoder values
printf( " Encoders: 0x%x, 0x%x, 0x%x, 0x%x, 0x%x, 0x%x\n",
hw.encoders->read( 0 ),
hw.encoders->read( 1 ),
hw.encoders->read( 2 ),
hw.encoders->read( 3 ),
hw.encoders->read( 4 ),
hw.encoders->read( 5 ) );
// Check for user input
if ( kbhit() ) {
inp = getch();
switch ( inp ) {
case 'r': // Reset all encoders
for ( axis = 0; axis < 6; axis++ )
hw.encoders->reset( axis );
break;
case 'd': // Disable all encoders
for ( axis = 0; axis < 6; axis++ )
hw.encoders->disable( axis );
break;
case 'e': // Enable all encoders
for ( axis = 0; axis < 6; axis++ )
hw.encoders->enable( axis );
break;
default: // Exit on any other keystroke
done = 1;
}
}
}
for ( axis = 0; axis < 6; axis++ )
hw.encoders->disable( axis );
// This is necessary in the absence of a call to MMChooseHardware()
hw.cleanup();
return 0;
}<|fim▁end|> | |
<|file_name|>orienters.py<|end_file_name|><|fim▁begin|>from sympy.core.basic import Basic
from sympy import (sympify, eye, sin, cos, rot_axis1, rot_axis2,
rot_axis3, ImmutableMatrix as Matrix, Symbol)
from sympy.core.cache import cacheit
import sympy.vector
class Orienter(Basic):
"""
Super-class for all orienter classes.
"""
def rotation_matrix(self):
"""
The rotation matrix corresponding to this orienter
instance.
"""
return self._parent_orient
class AxisOrienter(Orienter):
"""
Class to denote an axis orienter.
"""
def __new__(cls, angle, axis):
if not isinstance(axis, sympy.vector.Vector):
raise TypeError("axis should be a Vector")
angle = sympify(angle)
obj = super(AxisOrienter, cls).__new__(cls, angle,
axis)
obj._angle = angle
obj._axis = axis
return obj
def __init__(self, angle, axis):
"""
Axis rotation is a rotation about an arbitrary axis by
some angle. The angle is supplied as a SymPy expr scalar, and
the axis is supplied as a Vector.
Parameters
==========
angle : Expr
The angle by which the new system is to be rotated
axis : Vector
The axis around which the rotation has to be performed
Examples
========
>>> from sympy.vector import CoordSys3D
>>> from sympy import symbols
>>> q1 = symbols('q1')
>>> N = CoordSys3D('N')
>>> from sympy.vector import AxisOrienter
>>> orienter = AxisOrienter(q1, N.i + 2 * N.j)
>>> B = N.orient_new('B', (orienter, ))
"""
# Dummy initializer for docstrings
pass
@cacheit
def rotation_matrix(self, system):
"""
The rotation matrix corresponding to this orienter
instance.
Parameters
==========
system : CoordSys3D
The coordinate system wrt which the rotation matrix
is to be computed
"""
axis = sympy.vector.express(self.axis, system).normalize()
axis = axis.to_matrix(system)
theta = self.angle
parent_orient = ((eye(3) - axis * axis.T) * cos(theta) +
Matrix([[0, -axis[2], axis[1]],
[axis[2], 0, -axis[0]],
[-axis[1], axis[0], 0]]) * sin(theta) +
axis * axis.T)
parent_orient = parent_orient.T
return parent_orient
@property
def angle(self):
return self._angle
@property
def axis(self):
return self._axis
class ThreeAngleOrienter(Orienter):
"""
Super-class for Body and Space orienters.
"""
def __new__(cls, angle1, angle2, angle3, rot_order):
approved_orders = ('123', '231', '312', '132', '213',
'321', '121', '131', '212', '232',
'313', '323', '')
original_rot_order = rot_order
rot_order = str(rot_order).upper()
if not (len(rot_order) == 3):
raise TypeError('rot_order should be a str of length 3')
rot_order = [i.replace('X', '1') for i in rot_order]
rot_order = [i.replace('Y', '2') for i in rot_order]
rot_order = [i.replace('Z', '3') for i in rot_order]
rot_order = ''.join(rot_order)
if rot_order not in approved_orders:
raise TypeError('Invalid rot_type parameter')
a1 = int(rot_order[0])
a2 = int(rot_order[1])
a3 = int(rot_order[2])
angle1 = sympify(angle1)
angle2 = sympify(angle2)
angle3 = sympify(angle3)
if cls._in_order:
parent_orient = (_rot(a1, angle1) *
_rot(a2, angle2) *
_rot(a3, angle3))
else:
parent_orient = (_rot(a3, angle3) *
_rot(a2, angle2) *
_rot(a1, angle1))
parent_orient = parent_orient.T
obj = super(ThreeAngleOrienter, cls).__new__(
cls, angle1, angle2, angle3, Symbol(original_rot_order))
obj._angle1 = angle1
obj._angle2 = angle2
obj._angle3 = angle3
obj._rot_order = original_rot_order
obj._parent_orient = parent_orient
return obj
@property
def angle1(self):
return self._angle1
@property
def angle2(self):
return self._angle2
@property
def angle3(self):
return self._angle3
@property
def rot_order(self):
return self._rot_order
class BodyOrienter(ThreeAngleOrienter):
"""
Class to denote a body-orienter.
"""
_in_order = True
def __new__(cls, angle1, angle2, angle3, rot_order):
obj = ThreeAngleOrienter.__new__(cls, angle1, angle2, angle3,
rot_order)
return obj
def __init__(self, angle1, angle2, angle3, rot_order):
"""
Body orientation takes this coordinate system through three
successive simple rotations.
Body fixed rotations include both Euler Angles and
Tait-Bryan Angles, see https://en.wikipedia.org/wiki/Euler_angles.
Parameters
==========
angle1, angle2, angle3 : Expr
Three successive angles to rotate the coordinate system by
rotation_order : string
String defining the order of axes for rotation
Examples
========
>>> from sympy.vector import CoordSys3D, BodyOrienter
>>> from sympy import symbols
>>> q1, q2, q3 = symbols('q1 q2 q3')
>>> N = CoordSys3D('N')
A 'Body' fixed rotation is described by three angles and
three body-fixed rotation axes. To orient a coordinate system D
with respect to N, each sequential rotation is always about
the orthogonal unit vectors fixed to D. For example, a '123'
rotation will specify rotations about N.i, then D.j, then
D.k. (Initially, D.i is same as N.i)
Therefore,
>>> body_orienter = BodyOrienter(q1, q2, q3, '123')
>>> D = N.orient_new('D', (body_orienter, ))
is same as
>>> from sympy.vector import AxisOrienter
>>> axis_orienter1 = AxisOrienter(q1, N.i)
>>> D = N.orient_new('D', (axis_orienter1, ))
>>> axis_orienter2 = AxisOrienter(q2, D.j)
>>> D = D.orient_new('D', (axis_orienter2, ))
>>> axis_orienter3 = AxisOrienter(q3, D.k)
>>> D = D.orient_new('D', (axis_orienter3, ))
Acceptable rotation orders are of length 3, expressed in XYZ or
123, and cannot have a rotation about an axis twice in a row.
>>> body_orienter1 = BodyOrienter(q1, q2, q3, '123')
>>> body_orienter2 = BodyOrienter(q1, q2, 0, 'ZXZ')
>>> body_orienter3 = BodyOrienter(0, 0, 0, 'XYX')
"""
# Dummy initializer for docstrings
pass
class SpaceOrienter(ThreeAngleOrienter):
"""
Class to denote a space-orienter.
"""
_in_order = False
def __new__(cls, angle1, angle2, angle3, rot_order):
obj = ThreeAngleOrienter.__new__(cls, angle1, angle2, angle3,
rot_order)
return obj
def __init__(self, angle1, angle2, angle3, rot_order):
"""
Space rotation is similar to Body rotation, but the rotations
are applied in the opposite order.
Parameters
==========
angle1, angle2, angle3 : Expr
Three successive angles to rotate the coordinate system by
rotation_order : string
String defining the order of axes for rotation
See Also
========
BodyOrienter : Orienter to orient systems wrt Euler angles.
Examples
========
>>> from sympy.vector import CoordSys3D, SpaceOrienter
>>> from sympy import symbols
>>> q1, q2, q3 = symbols('q1 q2 q3')
>>> N = CoordSys3D('N')
To orient a coordinate system D with respect to N, each
sequential rotation is always about N's orthogonal unit vectors.
For example, a '123' rotation will specify rotations about<|fim▁hole|>
>>> space_orienter = SpaceOrienter(q1, q2, q3, '312')
>>> D = N.orient_new('D', (space_orienter, ))
is same as
>>> from sympy.vector import AxisOrienter
>>> axis_orienter1 = AxisOrienter(q1, N.i)
>>> B = N.orient_new('B', (axis_orienter1, ))
>>> axis_orienter2 = AxisOrienter(q2, N.j)
>>> C = B.orient_new('C', (axis_orienter2, ))
>>> axis_orienter3 = AxisOrienter(q3, N.k)
>>> D = C.orient_new('C', (axis_orienter3, ))
"""
# Dummy initializer for docstrings
pass
class QuaternionOrienter(Orienter):
"""
Class to denote a quaternion-orienter.
"""
def __new__(cls, q0, q1, q2, q3):
q0 = sympify(q0)
q1 = sympify(q1)
q2 = sympify(q2)
q3 = sympify(q3)
parent_orient = (Matrix([[q0 ** 2 + q1 ** 2 - q2 ** 2 -
q3 ** 2,
2 * (q1 * q2 - q0 * q3),
2 * (q0 * q2 + q1 * q3)],
[2 * (q1 * q2 + q0 * q3),
q0 ** 2 - q1 ** 2 +
q2 ** 2 - q3 ** 2,
2 * (q2 * q3 - q0 * q1)],
[2 * (q1 * q3 - q0 * q2),
2 * (q0 * q1 + q2 * q3),
q0 ** 2 - q1 ** 2 -
q2 ** 2 + q3 ** 2]]))
parent_orient = parent_orient.T
obj = super(QuaternionOrienter, cls).__new__(cls, q0, q1, q2, q3)
obj._q0 = q0
obj._q1 = q1
obj._q2 = q2
obj._q3 = q3
obj._parent_orient = parent_orient
return obj
def __init__(self, angle1, angle2, angle3, rot_order):
"""
Quaternion orientation orients the new CoordSys3D with
Quaternions, defined as a finite rotation about lambda, a unit
vector, by some amount theta.
This orientation is described by four parameters:
q0 = cos(theta/2)
q1 = lambda_x sin(theta/2)
q2 = lambda_y sin(theta/2)
q3 = lambda_z sin(theta/2)
Quaternion does not take in a rotation order.
Parameters
==========
q0, q1, q2, q3 : Expr
The quaternions to rotate the coordinate system by
Examples
========
>>> from sympy.vector import CoordSys3D
>>> from sympy import symbols
>>> q0, q1, q2, q3 = symbols('q0 q1 q2 q3')
>>> N = CoordSys3D('N')
>>> from sympy.vector import QuaternionOrienter
>>> q_orienter = QuaternionOrienter(q0, q1, q2, q3)
>>> B = N.orient_new('B', (q_orienter, ))
"""
# Dummy initializer for docstrings
pass
@property
def q0(self):
return self._q0
@property
def q1(self):
return self._q1
@property
def q2(self):
return self._q2
@property
def q3(self):
return self._q3
def _rot(axis, angle):
"""DCM for simple axis 1, 2 or 3 rotations. """
if axis == 1:
return Matrix(rot_axis1(angle).T)
elif axis == 2:
return Matrix(rot_axis2(angle).T)
elif axis == 3:
return Matrix(rot_axis3(angle).T)<|fim▁end|> | N.i, then N.j, then N.k.
Therefore, |
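As the docstrings above show, these orienter classes are consumed through CoordSys3D.orient_new. A short usage sketch with standard sympy.vector calls; the symbols and axes chosen here are arbitrary:

# Usage sketch for the orienters defined above (standard sympy.vector API).
from sympy import symbols
from sympy.vector import CoordSys3D, AxisOrienter, BodyOrienter

q1, q2, q3 = symbols('q1 q2 q3')
N = CoordSys3D('N')

axis_orienter = AxisOrienter(q1, N.k)            # single rotation about N.k
A = N.orient_new('A', (axis_orienter,))
print(axis_orienter.rotation_matrix(N))          # AxisOrienter needs the system argument

body_orienter = BodyOrienter(q1, q2, q3, '123')  # body-fixed 1-2-3 rotation sequence
B = N.orient_new('B', (body_orienter,))
print(body_orienter.rotation_matrix())           # three-angle orienters take no argument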
<|file_name|>length_expr.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 Robert Clipsham <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>
extern crate pnet;
#[packet]
pub struct PacketWithPayload {
banana: u8,
#[length = "banana + 7.5"]
var_length: Vec<u8>,
#[payload]
payload: Vec<u8>
}
fn main() {}<|fim▁end|> | // error-pattern: Only field names, constants, integers, basic arithmetic expressions (+ - * / %) and parentheses are allowed in the "length" attribute
#![feature(custom_attribute, plugin)]
#![plugin(pnet_macros_plugin)] |
<|file_name|>test_component.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import unittest
import datetime
from cwr.parser.encoder.dictionary import ComponentDictionaryEncoder<|fim▁hole|>"""
ComponentRecord to dictionary encoding tests.
The following cases are tested:
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__status__ = 'Development'
class TestComponentRecordDictionaryEncoding(unittest.TestCase):
def setUp(self):
self._encoder = ComponentDictionaryEncoder()
def test_encoded(self):
data = ComponentRecord(record_type='COM',
transaction_sequence_n=3,
record_sequence_n=15,
title='TITLE',
writer_1_last_name='LAST NAME 1',
submitter_work_n='ABCD123',
writer_1_first_name='FIRST NAME 1',
writer_2_first_name='FIRST NAME 2',
writer_2_last_name='LAST NAME 2',
writer_1_ipi_base_n='I-000000229-7',
writer_1_ipi_name_n=14107338,
writer_2_ipi_base_n='I-000000339-7',
writer_2_ipi_name_n=14107400,
iswc='T0123456789',
duration=datetime.datetime.strptime('011200',
'%H%M%S').time())
encoded = self._encoder.encode(data)
self.assertEqual('COM', encoded['record_type'])
self.assertEqual(3, encoded['transaction_sequence_n'])
self.assertEqual(15, encoded['record_sequence_n'])
self.assertEqual('TITLE', encoded['title'])
self.assertEqual('LAST NAME 1', encoded['writer_1_last_name'])
self.assertEqual('ABCD123', encoded['submitter_work_n'])
self.assertEqual('FIRST NAME 1', encoded['writer_1_first_name'])
self.assertEqual('FIRST NAME 2', encoded['writer_2_first_name'])
self.assertEqual('LAST NAME 2', encoded['writer_2_last_name'])
self.assertEqual('LAST NAME 2', encoded['writer_2_last_name'])
self.assertEqual(14107338, encoded['writer_1_ipi_name_n'])
self.assertEqual(14107400, encoded['writer_2_ipi_name_n'])
self.assertEqual(datetime.datetime.strptime('011200', '%H%M%S').time(),
encoded['duration'])
self.assertEqual('I-000000229-7', encoded['writer_1_ipi_base_n'])
self.assertEqual('I-000000339-7', encoded['writer_2_ipi_base_n'])
self.assertEqual('T0123456789', encoded['iswc'])<|fim▁end|> | from cwr.work import ComponentRecord
|
<|file_name|>factories.py<|end_file_name|><|fim▁begin|>import factory
from .models import (FKDummyModel, O2ODummyModel, BaseModel, ManyToManyToBaseModel,
ForeignKeyToBaseModel, OneToOneToBaseModel, ClassLevel1, ClassLevel2, ClassLevel3,
ManyToManyToBaseModelWithRelatedName, ChildModel, SubClassOfBaseModel)
class FKDummyModelFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "FKDummyModelName#{number}".format(number=str(x)))
class Meta:
model = FKDummyModel
class O2ODummyModelFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "O2ODummyModelName#{number}".format(number=str(x)))
class Meta:<|fim▁hole|> model = O2ODummyModel
class BaseModelFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "BaseModelName#{number}".format(number=str(x)))
fkey = factory.SubFactory(FKDummyModelFactory)
o2o = factory.SubFactory(O2ODummyModelFactory)
class Meta:
model = BaseModel
class SubClassOfBaseModelFactory(BaseModelFactory):
class Meta:
model = SubClassOfBaseModel
class ManyToManyToBaseModelFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "MaynyToManyToBaseModelName#{number}".format(number=str(x)))
class Meta:
model = ManyToManyToBaseModel
@factory.post_generation
def base_models(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for base_model in extracted:
self.m2m.add(base_model)
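    # Illustrative usage, not part of the original module; names below are the
    # factories defined in this file. The post_generation hook above only runs for
    # the "create" strategy and only when related objects are passed in explicitly:
    #
    #     >>> bases = BaseModelFactory.create_batch(2)
    #     >>> obj = ManyToManyToBaseModelFactory.create(base_models=bases)
    #     >>> obj.m2m.count()
    #     2
    #
    # With the "build" strategy (no database row yet) the hook returns early and
    # the m2m relation stays empty.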
class ManyToManyToBaseModelWithRelatedNameFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "MaynyToManyToBaseModelName#{number}".format(number=str(x)))
class Meta:
model = ManyToManyToBaseModelWithRelatedName
@factory.post_generation
def base_models(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for base_model in extracted:
self.m2m.add(base_model)
class ForeignKeyToBaseModelFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "ForeignKeyToBseModelName#{number}".format(number=str(x)))
fkeyto = factory.SubFactory(BaseModelFactory)
class Meta:
model = ForeignKeyToBaseModel
class OneToOneToBaseModelFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "OneToOneToBaseModelName#{number}".format(number=str(x)))
o2oto = factory.SubFactory(BaseModelFactory)
class Meta:
model = OneToOneToBaseModel
class ClassLevel1Factory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "ClassLevel1#{number}".format(number=str(x)))
class Meta:
model = ClassLevel1
class ClassLevel2Factory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "ClassLevel2#{number}".format(number=str(x)))
fkey = factory.SubFactory(ClassLevel1Factory)
class Meta:
model = ClassLevel2
class ClassLevel3Factory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda x: "ClassLevel3#{number}".format(number=str(x)))
fkey = factory.SubFactory(ClassLevel2Factory)
class Meta:
model = ClassLevel3
class ChildModelFactory(BaseModelFactory):
child_field = factory.Sequence(lambda x: "ChildField#{number}".format(number=str(x)))
class Meta:
model = ChildModel<|fim▁end|> | |
<|file_name|>svm_how_to.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the<|fim▁hole|># following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from random import *
import numpy
import pdb
import cPickle
import bz2
import sys
import pylab
import nupic.bindings.algorithms as algo
from nupic.bindings.math import GetNumpyDataType
type = GetNumpyDataType('NTA_Real')
type = 'float32'
#--------------------------------------------------------------------------------
# Simple use case
#--------------------------------------------------------------------------------
def simple():
print "Simple"
numpy.random.seed(42)
n_dims = 2
n_class = 4
size = 200
labels = numpy.random.random_integers(0, n_class-1, size)
samples = numpy.zeros((size, n_dims), dtype=type)
do_plot = False
print "Generating data"
centers = numpy.array([[0,0],[0,1],[1,0],[1,1]])
for i in range(0, size):
t = 6.28 * numpy.random.random_sample()
samples[i][0] = 2 * centers[labels[i]][0] + .5*numpy.random.random() * numpy.cos(t)
samples[i][1] = 2 * centers[labels[i]][1] + .5*numpy.random.random() * numpy.sin(t)
classifier = algo.svm_dense(0, n_dims, probability=True, seed=42)
print "Adding sample vectors"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
classifier.add_sample(float(y), x)
print "Displaying problem"
problem = classifier.get_problem()
print "Problem size:", problem.size()
print "Problem dimensionality:", problem.n_dims()
print "Problem samples:"
s = numpy.zeros((problem.size(), problem.n_dims()+1), dtype=type)
problem.get_samples(s)
print s
if do_plot:
pylab.ion()
pylab.plot(s[s[:,0]==0,1], s[s[:,0]==0,2], '.', color='r')
pylab.plot(s[s[:,0]==1,1], s[s[:,0]==1,2], '+', color='b')
pylab.plot(s[s[:,0]==2,1], s[s[:,0]==2,2], '^', color='g')
pylab.plot(s[s[:,0]==3,1], s[s[:,0]==3,2], 'v', color='g')
print "Training"
classifier.train(gamma = 1./3., C = 100, eps=1e-1)
print "Displaying model"
model = classifier.get_model()
print "Number of support vectors:", model.size()
print "Number of classes:", model.n_class()
print "Number of dimensions: ", model.n_dims()
print "Support vectors:"
sv = numpy.zeros((model.size(), model.n_dims()), dtype=type)
model.get_support_vectors(sv)
print sv
if do_plot:
pylab.plot(sv[:,0], sv[:,1], 'o', color='g')
print "Support vector coefficients:"
svc = numpy.zeros((model.n_class()-1, model.size()), dtype=type)
model.get_support_vector_coefficients(svc)
print svc
print "Hyperplanes (for linear kernel only):"
h = model.get_hyperplanes()
print h
if do_plot:
xmin = numpy.min(samples[:,0])
xmax = numpy.max(samples[:,0])
xstep = (xmax - xmin) / 10
X = numpy.arange(xmin, xmax, xstep)
ymin = numpy.min(samples[:,1])
ymax = numpy.max(samples[:,1])
ystep = (ymax - ymin) / 10
Y = numpy.arange(ymin, ymax, ystep)
points = numpy.zeros((len(X), len(Y)))
for i,x in enumerate(X):
for j,y in enumerate(Y):
proba = numpy.zeros(model.n_class(), dtype=type)
classifier.predict_probability(numpy.array([x,y]), proba)
points[i,j] = proba[0]
pylab.contour(X,Y,points)
print "Cross-validation"
print classifier.cross_validate(2, gamma = .5, C = 10, eps = 1e-3)
print "Predicting"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
proba = numpy.zeros(model.n_class(), dtype=type)
print x, ': real=', y,
print 'p1=', classifier.predict(x),
print 'p2=', classifier.predict_probability(x, proba),
print 'proba=', proba
print "Discarding problem"
classifier.discard_problem()
print "Predicting after discarding the problem"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
proba = numpy.zeros(model.n_class(), dtype=type)
print x, ': real=', y,
print 'p1=', classifier.predict(x),
print 'p2=', classifier.predict_probability(x, proba),
print 'proba=', proba
#--------------------------------------------------------------------------------
# Persistence
#--------------------------------------------------------------------------------
def persistence():
print "Persistence"
numpy.random.seed(42)
n_dims = 2
n_class = 12
size = 100
labels = numpy.random.random_integers(0, 256, size)
samples = numpy.zeros((size, n_dims), dtype=type)
print "Generating data"
for i in range(0, size):
t = 6.28 * numpy.random.random_sample()
samples[i][0] = 2 * labels[i] + 1.5 * numpy.cos(t)
samples[i][1] = 2 * labels[i] + 1.5 * numpy.sin(t)
print "Creating dense classifier"
classifier = algo.svm_dense(0, n_dims = n_dims, seed=42)
print "Adding sample vectors to dense classifier"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
classifier.add_sample(float(y), x)
print "Pickling dense classifier"
cPickle.dump(classifier, open('test', 'wb'))
classifier = cPickle.load(open('test', 'rb'))
print "Training dense classifier"
classifier.train(gamma = 1, C = 10, eps=1e-1)
print "Predicting with dense classifier"
print classifier.predict(samples[0])
print "Creating 0/1 classifier"
classifier01 = algo.svm_01(n_dims = n_dims, seed=42)
print "Adding sample vectors to 0/1 classifier"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
classifier01.add_sample(float(y), x)
print "Training 0/1 classifier"
classifier01.train(gamma = 1./3., C = 100, eps=1e-1)
print "Pickling 0/1 classifier"
cPickle.dump(classifier01, open('test', 'wb'))
classifier01 = cPickle.load(open('test', 'rb'))
print "Predicting with 0/1 classifier"
print classifier01.predict(numpy.array(samples[0], dtype=type))
#--------------------------------------------------------------------------------
# Cross validation
#--------------------------------------------------------------------------------
def cross_validation():
return
print "Cross validation"
numpy.random.seed(42)
labels = [0, 1, 1, 2, 1, 2]
samples = [[0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 1, 1], [1, 1, 0], [0, 1, 1]]
classifier = algo.svm_dense(0, n_dims = 3, seed=42)
print "Adding sample vectors"
for y, x_list in zip(labels, samples):
x = numpy.array(x_list, dtype=type)
classifier.add_sample(float(y), x)
cPickle.dump(classifier, open('test', 'wb'))
classifier = cPickle.load(open('test', 'rb'))
print "Training"
classifier.train(gamma = 1./3., C = 100, eps=1e-1)
print "Cross validation =",
print classifier.cross_validate(3, gamma = .5, C = 10, eps = 1e-3)
#--------------------------------------------------------------------------------
simple()
persistence()
cross_validation()<|fim▁end|> | |
<|file_name|>vgSound3DEntry.cpp<|end_file_name|><|fim▁begin|>#include <vgStableHeaders.h>
#include "vgentry/vgSound3DEntry.h"
#include <vgUIController/vgPropertyPage.h>
#include <vgUIController/vgUIController.h>
#include <vgKernel/vgkVec3.h>
#include <vgMesh/vgmMeshManager.h>
//#include <vgMath/vgfVector3.h>
#include <vgKernel/vgkSelectManager.h>
PropertiesParam vgSound3DEntry::s_ParamArray[s_NumOfParam];
vgSound3DEntry::vgSound3DEntry(vgSound::Sound3D* renderer)
:vgBaseEntry( renderer )
{
sound= (vgSound::Sound3D *)renderer;
if (sound)
{
m_sCaption = sound->getName();
b_play = sound->getPlayFlag();
x = sound->getSoundPos().x + vgKernel::CoordSystem::getSingleton().getProjectionCoord().x;
y = sound->getSoundPos().y + vgKernel::CoordSystem::getSingleton().getProjectionCoord().y;
z = sound->getSoundPos().z + vgKernel::CoordSystem::getSingleton().getProjectionCoord().z;
zMinus = -z;
sound->registerObserver( this );
vgKernel::CoordSystem::getSingleton().registerObserver(this);
}
}
vgSound3DEntry::~vgSound3DEntry(void)
{
sound->unregisterObserver( this );
sound = NULL;
}
void vgSound3DEntry::OnPropertyChanged(string paramName)
{
z = - zMinus;
vgSound::Sound3D *sound = (vgSound::Sound3D *)m_Renderer;
vgKernel::Vec3 aa = vgKernel::CoordSystem::getSingleton().getProjectionCoord();
sound->setAbsolutePos( x - aa.x, y - aa.y ,z - aa.z);
/* sound->GenBoundaryBox(sound->getSoundPos());*/
sound->setPlayFlag( b_play );
if (b_play)
{
sound->startPlaying( true );
}
else
sound->stopPlaying();
	// Update the tree item text
vgUI::UIController::getSingleton().GetWorkSpaceBar()->SetItemText(hTreeItem, m_Renderer->getName());
vgKernel::SelectManager::getSingleton().updateBox();
}
void vgSound3DEntry::onChanged(int eventId, void *param)
{
if (eventId == vgKernel::VG_OBS_PROPCHANGED)
{
vgSound::Sound3D *sound = (vgSound::Sound3D *)m_Renderer;
vgKernel::Vec3 xx = sound->getSoundPos();
x = xx.x + vgKernel::CoordSystem::getSingleton().getProjectionCoord().x;
y = xx.y + vgKernel::CoordSystem::getSingleton().getProjectionCoord().y;
z = xx.z + vgKernel::CoordSystem::getSingleton().getProjectionCoord().z;
//TRACE("New Camera Position %.2f %.2f %.2f \n", posPtr->x, posPtr->y, posPtr->z);
if (this == vgUI::UIController::getSingleton().GetCurrentSelectedNode())
{
s_ParamArray[1].pProp->SetValue(x);
s_ParamArray[2].pProp->SetValue(y);
zMinus = -z;
s_ParamArray[3].pProp->SetValue(zMinus);
}
}
if (eventId == vgKernel::VG_OBS_SELECTCHAGNED)
{
vgUI::UIController::getSingleton().SelectNode(this);
}
if (eventId == vgKernel::VG_OBS_ADDSELECTION)
{
vgUI::UIController::getSingleton().AddSelection(this);
return ;
}
}
void vgSound3DEntry::AddNodeTabs()
{
vgUI::UIController::getSingleton().RemoveAllPages();
<|fim▁hole|> s_ParamArray[0].typeId = PROP_ITEM_GROUP;
s_ParamArray[0].dataType = PROP_DATA_NONE;
s_ParamArray[0].connectedPtr = NULL;
	s_ParamArray[0].comment = "Set the camera coordinates";
	s_ParamArray[1].label = "X coordinate";
s_ParamArray[1].typeId = PROP_ITEM_DATA;
s_ParamArray[1].dataType = PROP_DATA_FLOAT;
s_ParamArray[1].connectedPtr = &x;
	s_ParamArray[1].comment = "Set the X coordinate";
	s_ParamArray[2].label = "Y coordinate";
s_ParamArray[2].typeId = PROP_ITEM_DATA;
s_ParamArray[2].dataType = PROP_DATA_FLOAT;
s_ParamArray[2].connectedPtr = &y;
	s_ParamArray[2].comment = "Set the Y coordinate";
	s_ParamArray[3].label = "Z coordinate";
s_ParamArray[3].typeId = PROP_ITEM_DATA;
s_ParamArray[3].dataType = PROP_DATA_FLOAT;
s_ParamArray[3].connectedPtr = &zMinus;
	s_ParamArray[3].comment = "Set the Z coordinate";
	s_ParamArray[4].label = "Other settings";
s_ParamArray[4].typeId = PROP_ITEM_GROUP;
s_ParamArray[4].dataType = PROP_DATA_NONE;
s_ParamArray[4].connectedPtr = NULL;
s_ParamArray[4].comment = string();
	s_ParamArray[5].label = "Sound effect name";
s_ParamArray[5].typeId = PROP_ITEM_DATA;
s_ParamArray[5].dataType = PROP_DATA_STR;
s_ParamArray[5].connectedPtr = m_Renderer->getNamePtr();
	s_ParamArray[5].comment = "Name of the object";
	s_ParamArray[6].label = "Whether to play";
s_ParamArray[6].typeId = PROP_ITEM_DATA;
s_ParamArray[6].dataType = PROP_DATA_BOOL;
s_ParamArray[6].connectedPtr = &b_play;
	s_ParamArray[6].comment = "Whether to play";
vgPropertyPage* propPage = vgUI::UIController::getSingleton().GetPropPage();
propPage->Create(NIDD_PROPERTY, pageViewBar->GetTabControl());
propPage->ConnectNode(this, s_ParamArray, s_NumOfParam);
	pageViewBar->AddTab("Auto properties", propPage);
}
CMenu* vgSound3DEntry::GetContextMenu()
{
CMenu *menu = new CMenu;
VERIFY(menu->CreatePopupMenu());
	// VERIFY(menu->AppendMenu(MF_STRING, NID_MESH_GOTO, _T("Go to")));
	VERIFY(menu->AppendMenu(MF_STRING, NID_MESH_DELETE, _T("Delete")));
return menu;
}<|fim▁end|> | vgPropertiesViewBar* pageViewBar = vgUI::UIController::getSingleton().GetPropertiesViewBar();
	s_ParamArray[0].label = "Coordinate settings";
|
<|file_name|>special_math_ops_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.ops.special_math_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import special_math_ops
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
class LBetaTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def test_one_dimensional_arg(self):
# Should evaluate to 1 and 1/2.
x_one = [1, 1.]
x_one_half = [2, 1.]
with self.session(use_gpu=True):
self.assertAllClose(
1, self.evaluate(math_ops.exp(special_math_ops.lbeta(x_one))))
self.assertAllClose(
0.5, self.evaluate(math_ops.exp(special_math_ops.lbeta(x_one_half))))
self.assertEqual([], special_math_ops.lbeta(x_one).get_shape())
def test_one_dimensional_arg_dynamic(self):
# Should evaluate to 1 and 1/2.
x_one = [1, 1.]
x_one_half = [2, 1.]
with self.session(use_gpu=True):
ph = array_ops.placeholder(dtypes.float32)
beta_ph = math_ops.exp(special_math_ops.lbeta(ph))
self.assertAllClose(1, beta_ph.eval(feed_dict={ph: x_one}))
self.assertAllClose(0.5,
beta_ph.eval(feed_dict={ph: x_one_half}))
def test_four_dimensional_arg_with_partial_shape_dynamic(self):
x_ = np.ones((3, 2, 3, 4))
# Gamma(1) = 0! = 1
# Gamma(1 + 1 + 1 + 1) = Gamma(4) = 3! = 6
# ==> Beta([1, 1, 1, 1])
# = Gamma(1) * Gamma(1) * Gamma(1) * Gamma(1) / Gamma(1 + 1 + 1 + 1)
# = 1 / 6
expected_beta_x = 1 / 6 * np.ones((3, 2, 3))
with self.session(use_gpu=True):
x_ph = array_ops.placeholder(dtypes.float32, [3, 2, 3, None])
beta_ph = math_ops.exp(special_math_ops.lbeta(x_ph))
self.assertAllClose(expected_beta_x,
beta_ph.eval(feed_dict={x_ph: x_}))
@test_util.run_in_graph_and_eager_modes
def test_two_dimensional_arg(self):
# Should evaluate to 1/2.
x_one_half = [[2, 1.], [2, 1.]]
with self.session(use_gpu=True):
self.assertAllClose(
[0.5, 0.5],
self.evaluate(math_ops.exp(special_math_ops.lbeta(x_one_half))))
self.assertEqual((2,), special_math_ops.lbeta(x_one_half).get_shape())
def test_two_dimensional_arg_dynamic(self):
# Should evaluate to 1/2.
x_one_half = [[2, 1.], [2, 1.]]
with self.session(use_gpu=True):
ph = array_ops.placeholder(dtypes.float32)
beta_ph = math_ops.exp(special_math_ops.lbeta(ph))
self.assertAllClose([0.5, 0.5],
beta_ph.eval(feed_dict={ph: x_one_half}))
@test_util.run_in_graph_and_eager_modes
def test_two_dimensional_proper_shape(self):
# Should evaluate to 1/2.
x_one_half = [[2, 1.], [2, 1.]]
with self.session(use_gpu=True):
self.assertAllClose(
[0.5, 0.5],
self.evaluate(math_ops.exp(special_math_ops.lbeta(x_one_half))))
self.assertEqual(
(2,),
self.evaluate(array_ops.shape(special_math_ops.lbeta(x_one_half))))
self.assertEqual(
tensor_shape.TensorShape([2]),
special_math_ops.lbeta(x_one_half).get_shape())
@test_util.run_in_graph_and_eager_modes
def test_complicated_shape(self):
with self.session(use_gpu=True):
x = ops.convert_to_tensor(np.random.rand(3, 2, 2))
self.assertAllEqual(
(3, 2), self.evaluate(array_ops.shape(special_math_ops.lbeta(x))))
self.assertEqual(
tensor_shape.TensorShape([3, 2]),
special_math_ops.lbeta(x).get_shape())
@test_util.run_in_graph_and_eager_modes
def test_length_1_last_dimension_results_in_one(self):
# If there is only one coefficient, the formula still works, and we get one
# as the answer, always.
x_a = [5.5]
x_b = [0.1]
with self.session(use_gpu=True):
self.assertAllClose(
1, self.evaluate(math_ops.exp(special_math_ops.lbeta(x_a))))
self.assertAllClose(
1, self.evaluate(math_ops.exp(special_math_ops.lbeta(x_b))))
self.assertEqual((), special_math_ops.lbeta(x_a).get_shape())
@test_util.run_in_graph_and_eager_modes
def test_empty_rank1_returns_negative_infinity(self):
with self.session(use_gpu=True):
x = constant_op.constant([], shape=[0])
lbeta_x = special_math_ops.lbeta(x)
expected_result = constant_op.constant(-np.inf, shape=())
self.assertAllEqual(self.evaluate(expected_result),
self.evaluate(lbeta_x))
self.assertEqual(expected_result.get_shape(), lbeta_x.get_shape())
@test_util.run_in_graph_and_eager_modes
def test_empty_rank2_with_zero_last_dim_returns_negative_infinity(self):
with self.session(use_gpu=True):
event_size = 0
for batch_size in [0, 1, 2]:
x = constant_op.constant([], shape=[batch_size, event_size])
lbeta_x = special_math_ops.lbeta(x)
expected_result = constant_op.constant(-np.inf, shape=[batch_size])
self.assertAllEqual(self.evaluate(expected_result),
self.evaluate(lbeta_x))
self.assertEqual(expected_result.get_shape(), lbeta_x.get_shape())
@test_util.run_in_graph_and_eager_modes
def test_empty_rank2_with_zero_batch_dim_returns_empty(self):
with self.session(use_gpu=True):
batch_size = 0
for event_size in [0, 1, 2]:
x = constant_op.constant([], shape=[batch_size, event_size])
lbeta_x = special_math_ops.lbeta(x)
expected_result = constant_op.constant([], shape=[batch_size])
self.assertAllEqual(self.evaluate(expected_result),
self.evaluate(lbeta_x))
self.assertEqual(expected_result.get_shape(), lbeta_x.get_shape())
class BesselTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def test_bessel_i0(self):
x_single = np.arange(-3, 3).reshape(1, 3, 2).astype(np.float32)
x_double = np.arange(-3, 3).reshape(1, 3, 2).astype(np.float64)
try:
from scipy import special # pylint: disable=g-import-not-at-top
self.assertAllClose(special.i0(x_single),
self.evaluate(special_math_ops.bessel_i0(x_single)))
self.assertAllClose(special.i0(x_double),
self.evaluate(special_math_ops.bessel_i0(x_double)))
except ImportError as e:
tf_logging.warn('Cannot test special functions: %s' % str(e))
@test_util.run_in_graph_and_eager_modes
def test_bessel_i1(self):
x_single = np.arange(-3, 3).reshape(1, 3, 2).astype(np.float32)
x_double = np.arange(-3, 3).reshape(1, 3, 2).astype(np.float64)
try:
from scipy import special # pylint: disable=g-import-not-at-top
self.assertAllClose(special.i1(x_single),
self.evaluate(special_math_ops.bessel_i1(x_single)))
self.assertAllClose(special.i1(x_double),
self.evaluate(special_math_ops.bessel_i1(x_double)))
except ImportError as e:
tf_logging.warn('Cannot test special functions: %s' % str(e))
class EinsumTest(test.TestCase):
simple_cases = [
'ij,jk->ik',
'ijk,jklm->il',
'ij,jk,kl->il',
'ijk->i',
'ijk->kji',
'ji,kj->ik',
'ikl,kji->kl',
'klj,lki->ij',
'ijk,ilj->kli',
'kij,mkb->ijmb',
'ijk,ijl,ikl->i',
'i,ijk,j->k',
'ij,ij,jk,kl->il',
'ij,kj,il,jm->ml',
'a,ab,abc->abc',
'a,b,ab->ab',
'ab,ab,c->',
'ab,ab,c->c',
'ab,ab,cd,cd->',
'ab,ab,cd,cd->ac',
'ab,ab,cd,cd->cd',
'ab,ab,cd,cd,ef,ef->',
'ab,cd,ef->abcdef',
'ab,cd,ef->acdf',<|fim▁hole|> 'ab,cd,de->abcde',
'ab,cd,de->be',
'ab,bcd,cd->abcd',
'ab,bcd,cd->abd',
'eb,cb,fb->cef',
'abcd,ad',
'bd,db,eac->ace',
'ba,ac,da->bcd',
'ab,ab',
'ab,ba',
'abc,abc',
'abc,bac',
'abc,cba',
'dba,ead,cad->bce',
'aef,fbc,dca->bde',
'iJ,Jk->ik',
'iJ,Ki->JK',
      'iJk,Jklm->Jk',
'ij, jk, kl -> il',
'a, ab, abc -> abc',
'ab, ab, cd, cd, ef, ef -> ',
'abc, bac',
'iJ, Ki -> JK',
'iJk, Jklm -> Jk'
]
long_cases = [
'bca,cdb,dbf,afc->',
'efc,dbc,acf,fd->abe',
'ea,fb,gc,hd,abcd->efgh',
'ea,fb,abcd,gc,hd->efgh',
'abhe,hidj,jgba,hiab,gab',
'efc, dbc, acf, fd -> abe',
'abhe, hidj, jgba, hiab, gab',
]
invalid_cases = [
# bad formats
'',
'ijk ijk',
'ij.jk->ik',
'ij...,jk...->ik...',
'ij,k ->kji',
'ij,k-> kji',
# axis in output that does not exist
'ij,jk->im',
# incorrect number of dimensions
'ij,jkl->kl',
# this is allowed in numpy but not implemented here yet
'iij,jk'
]
dim_mismatch_cases = [('ijk,jkl->il', [(2, 3, 4), (3, 5, 6)])]
def disabled_test_simple(self):
for case in self.simple_cases:
self.run_test(case)
def test_long(self):
for case in self.long_cases:
self.run_test(case)
def test_invalid(self):
for axes in self.invalid_cases:
inputs = [
array_ops.placeholder(dtypes.float32, shape=(3, 4)),
array_ops.placeholder(dtypes.float32, shape=(3, 4)),
]
with self.assertRaises(ValueError):
_ = special_math_ops.einsum(axes, *inputs)
def test_invalid_keyword_arguments(self):
m0 = array_ops.placeholder(dtypes.int32, shape=(1, None))
m1 = array_ops.placeholder(dtypes.int32, shape=(None, 1))
with self.assertRaisesRegexp(
TypeError,
'invalid keyword arguments for this function: invalid1, invalid2'):
_ = special_math_ops.einsum(
'ij,jk->ik',
m0,
m1,
name='name',
invalid1='value1',
invalid2='value2')
def test_dim_mismatch(self):
for axes, input_shapes in self.dim_mismatch_cases:
inputs = [
array_ops.placeholder(dtypes.float32, shape=shape)
for shape in input_shapes
]
with self.assertRaises(ValueError):
_ = special_math_ops.einsum(axes, *inputs)
def run_test(self, axes):
all_axes = {ax: np.random.randint(4, 12) for ax in axes if ax.isalpha()}
input_vals = []
input_axes, _, _ = axes.partition('->')
for idx in input_axes.split(','):
shape = [all_axes[ax] for ax in idx if ax.isalpha()]
input_vals.append(np.random.random(shape))
input_tensors = [constant_op.constant(val) for val in input_vals]
output_tensor = special_math_ops.einsum(axes, *input_tensors)
with self.session(use_gpu=True):
output_value = self.evaluate(output_tensor)
correct_value = np.einsum(axes, *input_vals)
err = np.abs(correct_value - output_value).max()
# print(axes, err)
self.assertLess(err, 1e-8)
def test_input_is_placeholder(self):
with ops.Graph().as_default():
m0 = array_ops.placeholder(dtypes.int32, shape=(1, None))
m1 = array_ops.placeholder(dtypes.int32, shape=(None, 1))
out = special_math_ops.einsum('ij,jk->ik', m0, m1)
with session.Session() as sess:
feed_dict = {
m0: [[1, 2, 3]],
m1: [[2], [1], [1]],
}
self.assertAllClose([[7]], sess.run(out, feed_dict=feed_dict))
with ops.Graph().as_default():
m0 = array_ops.placeholder(dtypes.int32, shape=(None, 3))
m1 = array_ops.placeholder(dtypes.int32, shape=(3,))
out = special_math_ops.einsum('ij,j->i', m0, m1)
with session.Session() as sess:
feed_dict = {
m0: [[1, 2, 3]],
m1: [2, 1, 1],
}
self.assertAllClose([7], sess.run(out, feed_dict=feed_dict))
# Tests for placeholders which have two or more None values
with ops.Graph().as_default():
m0 = array_ops.placeholder(dtypes.int32, shape=(None, None, 2))
m1 = array_ops.placeholder(dtypes.int32, shape=(2, 1))
out = special_math_ops.einsum('ijk,kl->ijl', m0, m1)
with session.Session() as sess:
feed_dict = {
m0: [[[1, 2]]],
m1: [[3], [2]],
}
self.assertAllClose([[[7]]], sess.run(out, feed_dict=feed_dict))
with ops.Graph().as_default():
m0 = array_ops.placeholder(dtypes.int32, shape=(2, 1))
m1 = array_ops.placeholder(dtypes.int32, shape=(None, None, 2))
out = special_math_ops.einsum('kl,ijk->ijl', m0, m1)
with session.Session() as sess:
feed_dict = {
m0: [[3], [2]],
m1: [[[1, 2]]],
}
self.assertAllClose([[[7]]], sess.run(out, feed_dict=feed_dict))
with ops.Graph().as_default():
m0 = array_ops.placeholder(dtypes.int32, shape=(None, None, 2))
m1 = array_ops.placeholder(dtypes.int32, shape=(2,))
out = special_math_ops.einsum('ijk,k->ij', m0, m1)
with session.Session() as sess:
feed_dict = {
m0: [[[1, 2]]],
m1: [3, 2],
}
self.assertAllClose([[7]], sess.run(out, feed_dict=feed_dict))
with ops.Graph().as_default():
m0 = array_ops.placeholder(dtypes.int32, shape=(None, 2, None, 2))
m1 = array_ops.placeholder(dtypes.int32, shape=(None, 2))
out = special_math_ops.einsum('ijkl,ij->ikl', m0, m1)
with session.Session() as sess:
feed_dict = {
m0: [[[[1, 2]], [[2, 1]]]],
m1: [[3, 2]],
}
self.assertAllClose([[[7, 8]]], sess.run(out, feed_dict=feed_dict))
if __name__ == '__main__':
test.main()<|fim▁end|> | |
<|file_name|>connection.py<|end_file_name|><|fim▁begin|># Copyright 2009 Max Klymyshyn, Sonettic
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import socket
import subprocess
from apnsexceptions import *
from utils import *
class APNSConnectionContext(object):
certificate = None
def __init__(self, certificate = None):
self.certificate = certificate
def connect(self, host, port):
raise APNSNotImplementedMethod, "APNSConnectionContext.connect ssl method not implemented in context"
    def write(self, data = None):
raise APNSNotImplementedMethod, "APNSConnectionContext.write method not implemented"
def read(self):
raise APNSNotImplementedMethod, "APNSConnectionContext.read method not implemented"
def close(self):
raise APNSNotImplementedMethod, "APNSConnectionContext.close method not implemented"
class OpenSSLCommandLine(APNSConnectionContext):
"""
This class execute and send data with openssl command line tool
"""
certificate = None
host = None
port = None
executable = None
debug = False
def __init__(self, certificate = None, executable = None, debug = False):
self.certificate = certificate
self.executable = executable
self.debug = debug
def connect(self, host, port):
self.host = host
self.port = port
def _command(self):
command = "%(executable)s s_client -ssl3 -cert %(cert)s -connect %(host)s:%(port)s" % \
{
'executable' : self.executable,
'cert' : self.certificate,
'host' : self.host,
'port' : self.port
}
return subprocess.Popen(command.split(' '), shell=False, bufsize=256, \
stdin=subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
def write(self, data = None):
pipe = self._command()
std_in = pipe.stdin<|fim▁hole|> std_in.close()
std_out = pipe.stdout
if self.debug:
print "-------------- SSL Debug Output --------------"
            print "%s s_client -ssl3 -cert %s -connect %s:%s" % (self.executable, self.certificate, self.host, self.port)
print "----------------------------------------------"
print std_out.read()
std_out.close()
pipe.wait()
def read(self, blockSize = 1024):
"""
There is method to read data from feedback service.
WARNING! It's not tested and doesn't work yet!
"""
pipe = self._command()
std_out = pipe.stdout
data = std_out.read()
#pipe.wait()
std_out.close()
return data
def context(self):
return self
def close(self):
pass
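# Illustrative note, based only on the code above; the certificate path, host and
# port are example values, not constants from this module. For certificate
# "apns.pem", host "gateway.push.apple.com" and port 2195, _command() spawns
# roughly:
#
#     openssl s_client -ssl3 -cert apns.pem -connect gateway.push.apple.com:2195
#
# with stdin/stdout/stderr piped, and write() then streams the binary
# notification payload into the child's stdin.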
class SSLModuleConnection(APNSConnectionContext):
"""
This is class which implement APNS connection based on
"ssl" module.
"""
socket = None
certificate = None
connectionContext = None
ssl_module = None
def __init__(self, certificate = None, ssl_module = None):
self.socket = None
self.connectionContext = None
self.certificate = certificate
self.ssl_module = ssl_module
def context(self):
"""
Initialize SSL context.
"""
if self.connectionContext != None:
return self
self.socket = socket.socket()
self.connectionContext = self.ssl_module.wrap_socket(
self.socket,
ssl_version = self.ssl_module.PROTOCOL_TLSv1,
certfile = self.certificate
)
return self
def certificate(self, path):
self.certificate = path
return self
def read(self, blockSize = 1024):
"""
Make connection to the host and port.
"""
return self.connectionContext.read(blockSize)
def write(self, data = None):
"""
Make connection to the host and port.
"""
self.connectionContext.write(data)
def connect(self, host, port):
"""
Make connection to the host and port.
"""
self.connectionContext.connect((host, port))
def close(self):
"""
Close connection.
"""
self.connectionContext.close()
self.socket.close()
class APNSConnection(APNSConnectionContext):
"""
APNSConnection wrap SSL connection to the Apple Push Notification Server.
"""
debug = False
connectionContext = None
def __init__(self, certificate = None,
ssl_command = "openssl",
force_ssl_command = False,
disable_executable_search = False,
debug = False):
self.connectionContext = None
self.debug = debug
if not os.path.exists(str(certificate)):
raise APNSCertificateNotFoundError, "Apple Push Notification Service Certificate file %s not found." % str(certificate)
try:
if force_ssl_command:
raise ImportError, "There is force_ssl_command forces command line tool"
# use ssl library to handle secure connection
import ssl as ssl_module
self.connectionContext = SSLModuleConnection(certificate, ssl_module = ssl_module)
except:
# use command line openssl tool to handle secure connection
if not disable_executable_search:
executable = find_executable(ssl_command)
else:
executable = ssl_command
if not executable:
raise APNSNoCommandFound, "SSL Executable [%s] not found in your PATH environment" % str(ssl_command)
self.connectionContext = OpenSSLCommandLine(certificate, executable, debug = debug)
self.certificate = str(certificate)
def connect(self, host, port):
"""
Make connection to the host and port.
"""
self.context().connect(host, port)
return self
def certificate(self, path):
self.context().certificate(path)
return self
def write(self, data = None):
self.context().write(data)
def read(self, blockSize = 1024):
return self.context().read(blockSize)
def context(self):
if not self.connectionContext:
raise APNSNoSSLContextFound, "There is no SSL context available in your python environment."
return self.connectionContext.context()
def close(self):
"""
Close connection.
"""
self.context().close()<|fim▁end|> | std_in.write(data)
std_in.flush() |
<|file_name|>label.ts<|end_file_name|><|fim▁begin|>import { Component, Input } from 'angular2/core';
@Component({
selector: 'rio-label',
template: `
<label [id]="qaid">
<ng-content></ng-content><|fim▁hole|>export class RioLabel {
@Input() qaid: string;
};<|fim▁end|> | </label>
`
}) |
<|file_name|>packer-test.rs<|end_file_name|><|fim▁begin|>extern crate image;
extern crate texture_packer;
use std::path::Path;
use std::fs::File;
use texture_packer::texture::Texture;
use texture_packer::{ TexturePacker, TexturePackerConfig };
use texture_packer::importer::ImageImporter;
use texture_packer::exporter::ImageExporter;
const MAX_IMAGE_WIDTH: u32 = 400;
const MAX_IMAGE_HEIGHT: u32 = 400;
fn main() {
let mut config = TexturePackerConfig::default();
config.max_width = MAX_IMAGE_WIDTH;
config.max_height = MAX_IMAGE_HEIGHT;
config.allow_rotation = false;
config.texture_outlines = true;
config.border_padding = 2;
let ref mut texture_packer = TexturePacker::new_skyline(config);
for i in 1 .. 11 {
let file = format!("{}.png", i);
let ref path = ["./examples/assets/", &file[..]].concat();
let ref path = Path::new(path);
let texture = ImageImporter::import_from_file(path).unwrap();
texture_packer.pack_own(file, texture);
}
let image = ImageExporter::export(texture_packer).unwrap();
let path = "./examples/output/skyline-packer-output.png";
let ref path = Path::new(path);
let ref mut file = File::create(path).unwrap();<|fim▁hole|> println!("{} x {}", texture_packer.width(), texture_packer.height());
image.save(file, image::PNG).unwrap();
}<|fim▁end|> | |
<|file_name|>stream.py<|end_file_name|><|fim▁begin|># mhkutil - A utility for dealing with Mohawk archives
#
# mhkutil is the legal property of its developers, whose names
# can be found in the AUTHORS file distributed with this source
# distribution.
#
# mhkutil is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# mhkutil is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mhkutil. If not, see <http://www.gnu.org/licenses/>.
import os
import struct
# TODO: Find a better place for this
def makeTag(text):
if len(text) != 4:
raise Exception('Invalid text size {0}'.format(len(text)))
return struct.unpack('>L', text)[0]
# TODO: Find a better place for this
def tagToString(tag):
return struct.pack('>L', tag)
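# Illustrative sketch, not part of the original module. makeTag/tagToString
# round-trip a four-character code through a big-endian 32-bit integer; 'MHWK'
# is used here only as an example tag:
#
#     >>> hex(makeTag('MHWK'))
#     '0x4d48574b'
#     >>> tagToString(0x4d48574b)
#     'MHWK'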
class Stream:
def readByte(self):
return struct.unpack('B', self.read(1))[0]
def readSByte(self):
return struct.unpack('b', self.read(1))[0]
def readUint16LE(self):
return struct.unpack('<H', self.read(2))[0]
def readSint16LE(self):
return struct.unpack('<h', self.read(2))[0]
def readUint16BE(self):
return struct.unpack('>H', self.read(2))[0]
def readSint16BE(self):
return struct.unpack('>h', self.read(2))[0]
def readUint32LE(self):
return struct.unpack('<L', self.read(4))[0]
def readSint32LE(self):
return struct.unpack('<l', self.read(4))[0]
def readUint32BE(self):
return struct.unpack('>L', self.read(4))[0]
def readSint32BE(self):
return struct.unpack('>l', self.read(4))[0]
def readCString(self):
text = ''
while True:
char = self.readByte()
if char == 0:<|fim▁hole|> text += chr(char)
return text
class WriteStream:
def writeByte(self, x):
self.write(struct.pack('B', x))
def writeSByte(self, x):
self.write(struct.pack('b', x))
def writeUint16LE(self, x):
self.write(struct.pack('<H', x))
def writeSint16LE(self, x):
self.write(struct.pack('<h', x))
def writeUint16BE(self, x):
self.write(struct.pack('>H', x))
def writeSint16BE(self, x):
self.write(struct.pack('>h', x))
def writeUint32LE(self, x):
self.write(struct.pack('<L', x))
def writeSint32LE(self, x):
self.write(struct.pack('<l', x))
def writeUint32BE(self, x):
self.write(struct.pack('>L', x))
def writeSint32BE(self, x):
self.write(struct.pack('>l', x))
class FileStream(Stream):
def __init__(self, handle):
self._handle = handle
handle.seek(0, os.SEEK_END)
self._size = handle.tell()
handle.seek(0)
def tell(self):
return self._handle.tell()
def size(self):
return self._size
def seek(self, offset, whence=os.SEEK_SET):
return self._handle.seek(offset, whence)
def read(self, size):
return bytearray(self._handle.read(size))
class FileWriteStream(WriteStream):
def __init__(self, handle):
self._handle = handle
def write(self, x):
self._handle.write(x)
class ByteStream(Stream):
def __init__(self, data):
self._data = data
self._pos = 0
def tell(self):
return self._pos
def size(self):
return len(self._data)
def seek(self, offset, whence=os.SEEK_SET):
if whence == os.SEEK_CUR:
self._pos += offset
elif whence == os.SEEK_END:
self._pos = len(self._data) + offset
else:
self._pos = offset
def read(self, size):
if size == 0:
return bytearray()
start = self._pos
end = start + size
self._pos = end
return self._data[start:end]<|fim▁end|> | break
|
<|file_name|>0004_remove_extract_provider.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('finance', '0003_auto_20140929_0130'),
]
operations = [<|fim▁hole|> migrations.RemoveField(
model_name='extract',
name='provider',
),
]<|fim▁end|> | |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010-2013 by Yaco Sistemas <[email protected]> or <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this programe. If not, see <http://www.gnu.org/licenses/>.
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
INPLACEEDIT_EDIT_EMPTY_VALUE = (getattr(settings, 'INPLACEEDIT_EDIT_EMPTY_VALUE', None) and<|fim▁hole|>INPLACEEDIT_EDIT_MESSAGE_TRANSLATION = (getattr(settings, 'INPLACEEDIT_EDIT_MESSAGE_TRANSLATION', None) and
_(settings.INPLACEEDIT_EDIT_MESSAGE_TRANSLATION) or _('Write a translation'))
INPLACEEDIT_SUCCESS_TEXT = (getattr(settings, 'INPLACEEDIT_SUCCESS_TEXT', None) and
_(settings.INPLACEEDIT_SUCCESS_TEXT) or _('Successfully saved'))
INPLACEEDIT_UNSAVED_TEXT = (getattr(settings, 'INPLACEEDIT_UNSAVED_TEXT', None) and
_(settings.INPLACEEDIT_UNSAVED_TEXT) or _('You have unsaved changes!'))
INPLACE_ENABLE_CLASS = getattr(settings, 'ADAPTOR_INPLACEEDIT_EDIT', 'enable')
DEFAULT_INPLACE_EDIT_OPTIONS = getattr(settings, "DEFAULT_INPLACE_EDIT_OPTIONS", {})
DEFAULT_INPLACE_EDIT_OPTIONS_ONE_BY_ONE = getattr(settings, 'DEFAULT_INPLACE_EDIT_OPTIONS_ONE_BY_ONE', False)
ADAPTOR_INPLACEEDIT_EDIT = getattr(settings, 'ADAPTOR_INPLACEEDIT_EDIT', None)
ADAPTOR_INPLACEEDIT = getattr(settings, 'ADAPTOR_INPLACEEDIT', {})
INPLACE_GET_FIELD_URL = getattr(settings, 'INPLACE_GET_FIELD_URL', None)
INPLACE_SAVE_URL = getattr(settings, 'INPLACE_SAVE_URL', None)
INPLACE_FIELD_TYPES = getattr(settings, 'INPLACE_FIELD_TYPES', 'input, select, textarea')
INPLACE_FOCUS_WHEN_EDITING = getattr(settings, 'INPLACE_FOCUS_WHEN_EDITING', True)<|fim▁end|> | _(settings.INPLACEEDIT_EDIT_EMPTY_VALUE) or _('Doubleclick to edit'))
INPLACEEDIT_AUTO_SAVE = getattr(settings, 'INPLACEEDIT_AUTO_SAVE', False)
INPLACEEDIT_EVENT = getattr(settings, 'INPLACEEDIT_EVENT', 'dblclick')
INPLACEEDIT_DISABLE_CLICK = getattr(settings, 'INPLACEEDIT_DISABLE_CLICK', True) |
<|file_name|>empty_dir.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package common
import (
"fmt"
"path"
. "github.com/onsi/ginkgo"
"k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/uuid"
"k8s.io/kubernetes/test/e2e/framework"
imageutils "k8s.io/kubernetes/test/utils/image"
)
const (
volumePath = "/test-volume"
)
var (
testImageRootUid = imageutils.GetE2EImage(imageutils.Mounttest)
testImageNonRootUid = imageutils.GetE2EImage(imageutils.MounttestUser)
)
var _ = Describe("[sig-storage] EmptyDir volumes", func() {
f := framework.NewDefaultFramework("emptydir")
Context("when FSGroup is specified [NodeFeature:FSGroup]", func() {
It("new files should be created with FSGroup ownership when container is root", func() {
doTestSetgidFSGroup(f, testImageRootUid, v1.StorageMediumMemory)
})
It("new files should be created with FSGroup ownership when container is non-root", func() {
doTestSetgidFSGroup(f, testImageNonRootUid, v1.StorageMediumMemory)
})
It("nonexistent volume subPath should have the correct mode and owner using FSGroup", func() {
doTestSubPathFSGroup(f, testImageNonRootUid, v1.StorageMediumMemory)
})
It("files with FSGroup ownership should support (root,0644,tmpfs)", func() {
doTest0644FSGroup(f, testImageRootUid, v1.StorageMediumMemory)
})
It("volume on default medium should have the correct mode using FSGroup", func() {
doTestVolumeModeFSGroup(f, testImageRootUid, v1.StorageMediumDefault)
})<|fim▁hole|>
It("volume on tmpfs should have the correct mode using FSGroup", func() {
doTestVolumeModeFSGroup(f, testImageRootUid, v1.StorageMediumMemory)
})
})
/*
Release : v1.9
Testname: EmptyDir, medium memory, volume mode default
Description: A Pod created with an 'emptyDir' Volume and 'medium' as 'Memory', the volume MUST have mode set as -rwxrwxrwx and mount type set to tmpfs.
*/
framework.ConformanceIt("volume on tmpfs should have the correct mode [NodeConformance]", func() {
doTestVolumeMode(f, testImageRootUid, v1.StorageMediumMemory)
})
/*
Release : v1.9
Testname: EmptyDir, medium memory, volume mode 0644
Description: A Pod created with an 'emptyDir' Volume and 'medium' as 'Memory', the volume mode set to 0644. The volume MUST have mode -rw-r--r-- and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (root,0644,tmpfs) [NodeConformance]", func() {
doTest0644(f, testImageRootUid, v1.StorageMediumMemory)
})
/*
Release : v1.9
Testname: EmptyDir, medium memory, volume mode 0666
Description: A Pod created with an 'emptyDir' Volume and 'medium' as 'Memory', the volume mode set to 0666. The volume MUST have mode -rw-rw-rw- and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (root,0666,tmpfs) [NodeConformance]", func() {
doTest0666(f, testImageRootUid, v1.StorageMediumMemory)
})
/*
Release : v1.9
Testname: EmptyDir, medium memory, volume mode 0777
Description: A Pod created with an 'emptyDir' Volume and 'medium' as 'Memory', the volume mode set to 0777. The volume MUST have mode set as -rwxrwxrwx and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (root,0777,tmpfs) [NodeConformance]", func() {
doTest0777(f, testImageRootUid, v1.StorageMediumMemory)
})
/*
Release : v1.9
Testname: EmptyDir, medium memory, volume mode 0644, non-root user
Description: A Pod created with an 'emptyDir' Volume and 'medium' as 'Memory', the volume mode set to 0644. Volume is mounted into the container where container is run as a non-root user. The volume MUST have mode -rw-r--r-- and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (non-root,0644,tmpfs) [NodeConformance]", func() {
doTest0644(f, testImageNonRootUid, v1.StorageMediumMemory)
})
/*
Release : v1.9
Testname: EmptyDir, medium memory, volume mode 0666,, non-root user
Description: A Pod created with an 'emptyDir' Volume and 'medium' as 'Memory', the volume mode set to 0666. Volume is mounted into the container where container is run as a non-root user. The volume MUST have mode -rw-rw-rw- and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (non-root,0666,tmpfs) [NodeConformance]", func() {
doTest0666(f, testImageNonRootUid, v1.StorageMediumMemory)
})
/*
Release : v1.9
Testname: EmptyDir, medium memory, volume mode 0777, non-root user
Description: A Pod created with an 'emptyDir' Volume and 'medium' as 'Memory', the volume mode set to 0777. Volume is mounted into the container where container is run as a non-root user. The volume MUST have mode -rwxrwxrwx and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (non-root,0777,tmpfs) [NodeConformance]", func() {
doTest0777(f, testImageNonRootUid, v1.StorageMediumMemory)
})
/*
Release : v1.9
Testname: EmptyDir, medium default, volume mode default
Description: A Pod created with an 'emptyDir' Volume, the volume MUST have mode set as -rwxrwxrwx and mount type set to tmpfs.
*/
framework.ConformanceIt("volume on default medium should have the correct mode [NodeConformance]", func() {
doTestVolumeMode(f, testImageRootUid, v1.StorageMediumDefault)
})
/*
Release : v1.9
Testname: EmptyDir, medium default, volume mode 0644
Description: A Pod created with an 'emptyDir' Volume, the volume mode set to 0644. The volume MUST have mode -rw-r--r-- and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (root,0644,default) [NodeConformance]", func() {
doTest0644(f, testImageRootUid, v1.StorageMediumDefault)
})
/*
Release : v1.9
Testname: EmptyDir, medium default, volume mode 0666
Description: A Pod created with an 'emptyDir' Volume, the volume mode set to 0666. The volume MUST have mode -rw-rw-rw- and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (root,0666,default) [NodeConformance]", func() {
doTest0666(f, testImageRootUid, v1.StorageMediumDefault)
})
/*
Release : v1.9
Testname: EmptyDir, medium default, volume mode 0777
Description: A Pod created with an 'emptyDir' Volume, the volume mode set to 0777. The volume MUST have mode set as -rwxrwxrwx and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (root,0777,default) [NodeConformance]", func() {
doTest0777(f, testImageRootUid, v1.StorageMediumDefault)
})
/*
Release : v1.9
Testname: EmptyDir, medium default, volume mode 0644
Description: A Pod created with an 'emptyDir' Volume, the volume mode set to 0644. Volume is mounted into the container where container is run as a non-root user. The volume MUST have mode -rw-r--r-- and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (non-root,0644,default) [NodeConformance]", func() {
doTest0644(f, testImageNonRootUid, v1.StorageMediumDefault)
})
/*
Release : v1.9
Testname: EmptyDir, medium default, volume mode 0666
Description: A Pod created with an 'emptyDir' Volume, the volume mode set to 0666. Volume is mounted into the container where container is run as a non-root user. The volume MUST have mode -rw-rw-rw- and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (non-root,0666,default) [NodeConformance]", func() {
doTest0666(f, testImageNonRootUid, v1.StorageMediumDefault)
})
/*
Release : v1.9
Testname: EmptyDir, medium default, volume mode 0777
Description: A Pod created with an 'emptyDir' Volume, the volume mode set to 0777. Volume is mounted into the container where container is run as a non-root user. The volume MUST have mode -rwxrwxrwx and mount type set to tmpfs and the contents MUST be readable.
*/
framework.ConformanceIt("should support (non-root,0777,default) [NodeConformance]", func() {
doTest0777(f, testImageNonRootUid, v1.StorageMediumDefault)
})
})
const (
containerName = "test-container"
volumeName = "test-volume"
)
func doTestSetgidFSGroup(f *framework.Framework, image string, medium v1.StorageMedium) {
var (
filePath = path.Join(volumePath, "test-file")
source = &v1.EmptyDirVolumeSource{Medium: medium}
pod = testPodWithVolume(testImageRootUid, volumePath, source)
)
pod.Spec.Containers[0].Args = []string{
fmt.Sprintf("--fs_type=%v", volumePath),
fmt.Sprintf("--new_file_0660=%v", filePath),
fmt.Sprintf("--file_perm=%v", filePath),
fmt.Sprintf("--file_owner=%v", filePath),
}
fsGroup := int64(123)
pod.Spec.SecurityContext.FSGroup = &fsGroup
msg := fmt.Sprintf("emptydir 0644 on %v", formatMedium(medium))
out := []string{
"perms of file \"/test-volume/test-file\": -rw-rw----",
"content of file \"/test-volume/test-file\": mount-tester new file",
"owner GID of \"/test-volume/test-file\": 123",
}
if medium == v1.StorageMediumMemory {
out = append(out, "mount type of \"/test-volume\": tmpfs")
}
f.TestContainerOutput(msg, pod, 0, out)
}
func doTestSubPathFSGroup(f *framework.Framework, image string, medium v1.StorageMedium) {
var (
subPath = "test-sub"
source = &v1.EmptyDirVolumeSource{Medium: medium}
pod = testPodWithVolume(image, volumePath, source)
)
pod.Spec.Containers[0].Args = []string{
fmt.Sprintf("--fs_type=%v", volumePath),
fmt.Sprintf("--file_perm=%v", volumePath),
fmt.Sprintf("--file_owner=%v", volumePath),
fmt.Sprintf("--file_mode=%v", volumePath),
}
pod.Spec.Containers[0].VolumeMounts[0].SubPath = subPath
fsGroup := int64(123)
pod.Spec.SecurityContext.FSGroup = &fsGroup
msg := fmt.Sprintf("emptydir subpath on %v", formatMedium(medium))
out := []string{
"perms of file \"/test-volume\": -rwxrwxrwx",
"owner UID of \"/test-volume\": 0",
"owner GID of \"/test-volume\": 123",
"mode of file \"/test-volume\": dgtrwxrwxrwx",
}
if medium == v1.StorageMediumMemory {
out = append(out, "mount type of \"/test-volume\": tmpfs")
}
f.TestContainerOutput(msg, pod, 0, out)
}
func doTestVolumeModeFSGroup(f *framework.Framework, image string, medium v1.StorageMedium) {
var (
source = &v1.EmptyDirVolumeSource{Medium: medium}
pod = testPodWithVolume(testImageRootUid, volumePath, source)
)
pod.Spec.Containers[0].Args = []string{
fmt.Sprintf("--fs_type=%v", volumePath),
fmt.Sprintf("--file_perm=%v", volumePath),
}
fsGroup := int64(1001)
pod.Spec.SecurityContext.FSGroup = &fsGroup
msg := fmt.Sprintf("emptydir volume type on %v", formatMedium(medium))
out := []string{
"perms of file \"/test-volume\": -rwxrwxrwx",
}
if medium == v1.StorageMediumMemory {
out = append(out, "mount type of \"/test-volume\": tmpfs")
}
f.TestContainerOutput(msg, pod, 0, out)
}
func doTest0644FSGroup(f *framework.Framework, image string, medium v1.StorageMedium) {
var (
filePath = path.Join(volumePath, "test-file")
source = &v1.EmptyDirVolumeSource{Medium: medium}
pod = testPodWithVolume(image, volumePath, source)
)
pod.Spec.Containers[0].Args = []string{
fmt.Sprintf("--fs_type=%v", volumePath),
fmt.Sprintf("--new_file_0644=%v", filePath),
fmt.Sprintf("--file_perm=%v", filePath),
}
fsGroup := int64(123)
pod.Spec.SecurityContext.FSGroup = &fsGroup
msg := fmt.Sprintf("emptydir 0644 on %v", formatMedium(medium))
out := []string{
"perms of file \"/test-volume/test-file\": -rw-r--r--",
"content of file \"/test-volume/test-file\": mount-tester new file",
}
if medium == v1.StorageMediumMemory {
out = append(out, "mount type of \"/test-volume\": tmpfs")
}
f.TestContainerOutput(msg, pod, 0, out)
}
func doTestVolumeMode(f *framework.Framework, image string, medium v1.StorageMedium) {
var (
source = &v1.EmptyDirVolumeSource{Medium: medium}
pod = testPodWithVolume(testImageRootUid, volumePath, source)
)
pod.Spec.Containers[0].Args = []string{
fmt.Sprintf("--fs_type=%v", volumePath),
fmt.Sprintf("--file_perm=%v", volumePath),
}
msg := fmt.Sprintf("emptydir volume type on %v", formatMedium(medium))
out := []string{
"perms of file \"/test-volume\": -rwxrwxrwx",
}
if medium == v1.StorageMediumMemory {
out = append(out, "mount type of \"/test-volume\": tmpfs")
}
f.TestContainerOutput(msg, pod, 0, out)
}
func doTest0644(f *framework.Framework, image string, medium v1.StorageMedium) {
var (
filePath = path.Join(volumePath, "test-file")
source = &v1.EmptyDirVolumeSource{Medium: medium}
pod = testPodWithVolume(image, volumePath, source)
)
pod.Spec.Containers[0].Args = []string{
fmt.Sprintf("--fs_type=%v", volumePath),
fmt.Sprintf("--new_file_0644=%v", filePath),
fmt.Sprintf("--file_perm=%v", filePath),
}
msg := fmt.Sprintf("emptydir 0644 on %v", formatMedium(medium))
out := []string{
"perms of file \"/test-volume/test-file\": -rw-r--r--",
"content of file \"/test-volume/test-file\": mount-tester new file",
}
if medium == v1.StorageMediumMemory {
out = append(out, "mount type of \"/test-volume\": tmpfs")
}
f.TestContainerOutput(msg, pod, 0, out)
}
func doTest0666(f *framework.Framework, image string, medium v1.StorageMedium) {
var (
filePath = path.Join(volumePath, "test-file")
source = &v1.EmptyDirVolumeSource{Medium: medium}
pod = testPodWithVolume(image, volumePath, source)
)
pod.Spec.Containers[0].Args = []string{
fmt.Sprintf("--fs_type=%v", volumePath),
fmt.Sprintf("--new_file_0666=%v", filePath),
fmt.Sprintf("--file_perm=%v", filePath),
}
msg := fmt.Sprintf("emptydir 0666 on %v", formatMedium(medium))
out := []string{
"perms of file \"/test-volume/test-file\": -rw-rw-rw-",
"content of file \"/test-volume/test-file\": mount-tester new file",
}
if medium == v1.StorageMediumMemory {
out = append(out, "mount type of \"/test-volume\": tmpfs")
}
f.TestContainerOutput(msg, pod, 0, out)
}
func doTest0777(f *framework.Framework, image string, medium v1.StorageMedium) {
var (
filePath = path.Join(volumePath, "test-file")
source = &v1.EmptyDirVolumeSource{Medium: medium}
pod = testPodWithVolume(image, volumePath, source)
)
pod.Spec.Containers[0].Args = []string{
fmt.Sprintf("--fs_type=%v", volumePath),
fmt.Sprintf("--new_file_0777=%v", filePath),
fmt.Sprintf("--file_perm=%v", filePath),
}
msg := fmt.Sprintf("emptydir 0777 on %v", formatMedium(medium))
out := []string{
"perms of file \"/test-volume/test-file\": -rwxrwxrwx",
"content of file \"/test-volume/test-file\": mount-tester new file",
}
if medium == v1.StorageMediumMemory {
out = append(out, "mount type of \"/test-volume\": tmpfs")
}
f.TestContainerOutput(msg, pod, 0, out)
}
func formatMedium(medium v1.StorageMedium) string {
if medium == v1.StorageMediumMemory {
return "tmpfs"
}
return "node default medium"
}
func testPodWithVolume(image, path string, source *v1.EmptyDirVolumeSource) *v1.Pod {
podName := "pod-" + string(uuid.NewUUID())
return &v1.Pod{
TypeMeta: metav1.TypeMeta{
Kind: "Pod",
APIVersion: "v1",
},
ObjectMeta: metav1.ObjectMeta{
Name: podName,
},
Spec: v1.PodSpec{
Containers: []v1.Container{
{
Name: containerName,
Image: image,
VolumeMounts: []v1.VolumeMount{
{
Name: volumeName,
MountPath: path,
},
},
},
},
SecurityContext: &v1.PodSecurityContext{
SELinuxOptions: &v1.SELinuxOptions{
Level: "s0",
},
},
RestartPolicy: v1.RestartPolicyNever,
Volumes: []v1.Volume{
{
Name: volumeName,
VolumeSource: v1.VolumeSource{
EmptyDir: source,
},
},
},
},
}
}<|fim▁end|> | |
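// Illustrative sketch: how the helpers above are typically wired into a test
// case. The It() description and the choice of testImageRootUid here are
// assumptions, not a verbatim excerpt from the suite.
//
//	It("should support (root,0644,tmpfs)", func() {
//		doTest0644(f, testImageRootUid, v1.StorageMediumMemory)
//	})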
<|file_name|>ai.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> fn consider(&mut self, board: &mut Board) -> Command;
}<|fim▁end|> | use reversi::*;
pub trait AI { |
<|file_name|>proxy.js<|end_file_name|><|fim▁begin|>import net from 'net';
import log from './log.js';
export default function(options, onConnect) {
// proxy server
let proxy = net.createServer(function(client) {
let server;
// Create a new connection to the target server
server = net.connect(options.port);
// 2-way pipe between proxy and target server
client.pipe(server).pipe(client);
client.on('close', function() {<|fim▁hole|> });
server.on('close', function() {
client.end();
});
client.on('error', function(err) {
log.error('Client: ' + err.toString());
client.end();
server.end();
});
server.on('error', function(err) {
log.error('Server: ' + err.toString());
client.end();
server.end();
});
onConnect(client, server);
});
proxy.listen(options.proxyPort);
}<|fim▁end|> | server.end(); |
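// Illustrative sketch: a minimal way to use the proxy factory above. The
// module path and port numbers are arbitrary example values.
//
//   import createProxy from './proxy.js';
//
//   createProxy({ port: 5432, proxyPort: 15432 }, (client, server) => {
//     // Both sockets are already piped; add instrumentation here.
//     client.on('data', (chunk) => console.log('client -> server:', chunk.length, 'bytes'));
//   });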
<|file_name|>einstein.py<|end_file_name|><|fim▁begin|>from pyeda.inter import *
'''
The Englishman lives in the red house.
The Swede keeps dogs.
The Dane drinks tea.
The green house is just to the left of the white one.
The owner of the green house drinks coffee.
The Pall Mall smoker keeps birds.
The owner of the yellow house smokes Dunhills.
The man in the center house drinks milk.
The Norwegian lives in the first house.
The Blend smoker has a neighbor who keeps cats.
The man who smokes Blue Masters drinks beer.
The man who keeps horses lives next to the Dunhill smoker.
The German smokes Prince.
The Norwegian lives next to the blue house.
The Blend smoker has a neighbor who drinks water.
'''
X = exprvars('x', (1,6), (1,6), (1,6))
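# Variable encoding (inferred from the constraints below): X[r, c, v] is true
# when house r (1..5, left to right) has value v for attribute category c,
# with c = 1 nationality, 2 house colour, 3 pet, 4 drink, 5 cigarette brand.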
class Solve:
def __init__(self):
X = exprvars('x', (1,6), (1,6), (1,6))
self.DIGITS = "123456789"
self.F = And (*[ And (*[ OneHot (*[X[r,c,v] for v in range(1,6)]) for c in range(1,6)]) for r in range(1,6)])
self.C = And (*[ And (*[ OneHot (*[X[r,c,v] for r in range(1,6)]) for v in range(1,6)]) for c in range(1,6)])
# The Englishman lives in the red house.
self.r1 = Or (*[ And(X[r, 1, 1], X[r, 2, 1]) for r in range(1,6)])
# The Swede keeps dogs.
self.r2 = Or (*[ And(X[r, 1, 2], X[r, 3, 1]) for r in range(1,6)])
# The Dane drinks tea.
self.r3 = Or (*[ And(X[r, 1, 5], X[r, 4, 1]) for r in range(1,6)])
# The green house is just to the left of the white one.
self.r4 = Or (*[ And(X[r, 2, 2], X[r+1, 2, 3]) for r in range(1,5)])
# The owner of the green house drinks coffee.
self.r5 = Or (*[ And(X[r, 2, 2], X[r, 4, 2]) for r in range(1,6)])
# The Pall Mall smoker keeps birds.
self.r6 = Or (*[ And(X[r, 5, 1], X[r, 3, 2]) for r in range(1,6)])
# The owner of the yellow house smokes Dunhills.
self.r7 = Or (*[ And(X[r, 2, 4], X[r, 5, 2]) for r in range(1,6)])
# The man in the center house drinks milk.
self.r8 = X[3, 4, 3]
#The Norwegian lives in the first house.
self.r9 = X[1, 1, 3]<|fim▁hole|> #The Blend smoker has a neighbor who keeps cats.
		self.r10 = Or (Or (*[ And(X[r, 5, 3], X[r+1, 3, 3]) for r in range(1,5)]), Or (*[ And(X[r, 5, 3], X[r-1, 3, 3]) for r in range(2,6)]))
#The man who smokes Blue Masters drinks beer.
self.r11 = Or (*[ And(X[r, 5, 4], X[r, 4, 4]) for r in range(1,6)])
#The man who keeps horses lives next to the Dunhill smoker.
self.r12 = Or (Or (*[ And(X[r, 3, 4], X[r+1, 5, 2]) for r in range(1,5)]), Or (*[ And(X[r, 3, 4], X[r-1, 5, 2]) for r in range(2,6)]))
#The German smokes Prince.
self.r13 = Or (*[ And(X[r, 1, 4], X[r, 5, 5]) for r in range(1,6)])
#The Norwegian lives next to the blue house.
self.r14 = Or (Or (*[ And(X[r, 1, 3], X[r+1, 2, 5]) for r in range(1,5)]), Or (*[ And(X[r, 1, 3], X[r-1, 2, 5]) for r in range(2,6)]))
#The Blend smoker has a neighbor who drinks water.
self.r15 = Or (Or (*[ And(X[r, 5, 3], X[r+1, 4, 5]) for r in range(1,5)]), Or (*[ And(X[r, 5, 3], X[r-1, 4, 5]) for r in range(2,6)]))
def solve(self):
S = And(self.F, self.C, self.r1, self.r2, self.r3, self.r4, self.r5, self.r6, self.r7, self.r8, self.r9, self.r10, self.r11, self.r12, self.r13, self.r14, self.r15)
#answer = And(X[1,1,3], X[2,1,5], X[3,1,1], X[4,1,4], X[5,1,2], X[1,2,4], X[2,2,5], X[3,2,1], X[4,2,2], X[5,2,3], X[1,3,3],X[2,3,4], X[3,3,2], X[4,3,5], X[5,3,1], X[1,4,5],X[2,4,1],X[3,4,3],X[4,4,2],X[5,4,4],X[1,5,2],X[2,5,3],X[3,5,1],X[4,5,5],X[5,5,4])
S = S.to_cnf()
return S.satisfy_one()
def toGrid(self, gridStr):
grid = True
assert (len(gridStr) == 81)
r = 0
c = 0
for ch in gridStr:
assert (ch in self.DIGITS or ch in ".")
if ch in self.DIGITS:
grid = And (grid, X[r+1, c+1, int(ch)])
r = r + (c == 8)
c = (c + 1) % 9
return grid
def display(self, solutions):
for s in solutions:
			self.displayOne(s)
print ("\n\n")
def displayOne(self, solution):
for r in range(1,6):
line = ""
for c in range(1,6):
for v in range(1,6):
if solution[X[r, c, v]]:
line = line + ("%d " % v)
print ("%s" % line)
if __name__ == "__main__":
gridStr = (
".73...8.."
"..413..5."
".85..631."
"5...9..3."
"..8.1.5.."
".1..6...7"
".516..28."
".4..529.."
"..2...64.")
s = Solve()
print (s.solve())
#s.displayOne(s.solve(gridStr))<|fim▁end|> | |
<|file_name|>normalize.py<|end_file_name|><|fim▁begin|># Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
import numbers
from torch.distributions.transforms import Transform
from pyro.ops.tensor_utils import safe_normalize
from .. import constraints
class Normalize(Transform):
"""
Safely project a vector onto the sphere wrt the ``p`` norm. This avoids
the singularity at zero by mapping to the vector ``[1, 0, 0, ..., 0]``.
"""
domain = constraints.real_vector
codomain = constraints.sphere
bijective = False
def __init__(self, p=2, cache_size=0):
assert isinstance(p, numbers.Number)
assert p >= 0
self.p = p
super().__init__(cache_size=cache_size)
def __eq__(self, other):
return type(self) == type(other) and self.p == other.p
def _call(self, x):<|fim▁hole|> return safe_normalize(x, p=self.p)
def _inverse(self, y):
return y
def with_cache(self, cache_size=1):
if self._cache_size == cache_size:
return self
return Normalize(self.p, cache_size=cache_size)<|fim▁end|> | |
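# Illustrative sketch: minimal usage of the Normalize transform above,
# assuming the standard torch import; the tensors are arbitrary examples.
#
#   import torch
#   t = Normalize(p=2)
#   y = t(torch.randn(4, 3))        # each row of y lies on the unit 2-sphere
#   t(torch.zeros(3))               # zero input maps to [1., 0., 0.], not NaN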
<|file_name|>blkstoragetest.go<|end_file_name|><|fim▁begin|>/*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package blkstoragetest
import (
"crypto/sha256"
"hash"
"io/ioutil"<|fim▁hole|> "path/filepath"
"testing"
"github.com/hyperledger/fabric-protos-go/common"
"github.com/hyperledger/fabric/common/ledger/blkstorage"
"github.com/hyperledger/fabric/common/metrics/disabled"
"github.com/hyperledger/fabric/protoutil"
"github.com/stretchr/testify/require"
)
var (
testNewHashFunc = func() (hash.Hash, error) {
return sha256.New(), nil
}
attrsToIndex = []blkstorage.IndexableAttr{
blkstorage.IndexableAttrBlockHash,
blkstorage.IndexableAttrBlockNum,
blkstorage.IndexableAttrTxID,
blkstorage.IndexableAttrBlockNumTranNum,
}
)
// BootstrapBlockstoreFromSnapshot does the following:
// - create a block store using the provided blocks
// - generate a snapshot from the block store
// - bootstrap another block store from the snapshot
func BootstrapBlockstoreFromSnapshot(t *testing.T, ledgerName string, blocks []*common.Block) (*blkstorage.BlockStore, func()) {
require.NotEqual(t, 0, len(blocks))
testDir, err := ioutil.TempDir("", ledgerName)
require.NoError(t, err)
snapshotDir := filepath.Join(testDir, "snapshot")
require.NoError(t, os.Mkdir(snapshotDir, 0o755))
conf := blkstorage.NewConf(testDir, 0)
indexConfig := &blkstorage.IndexConfig{AttrsToIndex: attrsToIndex}
provider, err := blkstorage.NewProvider(conf, indexConfig, &disabled.Provider{})
require.NoError(t, err)
// create an original store from the provided blocks so that we can create a snapshot
originalBlkStore, err := provider.Open(ledgerName + "original")
require.NoError(t, err)
for _, block := range blocks {
require.NoError(t, originalBlkStore.AddBlock(block))
}
_, err = originalBlkStore.ExportTxIds(snapshotDir, testNewHashFunc)
require.NoError(t, err)
lastBlockInSnapshot := blocks[len(blocks)-1]
snapshotInfo := &blkstorage.SnapshotInfo{
LastBlockHash: protoutil.BlockHeaderHash(lastBlockInSnapshot.Header),
LastBlockNum: lastBlockInSnapshot.Header.Number,
PreviousBlockHash: lastBlockInSnapshot.Header.PreviousHash,
}
err = provider.ImportFromSnapshot(ledgerName, snapshotDir, snapshotInfo)
require.NoError(t, err)
blockStore, err := provider.Open(ledgerName)
require.NoError(t, err)
cleanup := func() {
provider.Close()
os.RemoveAll(testDir)
}
return blockStore, cleanup
}<|fim▁end|> | "os" |
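// Illustrative sketch: how BootstrapBlockstoreFromSnapshot might be called
// from a test. testutil.ConstructTestBlocks and GetBlockchainInfo are assumed
// from the surrounding fabric test code and may differ in detail.
//
//	blocks := testutil.ConstructTestBlocks(t, 5)
//	store, cleanup := BootstrapBlockstoreFromSnapshot(t, "testledger", blocks)
//	defer cleanup()
//	info, err := store.GetBlockchainInfo()
//	require.NoError(t, err)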
<|file_name|>http_retry.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class HttpRetry(object):
"""HttpRetry operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def head408(
self, custom_headers=None, raw=False, **operation_config):
"""
Return 408 status code, then 200 after retry
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/retry/408'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.head(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def put500(
self, boolean_value=None, custom_headers=None, raw=False, **operation_config):
"""
Return 500 status code, then 200 after retry
:param boolean_value: Simple boolean value true
:type boolean_value: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/retry/500'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
if boolean_value is not None:
body_content = self._serialize.body(boolean_value, 'bool')
else:
body_content = None
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def patch500(
self, boolean_value=None, custom_headers=None, raw=False, **operation_config):
"""
Return 500 status code, then 200 after retry
:param boolean_value: Simple boolean value true
:type boolean_value: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/retry/500'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
if boolean_value is not None:
body_content = self._serialize.body(boolean_value, 'bool')
else:
body_content = None
# Construct and send request
request = self._client.patch(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get502(
self, custom_headers=None, raw=False, **operation_config):
"""
Return 502 status code, then 200 after retry
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/retry/502'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def post503(
self, boolean_value=None, custom_headers=None, raw=False, **operation_config):
"""
Return 503 status code, then 200 after retry
:param boolean_value: Simple boolean value true
:type boolean_value: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/retry/503'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
if boolean_value is not None:
body_content = self._serialize.body(boolean_value, 'bool')
else:
body_content = None
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete503(
self, boolean_value=None, custom_headers=None, raw=False, **operation_config):
"""
Return 503 status code, then 200 after retry
:param boolean_value: Simple boolean value true
:type boolean_value: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/retry/503'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}<|fim▁hole|> if custom_headers:
header_parameters.update(custom_headers)
# Construct body
if boolean_value is not None:
body_content = self._serialize.body(boolean_value, 'bool')
else:
body_content = None
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def put504(
self, boolean_value=None, custom_headers=None, raw=False, **operation_config):
"""
Return 504 status code, then 200 after retry
:param boolean_value: Simple boolean value true
:type boolean_value: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/retry/504'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
if boolean_value is not None:
body_content = self._serialize.body(boolean_value, 'bool')
else:
body_content = None
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def patch504(
self, boolean_value=None, custom_headers=None, raw=False, **operation_config):
"""
Return 504 status code, then 200 after retry
:param boolean_value: Simple boolean value true
:type boolean_value: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/http/retry/504'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
if boolean_value is not None:
body_content = self._serialize.body(boolean_value, 'bool')
else:
body_content = None
# Construct and send request
request = self._client.patch(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response<|fim▁end|> | header_parameters['Content-Type'] = 'application/json; charset=utf-8' |
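# Illustrative sketch: invoking the retry operations above through a generated
# client. The client class name is an assumption based on the AutoRest test
# swagger and may differ in the generated package.
#
#   client = AutoRestHttpInfrastructureTestService("http://localhost:3000")
#   client.http_retry.head408()      # server replies 408, then 200 on retry
#   client.http_retry.put500(True)   # raises models.ErrorException if retries are exhausted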
<|file_name|>bitcoin_nl.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="nl" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="14"/>
<source>About AndroidToken</source>
<translation>Over AndroidToken
</translation>
</message>
<message>
<location filename="../forms/aboutdialog.ui" line="53"/>
<source><b>AndroidToken</b> version</source>
<translation><b>AndroidToken</b> versie</translation>
</message>
<message>
<location filename="../forms/aboutdialog.ui" line="85"/>
<source>Copyright © 2011-2013 AndroidToken Developers
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file license.txt or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>Copyright © 2011-2013 AndroidToken Ontwikkelaars
Dit is experimentele software.
Gedistribueerd onder de MIT/X11 software licentie, zie het bijgevoegde bestand license.txt of http://www.opensource.org/licenses/mit-license.php.
Dit product bevat software ontwikkeld door het OpenSSL Project voor gebruik in de OpenSSL Toolkit (http://www.openssl.org/) en cryptografische software gemaakt door Eric Young ([email protected]) en UPnP software geschreven door Thomas Bernard.</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="14"/>
<source>Address Book</source>
<translation>Adresboek</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="20"/>
<source>These are your AndroidToken addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Dit zijn uw AndroidToken-adressen om betalingen te ontvangen. U kunt er voor kiezen om een adres aan te maken voor elke afzender. Op deze manier kunt u bijhouden wie al aan u betaald heeft.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="33"/>
<source>Double-click to edit address or label</source>
<translation>Dubbelklik om adres of label te wijzigen</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="57"/>
<source>Create a new address</source>
<translation>Maak een nieuw adres aan</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="60"/>
<source>&New Address...</source>
<translation>&Nieuw Adres...</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="71"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopieer het huidig geselecteerde adres naar het klembord</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="74"/>
<source>&Copy to Clipboard</source>
<translation>&Kopieer naar Klembord</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="85"/>
<source>Show &QR Code</source>
<translation>Toon &QR-Code</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="96"/>
<source>Sign a message to prove you own this address</source>
<translation>Onderteken een bericht om te bewijzen dat u dit adres bezit</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="99"/>
<source>&Sign Message</source>
<translation>&Onderteken Bericht</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="110"/>
<source>Delete the currently selected address from the list. Only sending addresses can be deleted.</source>
<translation>Verwijder het huidige geselecteerde adres van de lijst. Alleen zend-adressen kunnen verwijderd worden, niet uw ontvangstadressen.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="113"/>
<source>&Delete</source>
<translation>&Verwijder</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="61"/>
<source>Copy address</source>
<translation>Kopieer adres</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="62"/>
<source>Copy label</source>
<translation>Kopieer label</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="63"/>
<source>Edit</source>
<translation>Bewerk</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="64"/>
<source>Delete</source>
<translation>Verwijder</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="281"/>
<source>Export Address Book Data</source>
<translation>Exporteer Gegevens van het Adresboek</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="282"/>
<source>Comma separated file (*.csv)</source>
<translation>Kommagescheiden bestand (*.csv)</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="295"/>
<source>Error exporting</source>
<translation>Fout bij exporteren</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="295"/>
<source>Could not write to file %1.</source>
<translation>Kon niet schrijven naar bestand %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="77"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location filename="../addresstablemodel.cpp" line="77"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../addresstablemodel.cpp" line="113"/>
<source>(no label)</source>
<translation>(geen label)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="26"/>
<source>Dialog</source>
<translation>Dialoog</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="32"/>
<location filename="../forms/askpassphrasedialog.ui" line="97"/>
<source>TextLabel</source>
<translation>TekstLabel</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="50"/>
<source>Enter passphrase</source>
<translation>Huidig wachtwoord</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="64"/>
<source>New passphrase</source>
<translation>Nieuwe wachtwoord</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="78"/>
<source>Repeat new passphrase</source>
<translation>Herhaal wachtwoord</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="34"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Vul een nieuw wachtwoord in voor uw portemonnee. <br/> Gebruik een wachtwoord van <b>10 of meer lukrake karakters</b>, of <b> acht of meer woorden</b> . </translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="35"/>
<source>Encrypt wallet</source>
<translation>Versleutel portemonnee</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="38"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Deze operatie vereist uw portemonneewachtwoord om de portemonnee te openen.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="43"/>
<source>Unlock wallet</source>
<translation>Open portemonnee</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="46"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Deze operatie vereist uw portemonneewachtwoord om de portemonnee te ontsleutelen</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="51"/>
<source>Decrypt wallet</source>
<translation>Ontsleutel portemonnee</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="54"/>
<source>Change passphrase</source>
<translation>Wijzig wachtwoord</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="55"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Vul uw oude en nieuwe portemonneewachtwoord in.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="101"/>
<source>Confirm wallet encryption</source>
<translation>Bevestig versleuteling van de portemonnee</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="102"/>
<source>WARNING: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR AndroidTokenS</b>!
Are you sure you wish to encrypt your wallet?</source>
<translation>WAARSCHUWING: Wanneer uw portemonnee wordt versleuteld en u verliest uw wachtwoord, dan verliest u<b>AL UW AndroidTokenS</b>!
Bent u er zeker van uw dat u uw portemonnee wilt versleutelen?</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="111"/>
<location filename="../askpassphrasedialog.cpp" line="160"/>
<source>Wallet encrypted</source>
<translation>Portemonnee versleuteld</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="112"/>
<source>AndroidToken will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your AndroidTokens from being stolen by malware infecting your computer.</source>
<translation>AndroidToken zal nu afsluiten om het versleutelingsproces te voltooien. Onthoud dat het versleutelen van uw portemonnee u niet volledig kan beschermen: Malware kan uw computer infecteren en uw AndroidTokens stelen.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="208"/>
<location filename="../askpassphrasedialog.cpp" line="232"/>
<source>Warning: The Caps Lock key is on.</source>
<translation>Waarschuwing: De Caps-Lock-toets staat aan.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="117"/>
<location filename="../askpassphrasedialog.cpp" line="124"/>
<location filename="../askpassphrasedialog.cpp" line="166"/>
<location filename="../askpassphrasedialog.cpp" line="172"/>
<source>Wallet encryption failed</source>
<translation>Portemonneeversleuteling mislukt</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="118"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Portemonneeversleuteling mislukt door een interne fout, Uw portemonnee is niet versleuteld.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="125"/>
<location filename="../askpassphrasedialog.cpp" line="173"/>
<source>The supplied passphrases do not match.</source>
<translation>Het opgegeven wachtwoord is niet correct</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="136"/>
<source>Wallet unlock failed</source>
<translation>Portemonnee openen mislukt</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="137"/>
<location filename="../askpassphrasedialog.cpp" line="148"/>
<location filename="../askpassphrasedialog.cpp" line="167"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Het opgegeven wachtwoord voor de portemonnee-ontsleuteling is niet correct.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="147"/>
<source>Wallet decryption failed</source>
<translation>Portemonnee-ontsleuteling mislukt</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="161"/>
<source>Wallet passphrase was succesfully changed.</source>
<translation>Portemonneewachtwoord is succesvol gewijzigd</translation>
</message>
</context>
<context>
<name>AndroidTokenGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="69"/>
<source>AndroidToken Wallet</source>
<translation>AndroidToken-portemonnee</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="142"/>
<location filename="../bitcoingui.cpp" line="464"/>
<source>Synchronizing with network...</source>
<translation>Synchroniseren met netwerk...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="145"/>
<source>Block chain synchronization in progress</source>
<translation>Bezig met blokkenketen-synchronisatie</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="176"/>
<source>&Overview</source>
<translation>&Overzicht</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="177"/>
<source>Show general overview of wallet</source>
<translation>Toon algemeen overzicht van de portemonnee</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="182"/>
<source>&Transactions</source>
<translation>&Transacties</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="183"/>
<source>Browse transaction history</source>
<translation>Blader door transactieverleden</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="188"/>
<source>&Address Book</source>
<translation>&Adresboek</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="189"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Bewerk de lijst van opgeslagen adressen en labels</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="194"/>
<source>&Receive coins</source>
<translation>&Ontvang munten</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="195"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Toon lijst van adressen om betalingen mee te ontvangen</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="200"/>
<source>&Send coins</source>
<translation>&Verstuur munten</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="201"/>
<source>Send coins to a AndroidToken address</source>
<translation>Verstuur munten naar een AndroidToken-adres</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="206"/>
<source>Sign &message</source>
<translation>&Onderteken Bericht</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="207"/>
<source>Prove you control an address</source>
<translation>Bewijs dat u een adres bezit</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="226"/>
<source>E&xit</source>
<translation>&Afsluiten</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="227"/>
<source>Quit application</source>
<translation>Programma afsluiten</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="230"/>
<source>&About %1</source>
<translation>&Over %1</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="231"/>
<source>Show information about AndroidToken</source>
<translation>Laat informatie zien over AndroidToken</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="233"/>
<source>About &Qt</source>
<translation>Over &Qt</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="234"/>
<source>Show information about Qt</source>
<translation>Toon informatie over Qt</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="236"/>
<source>&Options...</source>
<translation>&Opties...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="237"/>
<source>Modify configuration options for AndroidToken</source>
<translation>Wijzig instellingen van AndroidToken</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="239"/>
<source>Open &AndroidToken</source>
<translation>Open &AndroidToken</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="240"/>
<source>Show the AndroidToken window</source>
<translation>Toon AndroidToken-venster </translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="241"/>
<source>&Export...</source>
<translation>&Exporteer...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="242"/>
<source>Export the data in the current tab to a file</source>
<translation>Exporteer de data in de huidige tab naar een bestand</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="243"/>
<source>&Encrypt Wallet</source>
<translation>&Versleutel Portemonnee</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="244"/>
<source>Encrypt or decrypt wallet</source>
<translation>Versleutel of ontsleutel portemonnee</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="246"/>
<source>&Backup Wallet</source>
<translation>Backup &Portemonnee</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="247"/>
<source>Backup wallet to another location</source>
<translation>&Backup portemonnee naar een andere locatie</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="248"/>
<source>&Change Passphrase</source>
<translation>&Wijzig Wachtwoord</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="249"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>wijzig het wachtwoord voor uw portemonneversleuteling</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="272"/>
<source>&File</source>
<translation>&Bestand</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="281"/>
<source>&Settings</source>
<translation>&Instellingen</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="287"/>
<source>&Help</source>
<translation>&Hulp</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="294"/>
<source>Tabs toolbar</source>
<translation>Tab-werkbalk</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="305"/>
<source>Actions toolbar</source>
<translation>Actie-werkbalk</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="317"/>
<source>[testnet]</source>
<translation>[testnetwerk]</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="407"/>
<source>AndroidToken-qt</source>
<translation>AndroidToken-qt</translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="449"/>
<source>%n active connection(s) to AndroidToken network</source>
<translation><numerusform>%n actieve connectie naar AndroidTokennetwerk</numerusform><numerusform>%n actieve connecties naar AndroidTokennetwerk</numerusform></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="475"/>
<source>Downloaded %1 of %2 blocks of transaction history.</source>
<translation>%1 van %2 blokken van transactiehistorie opgehaald.</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="487"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation>%1 blokken van transactiehistorie opgehaald.</translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="502"/>
<source>%n second(s) ago</source>
<translation><numerusform>%n seconde geleden</numerusform><numerusform>%n seconden geleden</numerusform></translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="506"/>
<source>%n minute(s) ago</source>
<translation><numerusform>%n minuut geleden</numerusform><numerusform>%n minuten geleden</numerusform></translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="510"/>
<source>%n hour(s) ago</source>
<translation><numerusform>%n uur geleden</numerusform><numerusform>%n uur geleden</numerusform></translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="514"/>
<source>%n day(s) ago</source>
<translation><numerusform>%n dag geleden</numerusform><numerusform>%n dagen geleden</numerusform></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="520"/>
<source>Up to date</source>
<translation>Bijgewerkt</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="525"/>
<source>Catching up...</source>
<translation>Aan het bijwerken...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="533"/>
<source>Last received block was generated %1.</source>
<translation>Laatst ontvangen blok is %1 gegenereerd.</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="597"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>Deze transactie overschrijdt de groottelimiet. Om de transactie alsnog te versturen kunt u transactiekosten betalen van %1. Deze transactiekosten gaan naar de nodes die uw transactie verwerken en het helpt op deze manier bij het ondersteunen van het netwerk. Wilt u de transactiekosten betalen?</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="602"/>
<source>Sending...</source>
<translation>Versturen...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="629"/>
<source>Sent transaction</source>
<translation>Verzonden transactie</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="630"/>
<source>Incoming transaction</source>
<translation>Binnenkomende transactie</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="631"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Datum: %1
Bedrag: %2
Type: %3
Adres: %4
</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="751"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Portemonnee is <b>versleuteld</b> en momenteel <b>geopend</b></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="759"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Portemonnee is <b>versleuteld</b> en momenteel <b>gesloten</b></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="782"/>
<source>Backup Wallet</source>
<translation>Backup Portemonnee</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="782"/>
<source>Wallet Data (*.dat)</source>
<translation>Portemonnee-data (*.dat)</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="785"/>
<source>Backup Failed</source>
<translation>Backup Mislukt</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="785"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Er is een fout opgetreden bij het wegschrijven van de portemonnee-data naar de nieuwe locatie.</translation>
</message>
</context>
<context>
<name>DisplayOptionsPage</name>
<message>
<location filename="../optionsdialog.cpp" line="270"/>
<source>&Unit to show amounts in: </source>
<translation>&Eenheid om bedrag in te tonen:</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="274"/>
<source>Choose the default subdivision unit to show in the interface, and when sending coins</source>
<translation>Kies de standaard onderverdelingseenheid om weer te geven in uw programma, en voor het versturen van munten</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="281"/>
<source>Display addresses in transaction list</source>
<translation>Toon adressen in uw transactielijst</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="14"/>
<source>Edit Address</source>
<translation>Bewerk Adres</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="25"/>
<source>&Label</source>
<translation>&Label</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="35"/>
<source>The label associated with this address book entry</source>
<translation>Het label dat geassocieerd is met dit adres</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="42"/>
<source>&Address</source>
<translation>&Adres</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="52"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Het adres dat geassocieerd is met deze adresboek-opgave. Dit kan alleen worden veranderd voor zend-adressen.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="20"/>
<source>New receiving address</source>
<translation>Nieuw ontvangstadres</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="24"/>
<source>New sending address</source>
<translation>Nieuw adres om naar te verzenden</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="27"/>
<source>Edit receiving address</source>
<translation>Bewerk ontvangstadres</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="31"/>
<source>Edit sending address</source>
<translation>Bewerk adres om naar te verzenden</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="91"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Het opgegeven adres "%1" bestaat al in uw adresboek.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="96"/>
<source>The entered address "%1" is not a valid AndroidToken address.</source>
<translation>Het opgegeven adres "%1" is een ongeldig AndroidTokenadres</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="101"/>
<source>Could not unlock wallet.</source>
<translation>Kon de portemonnee niet openen.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="106"/>
<source>New key generation failed.</source>
<translation>Genereren nieuwe sleutel mislukt.</translation>
</message>
</context>
<context>
<name>MainOptionsPage</name>
<message>
<location filename="../optionsdialog.cpp" line="170"/>
<source>&Start AndroidToken on window system startup</source>
<translation>Start &AndroidToken wanneer het systeem opstart</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="171"/>
<source>Automatically start AndroidToken after the computer is turned on</source>
<translation>Start AndroidToken automatisch wanneer de computer wordt aangezet</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="175"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimaliseer naar het systeemvak in plaats van de taakbalk</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="176"/>
<source>Show only a tray icon after minimizing the window</source>
<translation>Laat alleen een systeemvak-icoon zien wanneer het venster geminimaliseerd is</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="180"/>
<source>Map port using &UPnP</source>
<translation>Portmapping via &UPnP</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="181"/>
<source>Automatically open the AndroidToken client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Open de AndroidToken-poort automatisch op de router. Dit werkt alleen als de router UPnP ondersteunt.</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="185"/>
<source>M&inimize on close</source>
<translation>Minimaliseer bij &sluiten van het venster</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="186"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimaliseer het venster in de plaats van de applicatie af te sluiten als het venster gesloten wordt. Wanneer deze optie aan staan, kan de applicatie alleen worden afgesloten door Afsluiten te kiezen in het menu.</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="190"/>
<source>&Connect through SOCKS4 proxy:</source>
<translation>&Verbind via SOCKS4 proxy: </translation><|fim▁hole|> <translation>Verbind met het AndroidToken-netwerk door een SOCKS4 proxy (bijv. wanneer Tor gebruikt wordt)</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="196"/>
<source>Proxy &IP: </source>
<translation>Proxy &IP:</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="202"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>IP-adres van de proxy (bijv. 127.0.0.1)</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="205"/>
<source>&Port: </source>
<translation>&Poort:</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="211"/>
<source>Port of the proxy (e.g. 1234)</source>
<translation>Poort waarop de proxy luistert (bijv. 1234)</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="217"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation>Optionele transactiekosten per kB die helpen om uw transacties snel te verwerken. De meeste transacties zijn 1 kB. Transactiekosten van 0,01 wordt aangeraden</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="223"/>
<source>Pay transaction &fee</source>
<translation>Betaal &transactiekosten</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="226"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation>Optionele transactiekosten per kB die helpen om uw transacties snel te verwerken. De meeste transacties zijn 1 kB. Transactiekosten van 0,01 wordt aangeraden</translation>
</message>
</context>
<context>
<name>MessagePage</name>
<message>
<location filename="../forms/messagepage.ui" line="14"/>
<source>Message</source>
<translation>Bericht</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="20"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>U kunt berichten ondertekenen met een van uw adressen om te bewijzen dat u dit adres bezit. Pas op dat u geen onduidelijke dingen ondertekent, want phishingaanvallen zouden u voor de gek kunnen houden om zo uw identiteit te stelen. Onderteken alleen berichten waarmee u het volledig eens bent.</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="38"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Het adres waaraan u wilt betalen (bijv. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="48"/>
<source>Choose adress from address book</source>
<translation>Kies adres uit adresboek</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="58"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="71"/>
<source>Paste address from clipboard</source>
<translation>Plak adres vanuit klembord</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="81"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="93"/>
<source>Enter the message you want to sign here</source>
<translation>Typ hier het bericht dat u wilt ondertekenen</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="105"/>
<source>Click "Sign Message" to get signature</source>
<translation>Klik "Onderteken Bericht" om de handtekening te verkrijgen</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="117"/>
<source>Sign a message to prove you own this address</source>
<translation>Onderteken een bericht om te bewijzen dat u dit adres bezit</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="120"/>
<source>&Sign Message</source>
<translation>&Onderteken Bericht</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="131"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopieer het huidig geselecteerde adres naar het klembord</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="134"/>
<source>&Copy to Clipboard</source>
<translation>&Kopieer naar Klembord</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="74"/>
<location filename="../messagepage.cpp" line="89"/>
<location filename="../messagepage.cpp" line="101"/>
<source>Error signing</source>
<translation>Fout bij het ondertekenen</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="74"/>
<source>%1 is not a valid address.</source>
<translation>%1 is geen geldig adres.</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="89"/>
<source>Private key for %1 is not available.</source>
<translation>Geheime sleutel voor %1 is niet beschikbaar.</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="101"/>
<source>Sign failed</source>
<translation>Ondertekenen mislukt</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../optionsdialog.cpp" line="79"/>
<source>Main</source>
<translation>Algemeen</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="84"/>
<source>Display</source>
<translation>Beeldscherm</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="104"/>
<source>Options</source>
<translation>Opties</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="14"/>
<source>Form</source>
<translation>Vorm</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="40"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="47"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="54"/>
<source>Number of transactions:</source>
<translation>Aantal transacties:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="61"/>
<source>0</source>
<translation>0</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="68"/>
<source>Unconfirmed:</source>
<translation>Onbevestigd:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="75"/>
<source>0 BTC</source>
<translation>0 BTC</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="82"/>
<source><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">
<html><head><meta name="qrichtext" content="1" /><style type="text/css">
p, li { white-space: pre-wrap; }
</style></head><body style=" font-family:'Ubuntu'; font-size:11pt; font-weight:400; font-style:normal;">
<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-weight:600;">Wallet</span></p></body></html></source>
<translation><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">
<html><head><meta name="qrichtext" content="1" /><style type="text/css">
p, li { white-space: pre-wrap; }
</style></head><body style=" font-family:'Ubuntu'; font-size:11pt; font-weight:400; font-style:normal;">
<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-weight:600;">Portemonnee</span></p></body></html></translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="122"/>
<source><b>Recent transactions</b></source>
<translation><b>Recente transacties</b></translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="103"/>
<source>Your current balance</source>
<translation>Uw huidige saldo</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Totaal aantal transacties dat nog moet worden bevestigd, en nog niet is meegeteld in uw huidige saldo </translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="111"/>
<source>Total number of transactions in wallet</source>
<translation>Totaal aantal transacties in uw portemonnee</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="14"/>
<source>Dialog</source>
<translation>Dialoog</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="32"/>
<source>QR Code</source>
<translation>QR-code</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="52"/>
<source>Request Payment</source>
<translation>Vraag betaling aan</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="67"/>
<source>Amount:</source>
<translation>Bedrag:</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="102"/>
<source>BTC</source>
<translation>BTC</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="118"/>
<source>Label:</source>
<translation>Label:</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="141"/>
<source>Message:</source>
<translation>Bericht:</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="183"/>
<source>&Save As...</source>
<translation>&Opslaan Als...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="101"/>
<source>Save Image...</source>
<translation>Afbeelding Opslaan...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="101"/>
<source>PNG Images (*.png)</source>
<translation>PNG-Afbeeldingen (*.png)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="14"/>
<location filename="../sendcoinsdialog.cpp" line="122"/>
<location filename="../sendcoinsdialog.cpp" line="127"/>
<location filename="../sendcoinsdialog.cpp" line="132"/>
<location filename="../sendcoinsdialog.cpp" line="137"/>
<location filename="../sendcoinsdialog.cpp" line="143"/>
<location filename="../sendcoinsdialog.cpp" line="148"/>
<location filename="../sendcoinsdialog.cpp" line="153"/>
<source>Send Coins</source>
<translation>Verstuur munten</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="64"/>
<source>Send to multiple recipients at once</source>
<translation>Verstuur aan verschillende ontvangers ineens</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="67"/>
<source>&Add recipient...</source>
<translation>Voeg &ontvanger toe...</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="84"/>
<source>Remove all transaction fields</source>
<translation>Verwijder alle transactievelden</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="87"/>
<source>Clear all</source>
<translation>Verwijder alles</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="106"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="113"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="144"/>
<source>Confirm the send action</source>
<translation>Bevestig de verstuuractie</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="147"/>
<source>&Send</source>
<translation>&Verstuur</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="94"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> aan %2 (%3)</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="99"/>
<source>Confirm send coins</source>
<translation>Bevestig versturen munten</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="100"/>
<source>Are you sure you want to send %1?</source>
<translation>Weet u zeker dat u %1 wil versturen?</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="100"/>
<source> and </source>
<translation> en </translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="123"/>
<source>The recepient address is not valid, please recheck.</source>
<translation>Het ontvangstadres is niet geldig, controleer uw opgave.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="128"/>
<source>The amount to pay must be larger than 0.</source>
        <translation>Het ingevoerde bedrag moet groter zijn dan 0.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="133"/>
<source>Amount exceeds your balance</source>
<translation>Bedrag overschrijdt uw huidige saldo</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="138"/>
<source>Total exceeds your balance when the %1 transaction fee is included</source>
<translation>Totaal overschrijdt uw huidige saldo wanneer de %1 transactiekosten worden meegerekend</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="144"/>
<source>Duplicate address found, can only send to each address once in one send operation</source>
<translation>Dubbel adres gevonden, u kunt slechts eenmaal naar een bepaald adres verzenden per verstuurtransactie</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="149"/>
<source>Error: Transaction creation failed </source>
<translation>Fout: Aanmaak transactie mislukt</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="154"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Fout: De transactie was afgewezen. Dit kan gebeuren als u eerder uitgegeven munten opnieuw wilt versturen, zoals wanneer u een kopie van uw wallet.dat heeft gebruikt en in de kopie deze munten zijn gemarkeerd als uitgegeven, maar in de huidige nog niet.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="14"/>
<source>Form</source>
<translation>Vorm</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="29"/>
<source>A&mount:</source>
<translation>Bedra&g:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="42"/>
<source>Pay &To:</source>
<translation>Betaal &Aan:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="66"/>
<location filename="../sendcoinsentry.cpp" line="26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Vul een label in voor dit adres om het toe te voegen aan uw adresboek</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="75"/>
<source>&Label:</source>
<translation>&Label:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="93"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Het adres waaraan u wilt betalen (bijv. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="103"/>
<source>Choose address from address book</source>
<translation>Kies adres uit adresboek</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="113"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="120"/>
<source>Paste address from clipboard</source>
<translation>Plak adres vanuit klembord</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="130"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="137"/>
<source>Remove this recipient</source>
<translation>Verwijder deze ontvanger</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="25"/>
<source>Enter a AndroidToken address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Vul een AndroidTokenadres in (bijv. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="18"/>
<source>Open for %1 blocks</source>
<translation>Openen voor %1 blokken</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="20"/>
<source>Open until %1</source>
<translation>Openen totdat %1</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="26"/>
<source>%1/offline?</source>
<translation>%1/niet verbonden?</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="28"/>
<source>%1/unconfirmed</source>
<translation>%1/onbevestigd</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="30"/>
<source>%1 confirmations</source>
<translation>%1 bevestigingen</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="47"/>
<source><b>Status:</b> </source>
<translation><b>Status:</b></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="52"/>
<source>, has not been successfully broadcast yet</source>
<translation>, is nog niet succesvol uitgezonden</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="54"/>
<source>, broadcast through %1 node</source>
<translation>, uitgezonden naar %1 node</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="56"/>
<source>, broadcast through %1 nodes</source>
<translation>, uitgezonden naar %1 nodes</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="60"/>
<source><b>Date:</b> </source>
<translation><b>Datum:</b></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="67"/>
<source><b>Source:</b> Generated<br></source>
<translation><b>Bron:</b>Gegenereerd<br></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="73"/>
<location filename="../transactiondesc.cpp" line="90"/>
<source><b>From:</b> </source>
<translation><b>Van:</b></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="90"/>
<source>unknown</source>
<translation>onbekend</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="91"/>
<location filename="../transactiondesc.cpp" line="114"/>
<location filename="../transactiondesc.cpp" line="173"/>
<source><b>To:</b> </source>
<translation><b> Aan:</b></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="94"/>
<source> (yours, label: </source>
<translation>(Uw adres, label:</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="96"/>
<source> (yours)</source>
<translation>(uw)</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="131"/>
<location filename="../transactiondesc.cpp" line="145"/>
<location filename="../transactiondesc.cpp" line="190"/>
<location filename="../transactiondesc.cpp" line="207"/>
<source><b>Credit:</b> </source>
<translation><b>Bij:</b></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="133"/>
<source>(%1 matures in %2 more blocks)</source>
<translation>(%1 komt beschikbaar na %2 blokken)</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="137"/>
<source>(not accepted)</source>
<translation>(niet geaccepteerd)</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="181"/>
<location filename="../transactiondesc.cpp" line="189"/>
<location filename="../transactiondesc.cpp" line="204"/>
<source><b>Debit:</b> </source>
<translation><b>Af:</b></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="195"/>
<source><b>Transaction fee:</b> </source>
<translation><b>Transactiekosten:</b></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="211"/>
<source><b>Net amount:</b> </source>
<translation><b>Netto bedrag:</b></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="217"/>
<source>Message:</source>
<translation>Bericht:</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="219"/>
<source>Comment:</source>
<translation>Opmerking:</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="221"/>
<source>Transaction ID:</source>
<translation>Transactie-ID:</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="224"/>
<source>Generated coins must wait 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, it will change to "not accepted" and not be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
        <translation>Gegenereerde munten moeten 120 blokken wachten voor ze kunnen worden uitgegeven. Uw net gegenereerde blok is uitgezonden aan het netwerk om te worden toegevoegd aan de blokkenketen. Als het niet wordt geaccepteerd in de keten, zal het blok als "ongeldig" worden aangemerkt en kan het niet worden uitgegeven. Dit kan soms gebeuren als een andere node net iets sneller een blok heeft gegenereerd; een paar seconden voor het uwe.</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="14"/>
<source>Transaction details</source>
<translation>Transactiedetails</translation>
</message>
<message>
<location filename="../forms/transactiondescdialog.ui" line="20"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Dit venster laat een uitgebreide beschrijving van de transactie zien</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="213"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="213"/>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="213"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="213"/>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
<message numerus="yes">
<location filename="../transactiontablemodel.cpp" line="274"/>
<source>Open for %n block(s)</source>
<translation><numerusform>Open gedurende %n blok</numerusform><numerusform>Open gedurende %n blokken</numerusform></translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="277"/>
<source>Open until %1</source>
<translation>Open tot %1</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="280"/>
<source>Offline (%1 confirmations)</source>
<translation>Niet verbonden (%1 bevestigingen)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="283"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Onbevestigd (%1 van %2 bevestigd)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="286"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Bevestigd (%1 bevestigingen)</translation>
</message>
<message numerus="yes">
<location filename="../transactiontablemodel.cpp" line="295"/>
<source>Mined balance will be available in %n more blocks</source>
<translation><numerusform>Ontgonnen saldo komt beschikbaar na %n blok</numerusform><numerusform>Ontgonnen saldo komt beschikbaar na %n blokken</numerusform></translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="301"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Dit blok is niet ontvangen bij andere nodes en zal waarschijnlijk niet worden geaccepteerd!</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="304"/>
<source>Generated but not accepted</source>
<translation>Gegenereerd maar niet geaccepteerd</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="347"/>
<source>Received with</source>
<translation>Ontvangen met</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="349"/>
<source>Received from</source>
<translation>Ontvangen van</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="352"/>
<source>Sent to</source>
<translation>Verzonden aan</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="354"/>
<source>Payment to yourself</source>
<translation>Betaling aan uzelf</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="356"/>
<source>Mined</source>
<translation>Ontgonnen</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="394"/>
<source>(n/a)</source>
<translation>(nvt)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="593"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transactiestatus. Houd de muiscursor boven dit veld om het aantal bevestigingen te laten zien.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="595"/>
<source>Date and time that the transaction was received.</source>
<translation>Datum en tijd waarop deze transactie is ontvangen.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="597"/>
<source>Type of transaction.</source>
<translation>Type transactie.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="599"/>
<source>Destination address of transaction.</source>
<translation>Ontvangend adres van transactie</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="601"/>
<source>Amount removed from or added to balance.</source>
<translation>Bedrag verwijderd van of toegevoegd aan saldo</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="55"/>
<location filename="../transactionview.cpp" line="71"/>
<source>All</source>
<translation>Alles</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="56"/>
<source>Today</source>
<translation>Vandaag</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="57"/>
<source>This week</source>
<translation>Deze week</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="58"/>
<source>This month</source>
<translation>Deze maand</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="59"/>
<source>Last month</source>
<translation>Vorige maand</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="60"/>
<source>This year</source>
<translation>Dit jaar</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="61"/>
<source>Range...</source>
<translation>Bereik...</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="72"/>
<source>Received with</source>
<translation>Ontvangen met</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="74"/>
<source>Sent to</source>
<translation>Verzonden aan</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="76"/>
<source>To yourself</source>
<translation>Aan uzelf</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="77"/>
<source>Mined</source>
<translation>Ontgonnen</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="78"/>
<source>Other</source>
<translation>Anders</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="84"/>
<source>Enter address or label to search</source>
<translation>Vul adres of label in om te zoeken</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="90"/>
<source>Min amount</source>
<translation>Min. bedrag</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="124"/>
<source>Copy address</source>
<translation>Kopieer adres</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="125"/>
<source>Copy label</source>
<translation>Kopieer label</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="126"/>
<source>Copy amount</source>
<translation>Kopieer bedrag</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="127"/>
<source>Edit label</source>
<translation>Bewerk label</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="128"/>
<source>Show details...</source>
<translation>Toon details...</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="268"/>
<source>Export Transaction Data</source>
<translation>Exporteer transactiegegevens</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="269"/>
<source>Comma separated file (*.csv)</source>
<translation>Kommagescheiden bestand (*.csv)</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="277"/>
<source>Confirmed</source>
<translation>Bevestigd</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="278"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="279"/>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="280"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="281"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="282"/>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="283"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="287"/>
<source>Error exporting</source>
<translation>Fout bij exporteren</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="287"/>
<source>Could not write to file %1.</source>
<translation>Kon niet schrijven naar bestand %1.</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="382"/>
<source>Range:</source>
<translation>Bereik:</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="390"/>
<source>to</source>
<translation>naar</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="145"/>
<source>Sending...</source>
<translation>Versturen...</translation>
</message>
</context>
<context>
<name>AndroidToken-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="3"/>
<source>AndroidToken version</source>
<translation>AndroidTokenversie</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="4"/>
<source>Usage:</source>
<translation>Gebruik:</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="5"/>
<source>Send command to -server or androidtokend</source>
<translation>Stuur commando naar -server of androidtokend
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="6"/>
<source>List commands</source>
        <translation>Lijst van commando's
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="7"/>
<source>Get help for a command</source>
<translation>Toon hulp voor een commando
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="8"/>
<source>Options:</source>
<translation>Opties:
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="9"/>
<source>Specify configuration file (default: AndroidToken.conf)</source>
        <translation>Specificeer configuratiebestand (standaard: AndroidToken.conf)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="10"/>
<source>Specify pid file (default: androidtokend.pid)</source>
        <translation>Specificeer pid-bestand (standaard: androidtokend.pid)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="11"/>
<source>Generate coins</source>
<translation>Genereer munten
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="12"/>
<source>Don't generate coins</source>
<translation>Genereer geen munten
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="13"/>
<source>Start minimized</source>
<translation>Geminimaliseerd starten
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="14"/>
<source>Specify data directory</source>
<translation>Stel datamap in
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="15"/>
<source>Specify connection timeout (in milliseconds)</source>
<translation>Specificeer de time-out tijd (in milliseconden)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="16"/>
<source>Connect through socks4 proxy</source>
<translation>Verbind via socks4 proxy
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="17"/>
<source>Allow DNS lookups for addnode and connect</source>
<translation>Sta DNS-naslag toe voor addnode en connect
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="18"/>
<source>Listen for connections on <port> (default: 8333 or testnet: 18333)</source>
<translation>Luister voor verbindingen op <poort> (standaard: 8333 of testnet: 18333)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="19"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Onderhoud maximaal <n> verbindingen naar peers (standaard: 125)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="20"/>
<source>Add a node to connect to</source>
<translation>Voeg een node toe om mee te verbinden
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="21"/>
<source>Connect only to the specified node</source>
<translation>Verbind alleen met deze node
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="22"/>
<source>Don't accept connections from outside</source>
<translation>Sta geen verbindingen van buitenaf toe
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="23"/>
<source>Don't bootstrap list of peers using DNS</source>
<translation>Gebruik geen DNS om de lijst met peers op te starten</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="24"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Drempel om verbinding te verbreken naar zich misdragende peers (standaard: 100)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="25"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Aantal seconden dat zich misdragende peers niet opnieuw mogen verbinden (standaard: 86400)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="28"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 10000)</source>
<translation>Maximale ontvangstbuffer per connectie, <n>*1000 bytes (standaard: 10000)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="29"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 10000)</source>
<translation>Maximale zendbuffer per connectie, <n>*1000 bytes (standaard: 10000)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="30"/>
<source>Don't attempt to use UPnP to map the listening port</source>
<translation>Probeer geen UPnP te gebruiken om de poort waarop geluisterd wordt te mappen
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="31"/>
<source>Attempt to use UPnP to map the listening port</source>
<translation>Probeer UPnP te gebruiken om de poort waarop geluisterd wordt te mappen
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="32"/>
<source>Fee per kB to add to transactions you send</source>
<translation>Transactiekosten per kB om toe te voegen aan transacties die u verzendt</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="33"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aanvaard commandoregel en JSON-RPC commando's
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="34"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Draai in de achtergrond als daemon en aanvaard commando's
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="35"/>
<source>Use the test network</source>
<translation>Gebruik het testnetwerk
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="36"/>
<source>Output extra debugging information</source>
<translation>Toon extra debuggingsinformatie</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="37"/>
<source>Prepend debug output with timestamp</source>
<translation>Voorzie de debuggingsuitvoer van een tijdsaanduiding</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="38"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Stuur trace/debug-info naar de console in plaats van het debug.log bestand</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="39"/>
<source>Send trace/debug info to debugger</source>
<translation>Stuur trace/debug-info naar debugger</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="40"/>
<source>Username for JSON-RPC connections</source>
<translation>Gebruikersnaam voor JSON-RPC verbindingen
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="41"/>
<source>Password for JSON-RPC connections</source>
<translation>Wachtwoord voor JSON-RPC verbindingen
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="42"/>
<source>Listen for JSON-RPC connections on <port> (default: 8332)</source>
<translation>Luister voor JSON-RPC verbindingen op <poort> (standaard: 8332)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="43"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Sta JSON-RPC verbindingen van opgegeven IP adres toe
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="44"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Verstuur commando's naar proces dat op <ip> draait (standaard: 127.0.0.1)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="45"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Stel sleutelpoelgrootte in op <n> (standaard: 100)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="46"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Doorzoek de blokkenketen op ontbrekende portemonnee-transacties</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="47"/>
<source>
SSL options: (see the AndroidToken Wiki for SSL setup instructions)</source>
<translation>
SSL opties: (zie de AndroidToken wiki voor SSL instructies)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="50"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Gebruik OpenSSL (https) voor JSON-RPC verbindingen
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="51"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Certificaat-bestand voor server (standaard: server.cert)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="52"/>
<source>Server private key (default: server.pem)</source>
<translation>Geheime sleutel voor server (standaard: server.pem)
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="53"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Aanvaardbare ciphers (standaard: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="56"/>
<source>This help message</source>
<translation>Dit helpbericht
</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="57"/>
<source>Cannot obtain a lock on data directory %s. AndroidToken is probably already running.</source>
<translation>Kan geen lock op de gegevensdirectory %s verkrijgen. AndroidToken draait vermoedelijk reeds.</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="60"/>
<source>Loading addresses...</source>
<translation>Adressen aan het laden...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="61"/>
<source>Error loading addr.dat</source>
<translation>Fout bij laden addr.dat</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="63"/>
<source>Error loading blkindex.dat</source>
<translation>Fout bij laden blkindex.dat</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="65"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Fout bij laden wallet.dat: Portemonnee corrupt</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="66"/>
<source>Error loading wallet.dat: Wallet requires newer version of AndroidToken</source>
<translation>Fout bij laden wallet.dat: Portemonnee vereist een nieuwere versie van AndroidToken</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="67"/>
<source>Wallet needed to be rewritten: restart AndroidToken to complete</source>
<translation>Portemonnee moest herschreven worden: Herstart AndroidToken om te voltooien</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="68"/>
<source>Error loading wallet.dat</source>
<translation>Fout bij laden wallet.dat</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="62"/>
<source>Loading block index...</source>
<translation>Blokindex aan het laden...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="64"/>
<source>Loading wallet...</source>
<translation>Portemonnee aan het laden...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="69"/>
<source>Rescanning...</source>
<translation>Opnieuw aan het scannen ...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="70"/>
<source>Done loading</source>
<translation>Klaar met laden</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="71"/>
<source>Invalid -proxy address</source>
<translation>Foutief -proxy adres</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="72"/>
<source>Invalid amount for -paytxfee=<amount></source>
<translation>Ongeldig bedrag voor -paytxfee=<bedrag></translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="73"/>
<source>Warning: -paytxfee is set very high. This is the transaction fee you will pay if you send a transaction.</source>
<translation>Waarschuwing: -paytxfee is zeer hoog ingesteld. Dit zijn de transactiekosten die u betaalt bij het versturen van een transactie.</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="76"/>
<source>Error: CreateThread(StartNode) failed</source>
<translation>Fout: CreateThread(StartNode) is mislukt</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="77"/>
<source>Warning: Disk space is low </source>
<translation>Waarschuwing: Weinig schijfruimte over </translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="78"/>
<source>Unable to bind to port %d on this computer. AndroidToken is probably already running.</source>
<translation>Kan niet binden aan poort %d op deze computer. AndroidToken draait vermoedelijk reeds.</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="81"/>
<source>Warning: Please check that your computer's date and time are correct. If your clock is wrong AndroidToken will not work properly.</source>
<translation>Waarschuwing: Controleer dat de datum en tijd op uw computer correct zijn ingesteld. Als uw klok fout staat zal AndroidToken niet correct werken.</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="84"/>
<source>beta</source>
<translation>beta</translation>
</message>
</context>
</TS><|fim▁end|> | </message>
<message>
<location filename="../optionsdialog.cpp" line="191"/>
<source>Connect to the Bitcon network through a SOCKS4 proxy (e.g. when connecting through Tor)</source> |
<|file_name|>test_job.py<|end_file_name|><|fim▁begin|># Python
import pytest
import mock
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from crum import impersonate
import datetime
# Django rest framework
from rest_framework.exceptions import PermissionDenied
from django.utils import timezone
# AWX
from awx.api.versioning import reverse
from awx.api.views import RelatedJobsPreventDeleteMixin, UnifiedJobDeletionMixin
from awx.main.models import (
JobTemplate,
User,
Job,
AdHocCommand,
ProjectUpdate,
)
@pytest.mark.django_db
def test_extra_credentials(get, organization_factory, job_template_factory, credential):
objs = organization_factory("org", superusers=['admin'])
jt = job_template_factory("jt", organization=objs.organization,
inventory='test_inv', project='test_proj').job_template
jt.credentials.add(credential)
jt.save()
job = jt.create_unified_job()
url = reverse('api:job_extra_credentials_list', kwargs={'version': 'v2', 'pk': job.pk})
response = get(url, user=objs.superusers.admin)
assert response.data.get('count') == 1
@pytest.mark.django_db
def test_job_relaunch_permission_denied_response(
post, get, inventory, project, credential, net_credential, machine_credential):
jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
jt.credentials.add(machine_credential)
jt_user = User.objects.create(username='jobtemplateuser')
jt.execute_role.members.add(jt_user)
with impersonate(jt_user):
job = jt.create_unified_job()
# User capability is shown for this
r = get(job.get_absolute_url(), jt_user, expect=200)
assert r.data['summary_fields']['user_capabilities']['start']
# Job has prompted extra_credential, launch denied w/ message
job.launch_config.credentials.add(net_credential)
r = post(reverse('api:job_relaunch', kwargs={'pk':job.pk}), {}, jt_user, expect=403)
assert 'launched with prompted fields' in r.data['detail']
assert 'do not have permission' in r.data['detail']
@pytest.mark.django_db
def test_job_relaunch_permission_denied_response_other_user(get, post, inventory, project, alice, bob):
'''
Asserts custom permission denied message corresponding to
awx/main/tests/functional/test_rbac_job.py::TestJobRelaunchAccess::test_other_user_prompts
'''
jt = JobTemplate.objects.create(
name='testjt', inventory=inventory, project=project,
ask_credential_on_launch=True,
ask_variables_on_launch=True)
jt.execute_role.members.add(alice, bob)
with impersonate(bob):
job = jt.create_unified_job(extra_vars={'job_var': 'foo2'})
# User capability is shown for this
r = get(job.get_absolute_url(), alice, expect=200)
assert r.data['summary_fields']['user_capabilities']['start']
# Job has prompted data, launch denied w/ message
r = post(reverse('api:job_relaunch', kwargs={'pk':job.pk}), {}, alice, expect=403)
assert 'Job was launched with prompts provided by another user' in r.data['detail']
@pytest.mark.django_db
def test_job_relaunch_without_creds(post, inventory, project, admin_user):
jt = JobTemplate.objects.create(
name='testjt', inventory=inventory,
project=project
)
job = jt.create_unified_job()
post(
url=reverse('api:job_relaunch', kwargs={'pk':job.pk}),
data={},
user=admin_user,
expect=201
)
@pytest.mark.django_db
@pytest.mark.parametrize("status,hosts", [
('all', 'host1,host2,host3'),
('failed', 'host3'),
])
def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credential, admin_user, status, hosts):
h1 = inventory.hosts.create(name='host1') # no-op
h2 = inventory.hosts.create(name='host2') # changed host
h3 = inventory.hosts.create(name='host3') # failed host
jt = JobTemplate.objects.create(
name='testjt', inventory=inventory,
project=project
)
jt.credentials.add(machine_credential)
job = jt.create_unified_job(_eager_fields={'status': 'failed'}, limit='host1,host2,host3')
job.job_events.create(event='playbook_on_stats')
job.job_host_summaries.create(host=h1, failed=False, ok=1, changed=0, failures=0, host_name=h1.name)
job.job_host_summaries.create(host=h2, failed=False, ok=0, changed=1, failures=0, host_name=h2.name)
job.job_host_summaries.create(host=h3, failed=False, ok=0, changed=0, failures=1, host_name=h3.name)
r = post(
url=reverse('api:job_relaunch', kwargs={'pk':job.pk}),
data={'hosts': status},
user=admin_user,
expect=201
)
assert r.data.get('limit') == hosts
@pytest.mark.django_db
def test_summary_fields_recent_jobs(job_template, admin_user, get):
jobs = []
for i in range(13):<|fim▁hole|> created=timezone.make_aware(datetime.datetime(2017, 3, 21, 9, i)),
finished=timezone.make_aware(datetime.datetime(2017, 3, 21, 10, i))
))
r = get(
        url=job_template.get_absolute_url(),
        user=admin_user,
        expect=200
)
recent_jobs = r.data['summary_fields']['recent_jobs']
assert len(recent_jobs) == 10
assert recent_jobs == [{
'id': job.id,
'status': 'failed',
'finished': job.finished,
'type': 'job'
} for job in jobs[-10:][::-1]]
@pytest.mark.django_db
def test_slice_jt_recent_jobs(slice_job_factory, admin_user, get):
workflow_job = slice_job_factory(3, spawn=True)
slice_jt = workflow_job.job_template
r = get(
url=slice_jt.get_absolute_url(),
user=admin_user,
expect=200
)
job_ids = [entry['id'] for entry in r.data['summary_fields']['recent_jobs']]
# decision is that workflow job should be shown in the related jobs
# joblets of the workflow job should NOT be shown
assert job_ids == [workflow_job.pk]
@pytest.mark.django_db
def test_block_unprocessed_events(delete, admin_user, mocker):
time_of_finish = parse("Thu Feb 28 09:10:20 2013 -0500")
job = Job.objects.create(
emitted_events=1,
status='finished',
finished=time_of_finish
)
request = mock.MagicMock()
class MockView(UnifiedJobDeletionMixin):
model = Job
def get_object(self):
return job
view = MockView()
time_of_request = time_of_finish + relativedelta(seconds=2)
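    # two seconds after the job finished, its events may still be unprocessed,
    # so the delete below is expected to be rejected with a 400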
with mock.patch('awx.api.views.mixin.now', lambda: time_of_request):
r = view.destroy(request)
assert r.status_code == 400
@pytest.mark.django_db
def test_block_related_unprocessed_events(mocker, organization, project, delete, admin_user):
job_template = JobTemplate.objects.create(
project=project,
playbook='helloworld.yml'
)
time_of_finish = parse("Thu Feb 23 14:17:24 2012 -0500")
Job.objects.create(
emitted_events=1,
status='finished',
finished=time_of_finish,
job_template=job_template,
project=project
)
view = RelatedJobsPreventDeleteMixin()
time_of_request = time_of_finish + relativedelta(seconds=2)
with mock.patch('awx.api.views.mixin.now', lambda: time_of_request):
with pytest.raises(PermissionDenied):
view.perform_destroy(organization)
@pytest.mark.django_db
def test_disallowed_http_update_methods(put, patch, post, inventory, project, admin_user):
jt = JobTemplate.objects.create(
name='test_disallowed_methods', inventory=inventory,
project=project
)
job = jt.create_unified_job()
post(
url=reverse('api:job_detail', kwargs={'pk': job.pk, 'version': 'v2'}),
data={},
user=admin_user,
expect=405
)
put(
url=reverse('api:job_detail', kwargs={'pk': job.pk, 'version': 'v2'}),
data={},
user=admin_user,
expect=405
)
patch(
url=reverse('api:job_detail', kwargs={'pk': job.pk, 'version': 'v2'}),
data={},
user=admin_user,
expect=405
)
class TestControllerNode():
@pytest.fixture
def project_update(self, project):
return ProjectUpdate.objects.create(project=project)
@pytest.fixture
def job(self):
return JobTemplate.objects.create().create_unified_job()
@pytest.fixture
def adhoc(self, inventory):
return AdHocCommand.objects.create(inventory=inventory)
@pytest.mark.django_db
def test_field_controller_node_exists(self, sqlite_copy_expert,
admin_user, job, project_update,
inventory_update, adhoc, get, system_job_factory):
system_job = system_job_factory()
r = get(reverse('api:unified_job_list') + '?id={}'.format(job.id), admin_user, expect=200)
assert 'controller_node' in r.data['results'][0]
r = get(job.get_absolute_url(), admin_user, expect=200)
assert 'controller_node' in r.data
r = get(reverse('api:ad_hoc_command_detail', kwargs={'pk': adhoc.pk}), admin_user, expect=200)
assert 'controller_node' in r.data
r = get(reverse('api:project_update_detail', kwargs={'pk': project_update.pk}), admin_user, expect=200)
assert 'controller_node' not in r.data
r = get(reverse('api:inventory_update_detail', kwargs={'pk': inventory_update.pk}), admin_user, expect=200)
assert 'controller_node' not in r.data
r = get(reverse('api:system_job_detail', kwargs={'pk': system_job.pk}), admin_user, expect=200)
assert 'controller_node' not in r.data<|fim▁end|> | jobs.append(Job.objects.create(
job_template=job_template,
status='failed', |
<|file_name|>engine.io-tests.ts<|end_file_name|><|fim▁begin|>import engine = require('engine.io');
import http = require('http');
let serverOptions: engine.ServerOptions;
let server: engine.Server;
let httpServer: http.Server;
let attachOptions: engine.AttachOptions;
let serverAttachOptions: engine.ServerAttachOptions;
serverOptions = {};
serverOptions = {
pingTimeout: 60000,
pingInterval: 25000,
upgradeTimeout: 10000,
maxHttpBufferSize: 10E7,
transports: ['polling', 'websocket'],
allowUpgrades: true,
perMessageDeflate: true,
httpCompression: true,
cookie: 'io',
cookiePath: '/',<|fim▁hole|> console.log(req.url);
cb(null, true);
}
};
attachOptions = {
path: '/engine.io' ,
destroyUpgrade: true,
destroyUpgradeTimeout: 1000,
};
attachOptions.handlePreflightRequest = true;
attachOptions.handlePreflightRequest = false;
attachOptions.handlePreflightRequest = (server, req, res) => {
console.log(server.clientsCount);
console.log(req.httpVersion);
console.log(res.finished);
};
serverAttachOptions = { ...serverOptions, ...attachOptions };
console.log(engine.protocol);
httpServer = http.createServer();
httpServer.listen(8000);
server = engine(httpServer);
server.close();
server = engine(httpServer, serverOptions);
server.close();
httpServer.close();
server = engine.listen(8000);
server.httpServer!.close();
server.close();
server = engine.listen(8000, serverOptions);
server.httpServer!.close();
server.close();
server = engine.listen(8000, serverOptions, () => {});
server.httpServer!.close();
server.close();
httpServer = http.createServer();
httpServer.listen(8000);
server = engine.attach(httpServer);
server.close();
httpServer.close();
httpServer = http.createServer();
httpServer.listen(8000);
server = engine.attach(httpServer, serverOptions);
server.close();
httpServer.close();
httpServer = http.createServer();
httpServer.listen(8000);
server = engine.attach(httpServer, attachOptions);
server.close();
httpServer.close();
httpServer = http.createServer();
httpServer.listen(8000);
server = engine.attach(httpServer, serverAttachOptions);
server.close();
httpServer.close();
server = new engine.Server();
server.close();
server = new engine.Server();
server.close();
httpServer = http.createServer();
httpServer.listen(8000);
server = new engine.Server(serverOptions);
server.attach(httpServer);
server.attach(httpServer, attachOptions);
server.close();
httpServer.close();
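// Override the session id generator; engine.io calls this for each new
// connection. A random numeric string is enough for this type-checking test,
// though a real server would want a collision-resistant id.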
server.generateId = (req) => Math.floor(Math.random() * 100000).toString();
httpServer = http.createServer();
httpServer.listen(8000);
server = new engine.Server(serverOptions);
httpServer.on('upgrade', (req, socket, head) => {
server.handleUpgrade(req, socket, head);
});
httpServer.on('request', (req, res) => {
server.handleRequest(req, res);
});
console.log(server.clients);
console.log(server.clientsCount);
server.on('connection', (socket) => {
console.log(socket.id);
console.log(socket.server.getMaxListeners());
console.log(socket.request.headers);
console.log(socket.upgraded);
console.log(socket.readyState);
console.log(server.clients[socket.id].id);
socket.on('close', (reason, description) => {
console.log('CLOSE', reason, description && description.message);
});
socket.on('message', (message) => {
console.log('MESSAGE', message);
});
socket.on('error', err => {
console.log('ERROR', err);
});
socket.on('flush', buffer => {
console.log('FLUSH', buffer);
});
socket.on('drain', () => {
console.log('DRAIN');
});
socket.on('packet', packet => {
console.log('PACKET', packet.type, packet.data);
});
socket.on('packetCreate', packet => {
console.log('PACKETCREATE', packet.type, packet.data);
});
socket.send('utf 8 string', {compress: false}, () => {
console.log("SENDCALLBACK");
});
socket.send(new Buffer([0, 1, 2, 3, 4, 5])); // binary data
});
server.once('flush', (socket, buffer) => {
console.log(socket.id);
console.log(buffer[0].type);
console.log(buffer[0].options);
console.log(buffer[0].data);
});
server.once('drain', (socket) => {
console.log(socket.id);
});
server.close();
httpServer.close();<|fim▁end|> | wsEngine: 'ws',
initialPacket: new Buffer([0, 1, 2, 3, 4, 5]),
allowRequest: (req, cb) => { |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>$(function() {
    // search box interaction
var
placeholder = window.INPUT_PLACEHOLDER || '请输入要搜索的关键词',
baiduUrl = 'http://www.baidu.com/s?wd=',
googleUrl = 'http://www.google.com.hk/search?q=',
searchEl = $('#search');
$('.button', searchEl).on('click', function(e) {
var
keyword = $('.keyword', searchEl).val(),
url = e.target.name == 'baidu' ? baiduUrl : googleUrl;
window.open(url + encodeURIComponent(keyword));
e.preventDefault();
});
$('.keyword', searchEl)
.val(placeholder)
.on('focus', function(e) {
var keyword = $(e.target);
if(keyword.val() == placeholder) {
keyword.removeClass('default-word').val('');
}
})
.on('blur', function(e) {
var keyword = $(e.target);
if(keyword.val() == '') {
keyword.addClass('default-word').val(placeholder);
}
});
    // add to favorites
$('#header .icon-favor').on('click', function(e) {
var
title = document.title || '设计师网址导航',
url = window.location.href;
try {
if(window.sidebar && window.sidebar.addPanel) {
window.sidebar.addPanel(title, url, '');
}else if(window.external) {
window.external.AddFavorite(url, title);
}else {
throw 'NOT_SUPPORTED';
}
}catch(err) {
alert('您的浏览器不支持自动收藏,请使用Ctrl+D进行收藏');
}
e.preventDefault();
});
    // set as homepage
$('#header .icon-homepage').on('click', function(e) {
try {
if(window.netscape) {
netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect");
Components.classes['@mozilla.org/preferences-service;1']
.getService(Components. interfaces.nsIPrefBranch)
.setCharPref('browser.startup.homepage',window.location.href);
alert('成功设为首页');
}else if(window.external) {
document.body.style.behavior='url(#default#homepage)';
document.body.setHomePage(location.href);
}else {
throw 'NOT_SUPPORTED';
}
}catch(err) {
alert('您的浏览器不支持或不允许自动设置首页, 请通过浏览器菜单设置');
}
e.preventDefault();
});
    // navigation area
<|fim▁hole|> var tarEl = e.target;
if(tarEl.tagName == 'A' && $(tarEl).parents('section.active')[0]) {
e.stopPropagation();
}else {
if(tarEl.tagName != 'LI') {
tarEl = $(tarEl).parents('li')[0];
}
if(tarEl) {
var aEl = $('a', tarEl);
if(aEl.length) {
var src = aEl.attr('href');
if(aEl.attr('target') == '_blank') {
window.open(src);
}else {
location.href = src;
}
}
}
e.preventDefault();
}
});
    // quick navigation
catalogAnimationRunning = false;
function highlightCatalog(target) {
        // *Effect 1*
// var listItem = $('li', target);
// for(var i=0; i<6; i++) {
// $([listItem[i], listItem[i+6]]).delay(50*i).animate({opacity:0.1},200, function(){
// $(this).animate({opacity:1}, 200);
// });
// }
        /* Effect 2 */
target.addClass('highlight');
setTimeout(function() {
target.removeClass('highlight');
}, 800);
        /* Effect 3 */
// target.addClass('shake');
//setTimeout(function() {
// target.removeClass('shake');
// }, 2000);
}
$('#shortcut nav').on('click', function(e) {
if(e.target.tagName != 'A') {
return;
}
var keyword = $(e.target).attr('href').slice(1);
var target = $('section[data-catalog="'+keyword+'"]');
if(target[0] && !catalogAnimationRunning) {
catalogAnimationRunning = true;
var top = target.offset().top;
$('html, body').animate({
scrollTop: top-20
}, 200, function() {
highlightCatalog(target);
catalogAnimationRunning = false;
});
}
e.preventDefault();
});
    // hot keywords
(function() {
var hotWordCtn = $('#content .tips .hot-words');
var titleStr = '<b>'+KeywordConfig.title+'</b>';
var curIndex = 0;
function showHotWord() {
var html = titleStr;
for(var i=curIndex; i<KeywordConfig.num+curIndex; i++) {
if(KeywordConfig.data[i]) {
html += '<a href="'+KeywordConfig.data[i].url+'" class="website" target="_blank"><strong>'+KeywordConfig.data[i].kw+'</strong></a>';
}
}
hotWordCtn.empty().append(html);
curIndex += KeywordConfig.num;
if(curIndex >= KeywordConfig.data.length) {
curIndex = 0;
}
var children = hotWordCtn.children();
for(var i=0; i<children.length; i++) {
$(children[i]).delay(100*i).animate({opacity:0.1},200, function(){
$(this).animate({opacity:1}, 200);
});
}
showHotWord.timeout = setTimeout(showHotWord, KeywordConfig.delay*1000);
}
hotWordCtn.on('mouseenter', function() {
if(showHotWord.timeout) {
clearTimeout(showHotWord.timeout);
}
}).on('mouseleave', function() {
showHotWord.timeout = setTimeout(showHotWord, KeywordConfig.delay*1000);
});
if(hotWordCtn[0] && KeywordConfig) {
showHotWord();
}
})();
    // article numbering
$('#aside .classics-article-list li').each(function(i, item) {
$(item).css('backgroundPosition', '0 '+(6+i*-50)+'px');
});
    // back to top
var goToTopEl = $('#go-to-top');
$(window).scroll(function() {
if($(window).scrollTop() >0) {
goToTopEl.removeClass('hide');
}else {
goToTopEl.addClass('hide');
}
});
});<|fim▁end|> | $('#catalog,#website-map').on('click', '.website-list>li, .more-item', function(e) {
|
<|file_name|>test_cache.py<|end_file_name|><|fim▁begin|>import sys
from time import sleep
from cachey import Cache, Scorer, nbytes
def test_cache():
c = Cache(available_bytes=nbytes(1) * 3)
c.put('x', 1, 10)
assert c.get('x') == 1
assert 'x' in c
c.put('a', 1, 10)
c.put('b', 1, 10)
c.put('c', 1, 10)
assert set(c.data) == set('xbc')
c.put('d', 1, 10)
assert set(c.data) == set('xcd')
c.clear()
assert 'x' not in c
assert not c.data
assert not c.heap
def test_cache_scores_update():
c = Cache(available_bytes=nbytes(1) * 2)
c.put('x', 1, 1)
c.put('y', 1, 1)
c.get('x')
c.get('x')
c.get('x')
c.put('z', 1, 1)
assert set(c.data) == set('xz')
def test_memoize():
c = Cache(available_bytes=nbytes(1) * 3)
flag = [0]
def slow_inc(x):
flag[0] += 1
sleep(0.01)
return x + 1
memo_inc = c.memoize(slow_inc)
assert memo_inc(1) == 2
assert memo_inc(1) == 2
assert list(c.data.values()) == [2]<|fim▁hole|>
def test_callbacks():
hit_flag = [False]
def hit(key, value):
hit_flag[0] = (key, value)
miss_flag = [False]
def miss(key):
miss_flag[0] = key
c = Cache(100, hit=hit, miss=miss)
c.get('x')
assert miss_flag[0] == 'x'
assert hit_flag[0] == False
c.put('y', 1, 1)
c.get('y')
assert hit_flag[0] == ('y', 1)
def test_just_one_reference():
c = Cache(available_bytes=1000)
o = object()
x = sys.getrefcount(o)
c.put('key', o, cost=10)
y = sys.getrefcount(o)
assert y == x + 1
c.retire('key')
z = sys.getrefcount(o)
assert z == x<|fim▁end|> | |
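# --- Illustrative usage sketch (not part of the original test file) ---
# The tests above already exercise the full cachey surface they rely on:
# Cache(available_bytes=...), put(key, value, cost), get(key), memoize(fn),
# and the hit/miss callbacks. The snippet below restates that usage in one
# place as a quick reference; the byte budget, key names and costs are
# made-up illustrative values, not anything mandated by the library.
from cachey import Cache, nbytes

cache = Cache(available_bytes=nbytes(1) * 100)
cache.put('answer', 42, cost=10)         # higher cost -> more worth keeping
assert cache.get('answer') == 42

def square(x):
    return x * x

memo_square = cache.memoize(square)      # same pattern as test_memoize above
assert memo_square(3) == 9               # computed and stored
assert memo_square(3) == 9               # served from the cache on repeat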
<|file_name|>config.py<|end_file_name|><|fim▁begin|># vim: set fileencoding=utf-8 ts=4 sw=4 expandtab fdm=marker:
"""
Small wrapper around the python ConfigParser module.
"""
import ConfigParser
import logging

LOG = logging.getLogger(__name__)  # assumed: get_param() below logs via LOG, but no logger is defined in this excerpt
CONFIG = ConfigParser.ConfigParser()
DEFAULTS = {
'patterns': {
'path' : '(?P<artist>\w+) - (?P<year>\d+) - (?P<album>\w+)'<|fim▁hole|>}
def get_param(section, name):
try:
param = CONFIG.get(section, name)
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
param = None
if not param:
# Do a default lookup
try:
param = DEFAULTS[section][name]
except KeyError:
# Parameter is not in defaults
LOG.error("Error: Parameter [%s][%s] does not exist", section, name)
param = ""
return param<|fim▁end|> | } |
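# --- Illustrative usage sketch (not part of the original module) ---
# get_param() first consults the parsed config file and only then falls back
# to the DEFAULTS table; if neither side knows the option it logs an error
# and returns "". The file name 'settings.ini' and the second lookup key are
# assumptions made purely for this example.
if __name__ == '__main__':
    CONFIG.read('settings.ini')                     # hypothetical config file
    path_pattern = get_param('patterns', 'path')    # falls back to DEFAULTS when missing
    unknown = get_param('patterns', 'no_such_key')  # logs an error, returns ""
    print path_pattern, repr(unknown)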
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from ._excel import ExcelXlsTableWriter, ExcelXlsxTableWriter<|fim▁hole|>from ._sqlite import SqliteTableWriter<|fim▁end|> | from ._pandas import PandasDataFramePickleWriter |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from . import stock_move<|fim▁hole|><|fim▁end|> | from . import product_product |
<|file_name|>node.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package node
import (
"fmt"
"strconv"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/uuid"
"github.com/kube-node/nodeset/pkg/nodeset/v1alpha1"
)
func (c *Controller) nodesetNodes(nodeset *v1alpha1.NodeSet) ([]*corev1.Node, error) {
objs, err := c.nodeIndexer.ByIndex(OwnerUIDIndex, string(nodeset.GetUID()))
if err != nil {
return nil, fmt.Errorf("failed to get nodes: %v", err)
}
nodes := make([]*corev1.Node, 0, len(objs))
for _, n := range objs {<|fim▁hole|> nodes = append(nodes, n.(*corev1.Node))
}
return nodes, nil
}
func (c *Controller) createNode(nodeset *v1alpha1.NodeSet) (*corev1.Node, error) {
nodeClass, err := c.nodeClassLister.Get(nodeset.Spec.NodeClass)
if err != nil {
return nil, fmt.Errorf("failed to get nodeclass: %v", err)
}
nodeName := nodeset.Name + "-" + string(uuid.NewUUID())[:6]
labels := map[string]string{}
for k, v := range nodeClass.NodeLabels {
labels[k] = v
}
labels[v1alpha1.NodeSetNameLabelKey] = nodeset.Name
labels[v1alpha1.ControllerLabelKey] = nodeClass.NodeController
labels[kubeHostnameLabelKey] = nodeName
mergedNodeClass, err := mergeNodeClass(nodeClass, nodeset.Spec.Config)
if err != nil {
return nil, fmt.Errorf("failed to merge nodeclass: %v", err)
}
content, _, err := convertNodeClassToString(mergedNodeClass)
if err != nil {
return nil, fmt.Errorf("failed to convert merged nodeclass to string: %v", err)
}
gv := v1alpha1.SchemeGroupVersion
node := &corev1.Node{
ObjectMeta: metav1.ObjectMeta{
Name: nodeName,
Labels: labels,
Annotations: map[string]string{
v1alpha1.NodeClassContentAnnotationKey: content,
v1alpha1.NodeSetGenerationAnnotationKey: strconv.Itoa(int(nodeset.Status.ObservedGeneration)),
},
OwnerReferences: []metav1.OwnerReference{*metav1.NewControllerRef(nodeset, gv.WithKind("NodeSet"))},
},
}
return node, nil
}<|fim▁end|> | |
<|file_name|>sctc.py<|end_file_name|><|fim▁begin|>import pyglet
import tkinter
WINDOW_WIDTH = 800 # x
WINDOW_HEIGHT = 600 # y
LEVEL = 6 # original was 4
X_SIZE = 2 * LEVEL
Y_SIZE = 3 * LEVEL
def get_resolution():
root = tkinter.Tk()
return root.winfo_screenwidth(), root.winfo_screenheight()<|fim▁hole|>
def get_position(x, y):
return ((x // X_SIZE) * X_SIZE, (y // Y_SIZE) * Y_SIZE)
def cursor(pos):
x1 = pos[0]
x2 = x1 + X_SIZE
y1 = pos[1]
y2 = y1 + Y_SIZE
return [x1, y1, x2, y1,
x2, y1, x2, y2,
x1, y1, x1, y2,
x1, y2, x2, y2]
class Rail:
def __init__(self, batch, pos):
x1 = pos[0]
x2 = x1 + X_SIZE
y1 = pos[1] + Y_SIZE / 2
batch.add(6, pyglet.gl.GL_LINES, None,
('v2f', [x1, y1-1, x2, y1-1,
x1, y1, x2, y1,
x1, y1+1, x2, y1+1]),
('c3B', (177, 57, 57,
177, 57, 57,
255, 82, 82,
255, 82, 82,
177, 57, 57,
177, 57, 57)))
class AppWin(pyglet.window.Window):
def __init__(self, **kwargs):
kwargs.update(dict(
caption='SimCTC',
))
super(AppWin, self).__init__(**kwargs)
self.batch = pyglet.graphics.Batch()
self.x = -1
self.y = -1
for m in range(0, WINDOW_WIDTH+1, X_SIZE):
for n in range(0, WINDOW_HEIGHT+1, Y_SIZE):
self.batch.add(1, pyglet.gl.GL_POINTS, None,
('v2i', (m, n)),
('c3B', (0, 255, 0)))
def on_draw(self):
self.clear()
self.batch.draw()
def on_mouse_motion(self, x, y, dx, dy):
replot = False
if self.x == -1 and self.y == -1:
pos = get_position(x, y)
self.cursor = self.batch.add(8, pyglet.gl.GL_LINES, None,
('v2f', cursor(pos)),
('c3B', (255, 0, 0)*8))
self.x = x
self.y = y
if abs(dx) > 0:
self.x = self.x + dx
replot = True
if abs(dy) > 0:
self.y = self.y + dy
replot = True
if replot:
self.cursor.vertices = cursor(get_position(self.x, self.y))
def on_mouse_release(self, x, y, button, modifiers):
if button == pyglet.window.mouse.LEFT:
Rail(self.batch, get_position(self.x, self.y))
if __name__ == '__main__':
window = AppWin(width=WINDOW_WIDTH, height=WINDOW_HEIGHT)
window.set_exclusive_mouse(True)
window.set_mouse_visible(False)
pyglet.app.run()<|fim▁end|> | |
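# --- Worked example of the grid snapping above (illustrative, standalone) ---
# With LEVEL = 6 the grid cells are X_SIZE x Y_SIZE = 12 x 18 pixels.
# get_position() floors a mouse coordinate to the top-left corner of its cell,
# and cursor() expands that corner into 8 vertex pairs (16 numbers) for the
# GL_LINES outline. The sample coordinates below are arbitrary; snap() simply
# mirrors get_position() so the arithmetic can be checked without pyglet.
def snap(x, y, x_size=12, y_size=18):
    return (x // x_size) * x_size, (y // y_size) * y_size

assert snap(100, 75) == (96, 72)    # 100 // 12 * 12 == 96, 75 // 18 * 18 == 72
assert snap(96, 72) == (96, 72)     # cell corners are fixed points of the snap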
<|file_name|>keen.js<|end_file_name|><|fim▁begin|>(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
/**
* Expose `Emitter`.
*/
module.exports = Emitter;
/**
* Initialize a new `Emitter`.
*
* @api public
*/
function Emitter(obj) {
if (obj) return mixin(obj);
};
/**
* Mixin the emitter properties.
*
* @param {Object} obj
* @return {Object}
* @api private
*/
function mixin(obj) {
for (var key in Emitter.prototype) {
obj[key] = Emitter.prototype[key];
}
return obj;
}
/**
* Listen on the given `event` with `fn`.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.on =
Emitter.prototype.addEventListener = function(event, fn){
this._callbacks = this._callbacks || {};
(this._callbacks['$' + event] = this._callbacks['$' + event] || [])
.push(fn);
return this;
};
/**
* Adds an `event` listener that will be invoked a single
* time then automatically removed.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.once = function(event, fn){
function on() {
this.off(event, on);
fn.apply(this, arguments);
}
on.fn = fn;
this.on(event, on);
return this;
};
/**
* Remove the given callback for `event` or all
* registered callbacks.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.off =
Emitter.prototype.removeListener =
Emitter.prototype.removeAllListeners =
Emitter.prototype.removeEventListener = function(event, fn){
this._callbacks = this._callbacks || {};
if (0 == arguments.length) {
this._callbacks = {};
return this;
}
var callbacks = this._callbacks['$' + event];
if (!callbacks) return this;
if (1 == arguments.length) {
delete this._callbacks['$' + event];
return this;
}
var cb;
for (var i = 0; i < callbacks.length; i++) {
cb = callbacks[i];
if (cb === fn || cb.fn === fn) {
callbacks.splice(i, 1);
break;
}
}
return this;
};
/**
* Emit `event` with the given args.
*
* @param {String} event
* @param {Mixed} ...
* @return {Emitter}
*/
Emitter.prototype.emit = function(event){
this._callbacks = this._callbacks || {};
var args = [].slice.call(arguments, 1)
, callbacks = this._callbacks['$' + event];
if (callbacks) {
callbacks = callbacks.slice(0);
for (var i = 0, len = callbacks.length; i < len; ++i) {
callbacks[i].apply(this, args);
}
}
return this;
};
/**
* Return array of callbacks for `event`.
*
* @param {String} event
* @return {Array}
* @api public
*/
Emitter.prototype.listeners = function(event){
this._callbacks = this._callbacks || {};
return this._callbacks['$' + event] || [];
};
/**
* Check if this emitter has `event` handlers.
*
* @param {String} event
* @return {Boolean}
* @api public
*/
Emitter.prototype.hasListeners = function(event){
return !! this.listeners(event).length;
};
},{}],2:[function(require,module,exports){
/*!
* domready (c) Dustin Diaz 2012 - License MIT
*/
!function (name, definition) {
if (typeof module != 'undefined') module.exports = definition()
else if (typeof define == 'function' && typeof define.amd == 'object') {}
else this[name] = definition()
}('domready', function (ready) {
var fns = [], fn, f = false
, doc = document
, testEl = doc.documentElement
, hack = testEl.doScroll
, domContentLoaded = 'DOMContentLoaded'
, addEventListener = 'addEventListener'
, onreadystatechange = 'onreadystatechange'
, readyState = 'readyState'
, loadedRgx = hack ? /^loaded|^c/ : /^loaded|c/
, loaded = loadedRgx.test(doc[readyState])
function flush(f) {
loaded = 1
while (f = fns.shift()) f()
}
doc[addEventListener] && doc[addEventListener](domContentLoaded, fn = function () {
doc.removeEventListener(domContentLoaded, fn, f)
flush()
}, f)
hack && doc.attachEvent(onreadystatechange, fn = function () {
if (/^c/.test(doc[readyState])) {
doc.detachEvent(onreadystatechange, fn)
flush()
}
})
return (ready = hack ?
function (fn) {
self != top ?
loaded ? fn() : fns.push(fn) :
function () {
try {
testEl.doScroll('left')
} catch (e) {
return setTimeout(function() { ready(fn) }, 50)
}
fn()
}()
} :
function (fn) {
loaded ? fn() : fns.push(fn)
})
})
},{}],3:[function(require,module,exports){
(function (global){
/*! JSON v3.3.2 | http://bestiejs.github.io/json3 | Copyright 2012-2014, Kit Cambridge | http://kit.mit-license.org */
;(function () {
var isLoader = typeof define === "function" && define.amd;
var objectTypes = {
"function": true,
"object": true
};
var freeExports = objectTypes[typeof exports] && exports && !exports.nodeType && exports;
var root = objectTypes[typeof window] && window || this,
freeGlobal = freeExports && objectTypes[typeof module] && module && !module.nodeType && typeof global == "object" && global;
if (freeGlobal && (freeGlobal["global"] === freeGlobal || freeGlobal["window"] === freeGlobal || freeGlobal["self"] === freeGlobal)) {
root = freeGlobal;
}
function runInContext(context, exports) {
context || (context = root["Object"]());
exports || (exports = root["Object"]());
var Number = context["Number"] || root["Number"],
String = context["String"] || root["String"],
Object = context["Object"] || root["Object"],
Date = context["Date"] || root["Date"],
SyntaxError = context["SyntaxError"] || root["SyntaxError"],
TypeError = context["TypeError"] || root["TypeError"],
Math = context["Math"] || root["Math"],
nativeJSON = context["JSON"] || root["JSON"];
if (typeof nativeJSON == "object" && nativeJSON) {
exports.stringify = nativeJSON.stringify;
exports.parse = nativeJSON.parse;
}
var objectProto = Object.prototype,
getClass = objectProto.toString,
isProperty, forEach, undef;
var isExtended = new Date(-3509827334573292);
try {
isExtended = isExtended.getUTCFullYear() == -109252 && isExtended.getUTCMonth() === 0 && isExtended.getUTCDate() === 1 &&
isExtended.getUTCHours() == 10 && isExtended.getUTCMinutes() == 37 && isExtended.getUTCSeconds() == 6 && isExtended.getUTCMilliseconds() == 708;
} catch (exception) {}
function has(name) {
if (has[name] !== undef) {
return has[name];
}
var isSupported;
if (name == "bug-string-char-index") {
isSupported = "a"[0] != "a";
} else if (name == "json") {
isSupported = has("json-stringify") && has("json-parse");
} else {
var value, serialized = '{"a":[1,true,false,null,"\\u0000\\b\\n\\f\\r\\t"]}';
if (name == "json-stringify") {
var stringify = exports.stringify, stringifySupported = typeof stringify == "function" && isExtended;
if (stringifySupported) {
(value = function () {
return 1;
}).toJSON = value;
try {
stringifySupported =
stringify(0) === "0" &&
stringify(new Number()) === "0" &&
stringify(new String()) == '""' &&
stringify(getClass) === undef &&
stringify(undef) === undef &&
stringify() === undef &&
stringify(value) === "1" &&
stringify([value]) == "[1]" &&
stringify([undef]) == "[null]" &&
stringify(null) == "null" &&
stringify([undef, getClass, null]) == "[null,null,null]" &&
stringify({ "a": [value, true, false, null, "\x00\b\n\f\r\t"] }) == serialized &&
stringify(null, value) === "1" &&
stringify([1, 2], null, 1) == "[\n 1,\n 2\n]" &&
stringify(new Date(-8.64e15)) == '"-271821-04-20T00:00:00.000Z"' &&
stringify(new Date(8.64e15)) == '"+275760-09-13T00:00:00.000Z"' &&
stringify(new Date(-621987552e5)) == '"-000001-01-01T00:00:00.000Z"' &&
stringify(new Date(-1)) == '"1969-12-31T23:59:59.999Z"';
} catch (exception) {
stringifySupported = false;
}
}
isSupported = stringifySupported;
}
if (name == "json-parse") {
var parse = exports.parse;
if (typeof parse == "function") {
try {
if (parse("0") === 0 && !parse(false)) {
value = parse(serialized);
var parseSupported = value["a"].length == 5 && value["a"][0] === 1;
if (parseSupported) {
try {
parseSupported = !parse('"\t"');
} catch (exception) {}
if (parseSupported) {
try {
parseSupported = parse("01") !== 1;
} catch (exception) {}
}
if (parseSupported) {
try {
parseSupported = parse("1.") !== 1;
} catch (exception) {}
}
}
}
} catch (exception) {
parseSupported = false;
}
}
isSupported = parseSupported;
}
}
return has[name] = !!isSupported;
}
if (!has("json")) {
var functionClass = "[object Function]",
dateClass = "[object Date]",
numberClass = "[object Number]",
stringClass = "[object String]",
arrayClass = "[object Array]",
booleanClass = "[object Boolean]";
var charIndexBuggy = has("bug-string-char-index");
if (!isExtended) {
var floor = Math.floor;
var Months = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334];
var getDay = function (year, month) {
return Months[month] + 365 * (year - 1970) + floor((year - 1969 + (month = +(month > 1))) / 4) - floor((year - 1901 + month) / 100) + floor((year - 1601 + month) / 400);
};
}
if (!(isProperty = objectProto.hasOwnProperty)) {
isProperty = function (property) {
var members = {}, constructor;
if ((members.__proto__ = null, members.__proto__ = {
"toString": 1
}, members).toString != getClass) {
isProperty = function (property) {
var original = this.__proto__, result = property in (this.__proto__ = null, this);
this.__proto__ = original;
return result;
};
} else {
constructor = members.constructor;
isProperty = function (property) {
var parent = (this.constructor || constructor).prototype;
return property in this && !(property in parent && this[property] === parent[property]);
};
}
members = null;
return isProperty.call(this, property);
};
}
forEach = function (object, callback) {
var size = 0, Properties, members, property;
(Properties = function () {
this.valueOf = 0;
}).prototype.valueOf = 0;
members = new Properties();
for (property in members) {
if (isProperty.call(members, property)) {
size++;
}
}
Properties = members = null;
if (!size) {
members = ["valueOf", "toString", "toLocaleString", "propertyIsEnumerable", "isPrototypeOf", "hasOwnProperty", "constructor"];
forEach = function (object, callback) {
var isFunction = getClass.call(object) == functionClass, property, length;
var hasProperty = !isFunction && typeof object.constructor != "function" && objectTypes[typeof object.hasOwnProperty] && object.hasOwnProperty || isProperty;
for (property in object) {
if (!(isFunction && property == "prototype") && hasProperty.call(object, property)) {
callback(property);
}
}
for (length = members.length; property = members[--length]; hasProperty.call(object, property) && callback(property));
};
} else if (size == 2) {
forEach = function (object, callback) {
var members = {}, isFunction = getClass.call(object) == functionClass, property;
for (property in object) {
if (!(isFunction && property == "prototype") && !isProperty.call(members, property) && (members[property] = 1) && isProperty.call(object, property)) {
callback(property);
}
}
};
} else {
forEach = function (object, callback) {
var isFunction = getClass.call(object) == functionClass, property, isConstructor;
for (property in object) {
if (!(isFunction && property == "prototype") && isProperty.call(object, property) && !(isConstructor = property === "constructor")) {
callback(property);
}
}
if (isConstructor || isProperty.call(object, (property = "constructor"))) {
callback(property);
}
};
}
return forEach(object, callback);
};
if (!has("json-stringify")) {
var Escapes = {
92: "\\\\",
34: '\\"',
8: "\\b",
12: "\\f",
10: "\\n",
13: "\\r",
9: "\\t"
};
var leadingZeroes = "000000";
var toPaddedString = function (width, value) {
return (leadingZeroes + (value || 0)).slice(-width);
};
var unicodePrefix = "\\u00";
var quote = function (value) {
var result = '"', index = 0, length = value.length, useCharIndex = !charIndexBuggy || length > 10;
var symbols = useCharIndex && (charIndexBuggy ? value.split("") : value);
for (; index < length; index++) {
var charCode = value.charCodeAt(index);
switch (charCode) {
case 8: case 9: case 10: case 12: case 13: case 34: case 92:
result += Escapes[charCode];
break;
default:
if (charCode < 32) {
result += unicodePrefix + toPaddedString(2, charCode.toString(16));
break;
}
result += useCharIndex ? symbols[index] : value.charAt(index);
}
}
return result + '"';
};
var serialize = function (property, object, callback, properties, whitespace, indentation, stack) {
var value, className, year, month, date, time, hours, minutes, seconds, milliseconds, results, element, index, length, prefix, result;
try {
value = object[property];
} catch (exception) {}
if (typeof value == "object" && value) {
className = getClass.call(value);
if (className == dateClass && !isProperty.call(value, "toJSON")) {
if (value > -1 / 0 && value < 1 / 0) {
if (getDay) {
date = floor(value / 864e5);
for (year = floor(date / 365.2425) + 1970 - 1; getDay(year + 1, 0) <= date; year++);
for (month = floor((date - getDay(year, 0)) / 30.42); getDay(year, month + 1) <= date; month++);
date = 1 + date - getDay(year, month);
time = (value % 864e5 + 864e5) % 864e5;
hours = floor(time / 36e5) % 24;
minutes = floor(time / 6e4) % 60;
seconds = floor(time / 1e3) % 60;
milliseconds = time % 1e3;
} else {
year = value.getUTCFullYear();
month = value.getUTCMonth();
date = value.getUTCDate();
hours = value.getUTCHours();
minutes = value.getUTCMinutes();
seconds = value.getUTCSeconds();
milliseconds = value.getUTCMilliseconds();
}
value = (year <= 0 || year >= 1e4 ? (year < 0 ? "-" : "+") + toPaddedString(6, year < 0 ? -year : year) : toPaddedString(4, year)) +
"-" + toPaddedString(2, month + 1) + "-" + toPaddedString(2, date) +
"T" + toPaddedString(2, hours) + ":" + toPaddedString(2, minutes) + ":" + toPaddedString(2, seconds) +
"." + toPaddedString(3, milliseconds) + "Z";
} else {
value = null;
}
} else if (typeof value.toJSON == "function" && ((className != numberClass && className != stringClass && className != arrayClass) || isProperty.call(value, "toJSON"))) {
value = value.toJSON(property);
}
}
if (callback) {
value = callback.call(object, property, value);
}
if (value === null) {
return "null";
}
className = getClass.call(value);
if (className == booleanClass) {
return "" + value;
} else if (className == numberClass) {
return value > -1 / 0 && value < 1 / 0 ? "" + value : "null";
} else if (className == stringClass) {
return quote("" + value);
}
if (typeof value == "object") {
for (length = stack.length; length--;) {
if (stack[length] === value) {
throw TypeError();
}
}
stack.push(value);
results = [];
prefix = indentation;
indentation += whitespace;
if (className == arrayClass) {
for (index = 0, length = value.length; index < length; index++) {
element = serialize(index, value, callback, properties, whitespace, indentation, stack);
results.push(element === undef ? "null" : element);
}
result = results.length ? (whitespace ? "[\n" + indentation + results.join(",\n" + indentation) + "\n" + prefix + "]" : ("[" + results.join(",") + "]")) : "[]";
} else {
forEach(properties || value, function (property) {
var element = serialize(property, value, callback, properties, whitespace, indentation, stack);
if (element !== undef) {
results.push(quote(property) + ":" + (whitespace ? " " : "") + element);
}
});
result = results.length ? (whitespace ? "{\n" + indentation + results.join(",\n" + indentation) + "\n" + prefix + "}" : ("{" + results.join(",") + "}")) : "{}";
}
stack.pop();
return result;
}
};
exports.stringify = function (source, filter, width) {
var whitespace, callback, properties, className;
if (objectTypes[typeof filter] && filter) {
if ((className = getClass.call(filter)) == functionClass) {
callback = filter;
} else if (className == arrayClass) {
properties = {};
for (var index = 0, length = filter.length, value; index < length; value = filter[index++], ((className = getClass.call(value)), className == stringClass || className == numberClass) && (properties[value] = 1));
}
}
if (width) {
if ((className = getClass.call(width)) == numberClass) {
if ((width -= width % 1) > 0) {
for (whitespace = "", width > 10 && (width = 10); whitespace.length < width; whitespace += " ");
}
} else if (className == stringClass) {
whitespace = width.length <= 10 ? width : width.slice(0, 10);
}
}
return serialize("", (value = {}, value[""] = source, value), callback, properties, whitespace, "", []);
};
}
if (!has("json-parse")) {
var fromCharCode = String.fromCharCode;
var Unescapes = {
92: "\\",
34: '"',
47: "/",
98: "\b",
116: "\t",
110: "\n",
102: "\f",
114: "\r"
};
var Index, Source;
var abort = function () {
Index = Source = null;
throw SyntaxError();
};
var lex = function () {
var source = Source, length = source.length, value, begin, position, isSigned, charCode;
while (Index < length) {
charCode = source.charCodeAt(Index);
switch (charCode) {
case 9: case 10: case 13: case 32:
Index++;
break;
case 123: case 125: case 91: case 93: case 58: case 44:
value = charIndexBuggy ? source.charAt(Index) : source[Index];
Index++;
return value;
case 34:
for (value = "@", Index++; Index < length;) {
charCode = source.charCodeAt(Index);
if (charCode < 32) {
abort();
} else if (charCode == 92) {
charCode = source.charCodeAt(++Index);
switch (charCode) {
case 92: case 34: case 47: case 98: case 116: case 110: case 102: case 114:
value += Unescapes[charCode];
Index++;
break;
case 117:
begin = ++Index;
for (position = Index + 4; Index < position; Index++) {
charCode = source.charCodeAt(Index);
if (!(charCode >= 48 && charCode <= 57 || charCode >= 97 && charCode <= 102 || charCode >= 65 && charCode <= 70)) {
abort();
}
}
value += fromCharCode("0x" + source.slice(begin, Index));
break;
default:
abort();
}
} else {
if (charCode == 34) {
break;
}
charCode = source.charCodeAt(Index);
begin = Index;
while (charCode >= 32 && charCode != 92 && charCode != 34) {
charCode = source.charCodeAt(++Index);
}
value += source.slice(begin, Index);
}
}
if (source.charCodeAt(Index) == 34) {
Index++;
return value;
}
abort();
default:
begin = Index;
if (charCode == 45) {
isSigned = true;
charCode = source.charCodeAt(++Index);
}
if (charCode >= 48 && charCode <= 57) {
if (charCode == 48 && ((charCode = source.charCodeAt(Index + 1)), charCode >= 48 && charCode <= 57)) {
abort();
}
isSigned = false;
for (; Index < length && ((charCode = source.charCodeAt(Index)), charCode >= 48 && charCode <= 57); Index++);
if (source.charCodeAt(Index) == 46) {
position = ++Index;
for (; position < length && ((charCode = source.charCodeAt(position)), charCode >= 48 && charCode <= 57); position++);
if (position == Index) {
abort();
}
Index = position;
}
charCode = source.charCodeAt(Index);
if (charCode == 101 || charCode == 69) {
charCode = source.charCodeAt(++Index);
if (charCode == 43 || charCode == 45) {
Index++;
}
for (position = Index; position < length && ((charCode = source.charCodeAt(position)), charCode >= 48 && charCode <= 57); position++);
if (position == Index) {
abort();
}
Index = position;
}
return +source.slice(begin, Index);
}
if (isSigned) {
abort();
}
if (source.slice(Index, Index + 4) == "true") {
Index += 4;
return true;
} else if (source.slice(Index, Index + 5) == "false") {
Index += 5;
return false;
} else if (source.slice(Index, Index + 4) == "null") {
Index += 4;
return null;
}
abort();
}
}
return "$";
};
var get = function (value) {
var results, hasMembers;
if (value == "$") {
abort();
}
if (typeof value == "string") {
if ((charIndexBuggy ? value.charAt(0) : value[0]) == "@") {
return value.slice(1);
}
if (value == "[") {
results = [];
for (;; hasMembers || (hasMembers = true)) {
value = lex();
if (value == "]") {
break;
}
if (hasMembers) {
if (value == ",") {
value = lex();
if (value == "]") {
abort();
}
} else {
abort();
}
}
if (value == ",") {
abort();
}
results.push(get(value));
}
return results;
} else if (value == "{") {
results = {};
for (;; hasMembers || (hasMembers = true)) {
value = lex();
if (value == "}") {
break;
}
if (hasMembers) {
if (value == ",") {
value = lex();
if (value == "}") {
abort();
}
} else {
abort();
}
}
if (value == "," || typeof value != "string" || (charIndexBuggy ? value.charAt(0) : value[0]) != "@" || lex() != ":") {
abort();
}
results[value.slice(1)] = get(lex());
}
return results;
}
abort();
}
return value;
};
var update = function (source, property, callback) {
var element = walk(source, property, callback);
if (element === undef) {
delete source[property];
} else {
source[property] = element;
}
};
var walk = function (source, property, callback) {
var value = source[property], length;
if (typeof value == "object" && value) {
if (getClass.call(value) == arrayClass) {
for (length = value.length; length--;) {
update(value, length, callback);
}
} else {
forEach(value, function (property) {
update(value, property, callback);
});
}
}
return callback.call(source, property, value);
};
exports.parse = function (source, callback) {
var result, value;
Index = 0;
Source = "" + source;
result = get(lex());
if (lex() != "$") {
abort();
}
Index = Source = null;
return callback && getClass.call(callback) == functionClass ? walk((value = {}, value[""] = result, value), "", callback) : result;
};
}
}
exports["runInContext"] = runInContext;
return exports;
}
if (freeExports && !isLoader) {
runInContext(root, freeExports);
} else {
var nativeJSON = root.JSON,
previousJSON = root["JSON3"],
isRestored = false;
var JSON3 = runInContext(root, (root["JSON3"] = {
"noConflict": function () {
if (!isRestored) {
isRestored = true;
root.JSON = nativeJSON;
root["JSON3"] = previousJSON;
nativeJSON = previousJSON = null;
}
return JSON3;
}
}));
root.JSON = {
"parse": JSON3.parse,
"stringify": JSON3.stringify
};
}
if (false) {
(function(){
return JSON3;
});
}
}).call(this);
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],4:[function(require,module,exports){
/**
* Reduce `arr` with `fn`.
*
* @param {Array} arr
* @param {Function} fn
* @param {Mixed} initial
*
 * TODO: compatible error handling?
*/
module.exports = function(arr, fn, initial){
var idx = 0;
var len = arr.length;
var curr = arguments.length == 3
? initial
: arr[idx++];
while (idx < len) {
curr = fn.call(null, curr, arr[idx], ++idx, arr);
}
return curr;
};
},{}],5:[function(require,module,exports){
/**
* Copyright (c) 2011-2014 Felix Gnass
* Licensed under the MIT license
* http://spin.js.org/
*
* Example:
var opts = {
lines: 12
, length: 7
, width: 5
, radius: 10
, scale: 1.0
, corners: 1
, color: '#000'
, opacity: 1/4
, rotate: 0
, direction: 1
, speed: 1
, trail: 100
, fps: 20
, zIndex: 2e9
, className: 'spinner'
, top: '50%'
, left: '50%'
, shadow: false
, hwaccel: false
, position: 'absolute'
}
var target = document.getElementById('foo')
var spinner = new Spinner(opts).spin(target)
*/
;(function (root, factory) {
/* CommonJS */
if (typeof module == 'object' && module.exports) module.exports = factory()
/* AMD module */
else if (typeof define == 'function' && define.amd) {}
/* Browser global */
else root.Spinner = factory()
}(this, function () {
"use strict"
var prefixes = ['webkit', 'Moz', 'ms', 'O'] /* Vendor prefixes */
, animations = {} /* Animation rules keyed by their name */
, useCssAnimations /* Whether to use CSS animations or setTimeout */
, sheet /* A stylesheet to hold the @keyframe or VML rules. */
/**
* Utility function to create elements. If no tag name is given,
* a DIV is created. Optionally properties can be passed.
*/
function createEl (tag, prop) {
var el = document.createElement(tag || 'div')
, n
for (n in prop) el[n] = prop[n]
return el
}
/**
* Appends children and returns the parent.
*/
function ins (parent /* child1, child2, ...*/) {
for (var i = 1, n = arguments.length; i < n; i++) {
parent.appendChild(arguments[i])
}
return parent
}
/**
* Creates an opacity keyframe animation rule and returns its name.
* Since most mobile Webkits have timing issues with animation-delay,
* we create separate rules for each line/segment.
*/
function addAnimation (alpha, trail, i, lines) {
var name = ['opacity', trail, ~~(alpha * 100), i, lines].join('-')
, start = 0.01 + i/lines * 100
, z = Math.max(1 - (1-alpha) / trail * (100-start), alpha)
, prefix = useCssAnimations.substring(0, useCssAnimations.indexOf('Animation')).toLowerCase()
, pre = prefix && '-' + prefix + '-' || ''
if (!animations[name]) {
sheet.insertRule(
'@' + pre + 'keyframes ' + name + '{' +
'0%{opacity:' + z + '}' +
start + '%{opacity:' + alpha + '}' +
(start+0.01) + '%{opacity:1}' +
(start+trail) % 100 + '%{opacity:' + alpha + '}' +
'100%{opacity:' + z + '}' +
'}', sheet.cssRules.length)
animations[name] = 1
}
return name
}
/**
* Tries various vendor prefixes and returns the first supported property.
*/
function vendor (el, prop) {
var s = el.style
, pp
, i
prop = prop.charAt(0).toUpperCase() + prop.slice(1)
if (s[prop] !== undefined) return prop
for (i = 0; i < prefixes.length; i++) {
pp = prefixes[i]+prop
if (s[pp] !== undefined) return pp
}
}
/**
* Sets multiple style properties at once.
*/
function css (el, prop) {
for (var n in prop) {
el.style[vendor(el, n) || n] = prop[n]
}
return el
}
/**
* Fills in default values.
*/
function merge (obj) {
for (var i = 1; i < arguments.length; i++) {
var def = arguments[i]
for (var n in def) {
if (obj[n] === undefined) obj[n] = def[n]
}
}
return obj
}
/**
* Returns the line color from the given string or array.
*/
function getColor (color, idx) {
return typeof color == 'string' ? color : color[idx % color.length]
}
var defaults = {
lines: 12
, length: 7
, width: 5
, radius: 10
, scale: 1.0
, corners: 1
, color: '#000'
, opacity: 1/4
, rotate: 0
, direction: 1
, speed: 1
, trail: 100
, fps: 20
, zIndex: 2e9
, className: 'spinner'
, top: '50%'
, left: '50%'
, shadow: false
, hwaccel: false
, position: 'absolute'
}
/** The constructor */
function Spinner (o) {
this.opts = merge(o || {}, Spinner.defaults, defaults)
}
Spinner.defaults = {}
merge(Spinner.prototype, {
/**
* Adds the spinner to the given target element. If this instance is already
   * spinning, it is automatically removed from its previous target by calling
* stop() internally.
*/
spin: function (target) {
this.stop()
var self = this
, o = self.opts
, el = self.el = createEl(null, {className: o.className})
css(el, {
position: o.position
, width: 0
, zIndex: o.zIndex
, left: o.left
, top: o.top
})
if (target) {
target.insertBefore(el, target.firstChild || null)
}
el.setAttribute('role', 'progressbar')
self.lines(el, self.opts)
if (!useCssAnimations) {
var i = 0
, start = (o.lines - 1) * (1 - o.direction) / 2
, alpha
, fps = o.fps
, f = fps / o.speed
, ostep = (1 - o.opacity) / (f * o.trail / 100)
, astep = f / o.lines
;(function anim () {
i++
for (var j = 0; j < o.lines; j++) {
alpha = Math.max(1 - (i + (o.lines - j) * astep) % f * ostep, o.opacity)
self.opacity(el, j * o.direction + start, alpha, o)
}
self.timeout = self.el && setTimeout(anim, ~~(1000 / fps))
})()
}
return self
}
/**
* Stops and removes the Spinner.
*/
, stop: function () {
var el = this.el
if (el) {
clearTimeout(this.timeout)
if (el.parentNode) el.parentNode.removeChild(el)
this.el = undefined
}
return this
}
/**
* Internal method that draws the individual lines. Will be overwritten
* in VML fallback mode below.
*/
, lines: function (el, o) {
var i = 0
, start = (o.lines - 1) * (1 - o.direction) / 2
, seg
function fill (color, shadow) {
return css(createEl(), {
position: 'absolute'
, width: o.scale * (o.length + o.width) + 'px'
, height: o.scale * o.width + 'px'
, background: color
, boxShadow: shadow
, transformOrigin: 'left'
, transform: 'rotate(' + ~~(360/o.lines*i + o.rotate) + 'deg) translate(' + o.scale*o.radius + 'px' + ',0)'
, borderRadius: (o.corners * o.scale * o.width >> 1) + 'px'
})
}
for (; i < o.lines; i++) {
seg = css(createEl(), {
position: 'absolute'
, top: 1 + ~(o.scale * o.width / 2) + 'px'
, transform: o.hwaccel ? 'translate3d(0,0,0)' : ''
, opacity: o.opacity
, animation: useCssAnimations && addAnimation(o.opacity, o.trail, start + i * o.direction, o.lines) + ' ' + 1 / o.speed + 's linear infinite'
})
if (o.shadow) ins(seg, css(fill('#000', '0 0 4px #000'), {top: '2px'}))
ins(el, ins(seg, fill(getColor(o.color, i), '0 0 1px rgba(0,0,0,.1)')))
}
return el
}
/**
* Internal method that adjusts the opacity of a single line.
* Will be overwritten in VML fallback mode below.
*/
, opacity: function (el, i, val) {
if (i < el.childNodes.length) el.childNodes[i].style.opacity = val
}
})
function initVML () {
/* Utility function to create a VML tag */
function vml (tag, attr) {
return createEl('<' + tag + ' xmlns="urn:schemas-microsoft.com:vml" class="spin-vml">', attr)
}
sheet.addRule('.spin-vml', 'behavior:url(#default#VML)')
Spinner.prototype.lines = function (el, o) {
var r = o.scale * (o.length + o.width)
, s = o.scale * 2 * r
function grp () {
return css(
vml('group', {
coordsize: s + ' ' + s
, coordorigin: -r + ' ' + -r
})
, { width: s, height: s }
)
}
var margin = -(o.width + o.length) * o.scale * 2 + 'px'
, g = css(grp(), {position: 'absolute', top: margin, left: margin})
, i
function seg (i, dx, filter) {
ins(
g
, ins(
css(grp(), {rotation: 360 / o.lines * i + 'deg', left: ~~dx})
, ins(
css(
vml('roundrect', {arcsize: o.corners})
, { width: r
, height: o.scale * o.width
, left: o.scale * o.radius
, top: -o.scale * o.width >> 1
, filter: filter
}
)
, vml('fill', {color: getColor(o.color, i), opacity: o.opacity})
, vml('stroke', {opacity: 0})
)
)
)
}
if (o.shadow)
for (i = 1; i <= o.lines; i++) {
seg(i, -2, 'progid:DXImageTransform.Microsoft.Blur(pixelradius=2,makeshadow=1,shadowopacity=.3)')
}
for (i = 1; i <= o.lines; i++) seg(i)
return ins(el, g)
}
Spinner.prototype.opacity = function (el, i, val, o) {
var c = el.firstChild
o = o.shadow && o.lines || 0
if (c && i + o < c.childNodes.length) {
c = c.childNodes[i + o]; c = c && c.firstChild; c = c && c.firstChild
if (c) c.opacity = val
}
}
}
if (typeof document !== 'undefined') {
sheet = (function () {
var el = createEl('style', {type : 'text/css'})
ins(document.getElementsByTagName('head')[0], el)
return el.sheet || el.styleSheet
}())
var probe = css(createEl('group'), {behavior: 'url(#default#VML)'})
if (!vendor(probe, 'transform') && probe.adj) initVML()
else useCssAnimations = vendor(probe, 'animation')
}
return Spinner
}));
},{}],6:[function(require,module,exports){
/**
* Module dependencies.
*/
var Emitter = require('emitter');
var reduce = require('reduce');
var requestBase = require('./request-base');
var isObject = require('./is-object');
/**
* Root reference for iframes.
*/
var root;
if (typeof window !== 'undefined') {
root = window;
} else if (typeof self !== 'undefined') {
root = self;
} else {
root = this;
}
/**
* Noop.
*/
function noop(){};
/**
* Check if `obj` is a host object,
* we don't want to serialize these :)
*
* TODO: future proof, move to compoent land
*
* @param {Object} obj
* @return {Boolean}
* @api private
*/
function isHost(obj) {
var str = {}.toString.call(obj);
switch (str) {
case '[object File]':
case '[object Blob]':
case '[object FormData]':
return true;
default:
return false;
}
}
/**
* Expose `request`.
*/
var request = module.exports = require('./request').bind(null, Request);
/**
* Determine XHR.
*/
request.getXHR = function () {
if (root.XMLHttpRequest
&& (!root.location || 'file:' != root.location.protocol
|| !root.ActiveXObject)) {
return new XMLHttpRequest;
} else {
try { return new ActiveXObject('Microsoft.XMLHTTP'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP.6.0'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP.3.0'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP'); } catch(e) {}
}
return false;
};
/**
* Removes leading and trailing whitespace, added to support IE.
*
* @param {String} s
* @return {String}
* @api private
*/
var trim = ''.trim
? function(s) { return s.trim(); }
: function(s) { return s.replace(/(^\s*|\s*$)/g, ''); };
/**
* Serialize the given `obj`.
*
* @param {Object} obj
* @return {String}
* @api private
*/
function serialize(obj) {
if (!isObject(obj)) return obj;
var pairs = [];
for (var key in obj) {
if (null != obj[key]) {
pushEncodedKeyValuePair(pairs, key, obj[key]);
}
}
return pairs.join('&');
}
/**
* Helps 'serialize' with serializing arrays.
* Mutates the pairs array.
*
* @param {Array} pairs
* @param {String} key
* @param {Mixed} val
*/
function pushEncodedKeyValuePair(pairs, key, val) {
if (Array.isArray(val)) {
return val.forEach(function(v) {
pushEncodedKeyValuePair(pairs, key, v);
});
}
pairs.push(encodeURIComponent(key)
+ '=' + encodeURIComponent(val));
}
/**
* Expose serialization method.
*/
request.serializeObject = serialize;
/**
* Parse the given x-www-form-urlencoded `str`.
*
* @param {String} str
* @return {Object}
* @api private
*/
function parseString(str) {
var obj = {};
var pairs = str.split('&');
var parts;
var pair;
for (var i = 0, len = pairs.length; i < len; ++i) {
pair = pairs[i];
parts = pair.split('=');
obj[decodeURIComponent(parts[0])] = decodeURIComponent(parts[1]);
}
return obj;
}
/**
* Expose parser.
*/
request.parseString = parseString;
/**
* Default MIME type map.
*
* superagent.types.xml = 'application/xml';
*
*/
request.types = {
html: 'text/html',
json: 'application/json',
xml: 'application/xml',
urlencoded: 'application/x-www-form-urlencoded',
'form': 'application/x-www-form-urlencoded',
'form-data': 'application/x-www-form-urlencoded'
};
/**
* Default serialization map.
*
* superagent.serialize['application/xml'] = function(obj){
* return 'generated xml here';
* };
*
*/
request.serialize = {
'application/x-www-form-urlencoded': serialize,
'application/json': JSON.stringify
};
/**
* Default parsers.
*
* superagent.parse['application/xml'] = function(str){
* return { object parsed from str };
* };
*
*/
request.parse = {
'application/x-www-form-urlencoded': parseString,
'application/json': JSON.parse
};
/**
* Parse the given header `str` into
* an object containing the mapped fields.
*
* @param {String} str
* @return {Object}
* @api private
*/
function parseHeader(str) {
var lines = str.split(/\r?\n/);
var fields = {};
var index;
var line;
var field;
var val;
lines.pop();
for (var i = 0, len = lines.length; i < len; ++i) {
line = lines[i];
index = line.indexOf(':');
field = line.slice(0, index).toLowerCase();
val = trim(line.slice(index + 1));
fields[field] = val;
}
return fields;
}
/**
* Check if `mime` is json or has +json structured syntax suffix.
*
* @param {String} mime
* @return {Boolean}
* @api private
*/
function isJSON(mime) {
return /[\/+]json\b/.test(mime);
}
/**
* Return the mime type for the given `str`.
*
* @param {String} str
* @return {String}
* @api private
*/
function type(str){
return str.split(/ *; */).shift();
};
/**
* Return header field parameters.
*
* @param {String} str
* @return {Object}
* @api private
*/
function params(str){
return reduce(str.split(/ *; */), function(obj, str){
var parts = str.split(/ *= */)
, key = parts.shift()
, val = parts.shift();
if (key && val) obj[key] = val;
return obj;
}, {});
};
/**
* Initialize a new `Response` with the given `xhr`.
*
* - set flags (.ok, .error, etc)
* - parse header
*
* Examples:
*
* Aliasing `superagent` as `request` is nice:
*
* request = superagent;
*
* We can use the promise-like API, or pass callbacks:
*
* request.get('/').end(function(res){});
* request.get('/', function(res){});
*
* Sending data can be chained:
*
* request
* .post('/user')
* .send({ name: 'tj' })
* .end(function(res){});
*
* Or passed to `.send()`:
*
* request
* .post('/user')
* .send({ name: 'tj' }, function(res){});
*
* Or passed to `.post()`:
*
* request
* .post('/user', { name: 'tj' })
* .end(function(res){});
*
* Or further reduced to a single call for simple cases:
*
* request
* .post('/user', { name: 'tj' }, function(res){});
*
* @param {XMLHTTPRequest} xhr
* @param {Object} options
* @api private
*/
function Response(req, options) {
options = options || {};
this.req = req;
this.xhr = this.req.xhr;
this.text = ((this.req.method !='HEAD' && (this.xhr.responseType === '' || this.xhr.responseType === 'text')) || typeof this.xhr.responseType === 'undefined')
? this.xhr.responseText
: null;
this.statusText = this.req.xhr.statusText;
this.setStatusProperties(this.xhr.status);
this.header = this.headers = parseHeader(this.xhr.getAllResponseHeaders());
this.header['content-type'] = this.xhr.getResponseHeader('content-type');
this.setHeaderProperties(this.header);
this.body = this.req.method != 'HEAD'
? this.parseBody(this.text ? this.text : this.xhr.response)
: null;
}
/**
* Get case-insensitive `field` value.
*
* @param {String} field
* @return {String}
* @api public
*/
Response.prototype.get = function(field){
return this.header[field.toLowerCase()];
};
/**
* Set header related properties:
*
* - `.type` the content type without params
*
* A response of "Content-Type: text/plain; charset=utf-8"
* will provide you with a `.type` of "text/plain".
*
* @param {Object} header
* @api private
*/
Response.prototype.setHeaderProperties = function(header){
var ct = this.header['content-type'] || '';
this.type = type(ct);
var obj = params(ct);
for (var key in obj) this[key] = obj[key];
};
/**
* Parse the given body `str`.
*
* Used for auto-parsing of bodies. Parsers
* are defined on the `superagent.parse` object.
*
* @param {String} str
* @return {Mixed}
* @api private
*/
Response.prototype.parseBody = function(str){
var parse = request.parse[this.type];
if (!parse && isJSON(this.type)) {
parse = request.parse['application/json'];
}
return parse && str && (str.length || str instanceof Object)
? parse(str)
: null;
};
/**
* Set flags such as `.ok` based on `status`.
*
* For example a 2xx response will give you a `.ok` of __true__
* whereas 5xx will be __false__ and `.error` will be __true__. The
* `.clientError` and `.serverError` are also available to be more
* specific, and `.statusType` is the class of error ranging from 1..5
* sometimes useful for mapping respond colors etc.
*
* "sugar" properties are also defined for common cases. Currently providing:
*
* - .noContent
* - .badRequest
* - .unauthorized
* - .notAcceptable
* - .notFound
*
* @param {Number} status
* @api private
*/
Response.prototype.setStatusProperties = function(status){
if (status === 1223) {
status = 204;
}
var type = status / 100 | 0;
this.status = this.statusCode = status;
this.statusType = type;
this.info = 1 == type;
this.ok = 2 == type;
this.clientError = 4 == type;
this.serverError = 5 == type;
this.error = (4 == type || 5 == type)
? this.toError()
: false;
this.accepted = 202 == status;
this.noContent = 204 == status;
this.badRequest = 400 == status;
this.unauthorized = 401 == status;
this.notAcceptable = 406 == status;
this.notFound = 404 == status;
this.forbidden = 403 == status;
};
/**
* Return an `Error` representative of this response.
*
* @return {Error}
* @api public
*/
Response.prototype.toError = function(){
var req = this.req;
var method = req.method;
var url = req.url;
var msg = 'cannot ' + method + ' ' + url + ' (' + this.status + ')';
var err = new Error(msg);
err.status = this.status;
err.method = method;
err.url = url;
return err;
};
/**
* Expose `Response`.
*/
request.Response = Response;
/**
* Initialize a new `Request` with the given `method` and `url`.
*
* @param {String} method
* @param {String} url
* @api public
*/
function Request(method, url) {
var self = this;
this._query = this._query || [];
this.method = method;
this.url = url;
this.header = {};
this._header = {};
this.on('end', function(){
var err = null;
var res = null;
try {
res = new Response(self);
} catch(e) {
err = new Error('Parser is unable to parse the response');
err.parse = true;
err.original = e;
err.rawResponse = self.xhr && self.xhr.responseText ? self.xhr.responseText : null;
err.statusCode = self.xhr && self.xhr.status ? self.xhr.status : null;
return self.callback(err);
}
self.emit('response', res);
if (err) {
return self.callback(err, res);
}
if (res.status >= 200 && res.status < 300) {
return self.callback(err, res);
}
var new_err = new Error(res.statusText || 'Unsuccessful HTTP response');
new_err.original = err;
new_err.response = res;
new_err.status = res.status;
self.callback(new_err, res);
});
}
/**
* Mixin `Emitter` and `requestBase`.
*/
Emitter(Request.prototype);
for (var key in requestBase) {
Request.prototype[key] = requestBase[key];
}
/**
* Abort the request, and clear potential timeout.
*
* @return {Request}
* @api public
*/
Request.prototype.abort = function(){
if (this.aborted) return;
this.aborted = true;
this.xhr.abort();
this.clearTimeout();
this.emit('abort');
return this;
};
/**
* Set Content-Type to `type`, mapping values from `request.types`.
*
* Examples:
*
* superagent.types.xml = 'application/xml';
*
* request.post('/')
* .type('xml')
* .send(xmlstring)
* .end(callback);
*
* request.post('/')
* .type('application/xml')
* .send(xmlstring)
* .end(callback);
*
* @param {String} type
* @return {Request} for chaining
* @api public
*/
Request.prototype.type = function(type){
this.set('Content-Type', request.types[type] || type);
return this;
};
/**
* Set responseType to `val`. Presently valid responseTypes are 'blob' and
* 'arraybuffer'.
*
* Examples:
*
* req.get('/')
* .responseType('blob')
* .end(callback);
*
* @param {String} val
* @return {Request} for chaining
* @api public
*/
Request.prototype.responseType = function(val){
this._responseType = val;
return this;
};
/**
* Set Accept to `type`, mapping values from `request.types`.
*
* Examples:
*
* superagent.types.json = 'application/json';
*
* request.get('/agent')
* .accept('json')
* .end(callback);
*
* request.get('/agent')
* .accept('application/json')
* .end(callback);
*
* @param {String} accept
* @return {Request} for chaining
* @api public
*/
Request.prototype.accept = function(type){
this.set('Accept', request.types[type] || type);
return this;
};
/**
* Set Authorization field value with `user` and `pass`.
*
* @param {String} user
* @param {String} pass
* @param {Object} options with 'type' property 'auto' or 'basic' (default 'basic')
* @return {Request} for chaining
* @api public
*/
Request.prototype.auth = function(user, pass, options){
if (!options) {
options = {
type: 'basic'
}
}
switch (options.type) {
case 'basic':
var str = btoa(user + ':' + pass);
this.set('Authorization', 'Basic ' + str);
break;
case 'auto':
this.username = user;
this.password = pass;
break;
}
return this;
};
/**
* Add query-string `val`.
*
* Examples:
*
* request.get('/shoes')
* .query('size=10')
* .query({ color: 'blue' })
*
* @param {Object|String} val
* @return {Request} for chaining
* @api public
*/
Request.prototype.query = function(val){
if ('string' != typeof val) val = serialize(val);
if (val) this._query.push(val);
return this;
};
/**
* Queue the given `file` as an attachment to the specified `field`,
* with optional `filename`.
*
* ``` js
* request.post('/upload')
* .attach(new Blob(['<a id="a"><b id="b">hey!</b></a>'], { type: "text/html"}))
* .end(callback);
* ```
*
* @param {String} field
* @param {Blob|File} file
* @param {String} filename
* @return {Request} for chaining
* @api public
*/
Request.prototype.attach = function(field, file, filename){
this._getFormData().append(field, file, filename || file.name);
return this;
};
Request.prototype._getFormData = function(){
if (!this._formData) {
this._formData = new root.FormData();
}
return this._formData;
};
/**
* Send `data` as the request body, defaulting the `.type()` to "json" when
* an object is given.
*
* Examples:
*
*
* request.post('/user')
* .type('json')
* .send('{"name":"tj"}')
* .end(callback)
*
*
* request.post('/user')
* .send({ name: 'tj' })
* .end(callback)
*
*
* request.post('/user')
* .type('form')
* .send('name=tj')
* .end(callback)
*
*
* request.post('/user')
* .type('form')
* .send({ name: 'tj' })
* .end(callback)
*
*
* request.post('/user')
* .send('name=tobi')
* .send('species=ferret')
* .end(callback)
*
* @param {String|Object} data
* @return {Request} for chaining
* @api public
*/
Request.prototype.send = function(data){
var obj = isObject(data);
var type = this._header['content-type'];
if (obj && isObject(this._data)) {
for (var key in data) {
this._data[key] = data[key];
}
} else if ('string' == typeof data) {
if (!type) this.type('form');
type = this._header['content-type'];
if ('application/x-www-form-urlencoded' == type) {
this._data = this._data
? this._data + '&' + data
: data;
} else {
this._data = (this._data || '') + data;
}
} else {
this._data = data;
}
if (!obj || isHost(data)) return this;
if (!type) this.type('json');
return this;
};
/**
* @deprecated
*/
Response.prototype.parse = function serialize(fn){
if (root.console) {
console.warn("Client-side parse() method has been renamed to serialize(). This method is not compatible with superagent v2.0");
}
this.serialize(fn);
return this;
};
Response.prototype.serialize = function serialize(fn){
this._parser = fn;
return this;
};
/**
* Invoke the callback with `err` and `res`
* and handle arity check.
*
* @param {Error} err
* @param {Response} res
* @api private
*/
Request.prototype.callback = function(err, res){
var fn = this._callback;
this.clearTimeout();
fn(err, res);
};
/**
* Invoke callback with x-domain error.
*
* @api private
*/
Request.prototype.crossDomainError = function(){
var err = new Error('Request has been terminated\nPossible causes: the network is offline, Origin is not allowed by Access-Control-Allow-Origin, the page is being unloaded, etc.');
err.crossDomain = true;
err.status = this.status;
err.method = this.method;
err.url = this.url;
this.callback(err);
};
/**
* Invoke callback with timeout error.
*
* @api private
*/
Request.prototype.timeoutError = function(){
var timeout = this._timeout;
var err = new Error('timeout of ' + timeout + 'ms exceeded');
err.timeout = timeout;
this.callback(err);
};
/**
* Enable transmission of cookies with x-domain requests.
*
* Note that for this to work the origin must not be
* using "Access-Control-Allow-Origin" with a wildcard,
* and also must set "Access-Control-Allow-Credentials"
* to "true".
*
* @api public
*/
Request.prototype.withCredentials = function(){
this._withCredentials = true;
return this;
};
/**
* Initiate request, invoking callback `fn(res)`
* with an instanceof `Response`.
*
* @param {Function} fn
* @return {Request} for chaining
* @api public
*/
Request.prototype.end = function(fn){
var self = this;
var xhr = this.xhr = request.getXHR();
var query = this._query.join('&');
var timeout = this._timeout;
var data = this._formData || this._data;
this._callback = fn || noop;
xhr.onreadystatechange = function(){
if (4 != xhr.readyState) return;
var status;
try { status = xhr.status } catch(e) { status = 0; }
if (0 == status) {
if (self.timedout) return self.timeoutError();
if (self.aborted) return;
return self.crossDomainError();
}
self.emit('end');
};
var handleProgress = function(e){
if (e.total > 0) {
e.percent = e.loaded / e.total * 100;
}
e.direction = 'download';
self.emit('progress', e);
};
if (this.hasListeners('progress')) {
xhr.onprogress = handleProgress;
}
try {
if (xhr.upload && this.hasListeners('progress')) {
xhr.upload.onprogress = handleProgress;
}
} catch(e) {
}
if (timeout && !this._timer) {
this._timer = setTimeout(function(){
self.timedout = true;
self.abort();
}, timeout);
}
if (query) {
query = request.serializeObject(query);
this.url += ~this.url.indexOf('?')
? '&' + query
: '?' + query;
}
if (this.username && this.password) {
xhr.open(this.method, this.url, true, this.username, this.password);
} else {
xhr.open(this.method, this.url, true);
}
if (this._withCredentials) xhr.withCredentials = true;
if ('GET' != this.method && 'HEAD' != this.method && 'string' != typeof data && !isHost(data)) {
var contentType = this._header['content-type'];
var serialize = this._parser || request.serialize[contentType ? contentType.split(';')[0] : ''];
if (!serialize && isJSON(contentType)) serialize = request.serialize['application/json'];
if (serialize) data = serialize(data);
}
for (var field in this.header) {
if (null == this.header[field]) continue;
xhr.setRequestHeader(field, this.header[field]);
}
if (this._responseType) {
xhr.responseType = this._responseType;
}
this.emit('request', this);
xhr.send(typeof data !== 'undefined' ? data : null);
return this;
};
/**
* Expose `Request`.
*/
request.Request = Request;
/**
* GET `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.get = function(url, data, fn){
var req = request('GET', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.query(data);
if (fn) req.end(fn);
return req;
};
/**
* HEAD `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.head = function(url, data, fn){
var req = request('HEAD', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* DELETE `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Function} fn
* @return {Request}
* @api public
*/
function del(url, fn){
var req = request('DELETE', url);
if (fn) req.end(fn);
return req;
};
request['del'] = del;
request['delete'] = del;
/**
* PATCH `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed} data
* @param {Function} fn
* @return {Request}
* @api public
*/
request.patch = function(url, data, fn){
var req = request('PATCH', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* POST `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed} data
* @param {Function} fn
* @return {Request}
* @api public
*/
request.post = function(url, data, fn){
var req = request('POST', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
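// [Editor's sketch] request.post() sending a JSON body, per the signature above
// (illustrative endpoint and payload):
//
//   request.post('/users', { name: 'tobi' }, function(res){
//     console.log(res.status);
//   });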
/**
* PUT `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.put = function(url, data, fn){
var req = request('PUT', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
  return req;
};
},{"./is-object":7,"./request":9,"./request-base":8,"emitter":1,"reduce":4}],7:[function(require,module,exports){
/**
* Check if `obj` is an object.
*
* @param {Object} obj
* @return {Boolean}
* @api private
*/
function isObject(obj) {
return null != obj && 'object' == typeof obj;
}
module.exports = isObject;
},{}],8:[function(require,module,exports){
/**
* Module of mixed-in functions shared between node and client code
*/
var isObject = require('./is-object');
/**
* Clear previous timeout.
*
* @return {Request} for chaining
* @api public
*/
exports.clearTimeout = function _clearTimeout(){
this._timeout = 0;
clearTimeout(this._timer);
return this;
};
/**
* Force given parser
*
* Sets the body parser no matter type.
*
* @param {Function}
* @api public
*/
exports.parse = function parse(fn){
this._parser = fn;
return this;
};
/**
* Set timeout to `ms`.
*
* @param {Number} ms
* @return {Request} for chaining
* @api public
*/
exports.timeout = function timeout(ms){
this._timeout = ms;
return this;
};
/**
* Faux promise support
*
* @param {Function} fulfill
* @param {Function} reject
* @return {Request}
*/
exports.then = function then(fulfill, reject) {
return this.end(function(err, res) {
err ? reject(err) : fulfill(res);
});
}
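// [Editor's sketch] The faux promise helper above simply routes .end()'s
// (err, res) callback into fulfill/reject:
//
//   request.get('/users').then(
//     function(res){ console.log('ok', res.status); },
//     function(err){ console.error('failed', err); }
//   );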
/**
* Allow for extension
*/
exports.use = function use(fn) {
fn(this);
return this;
}
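// [Editor's sketch] .use() passes the request instance to a plugin function
// before it is sent (illustrative header name):
//
//   request.get('/users')
//     .use(function(req){ req.set('X-Debug', '1'); })
//     .end(function(res){ /* ... */ });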
/**
* Get request header `field`.
* Case-insensitive.
*
* @param {String} field
* @return {String}
* @api public
*/
exports.get = function(field){
return this._header[field.toLowerCase()];
};
/**
* Get case-insensitive header `field` value.
* This is a deprecated internal API. Use `.get(field)` instead.
*
* (getHeader is no longer used internally by the superagent code base)
*
* @param {String} field
* @return {String}
* @api private
* @deprecated
*/
exports.getHeader = exports.get;
/**
* Set header `field` to `val`, or multiple fields with one object.
* Case-insensitive.
*
* Examples:
*
* req.get('/')
* .set('Accept', 'application/json')
* .set('X-API-Key', 'foobar')
* .end(callback);
*
* req.get('/')
* .set({ Accept: 'application/json', 'X-API-Key': 'foobar' })
* .end(callback);
*
* @param {String|Object} field
* @param {String} val
* @return {Request} for chaining
* @api public
*/
exports.set = function(field, val){
if (isObject(field)) {
for (var key in field) {
this.set(key, field[key]);
}
return this;
}
this._header[field.toLowerCase()] = val;
this.header[field] = val;
return this;
};
/**
* Remove header `field`.
* Case-insensitive.
*
* Example:
*
* req.get('/')
* .unset('User-Agent')
* .end(callback);
*
* @param {String} field
*/
exports.unset = function(field){
delete this._header[field.toLowerCase()];
delete this.header[field];
return this;
};
/**
* Write the field `name` and `val` for "multipart/form-data"
* request bodies.
*
* ``` js
* request.post('/upload')
* .field('foo', 'bar')
* .end(callback);
* ```
*
* @param {String} name
* @param {String|Blob|File|Buffer|fs.ReadStream} val
* @return {Request} for chaining
* @api public
*/
exports.field = function(name, val) {
this._getFormData().append(name, val);
return this;
};
},{"./is-object":7}],9:[function(require,module,exports){
/**
* Issue a request:
*
* Examples:
*
* request('GET', '/users').end(callback)
* request('/users').end(callback)
* request('/users', callback)
*
 * @param {Function} RequestConstructor
 * @param {String} method
 * @param {String|Function} url or callback
* @return {Request}
* @api public
*/
function request(RequestConstructor, method, url) {
if ('function' == typeof url) {
return new RequestConstructor('GET', method).end(url);
}
if (2 == arguments.length) {
return new RequestConstructor('GET', method);
}
return new RequestConstructor(method, url);
}
module.exports = request;
},{}],10:[function(require,module,exports){
var Keen = require("./index"),
each = require("./utils/each");
module.exports = function(){
var loaded = window['Keen'] || null,
cached = window['_' + 'Keen'] || null,
clients,
ready;
if (loaded && cached) {
clients = cached['clients'] || {},
ready = cached['ready'] || [];
each(clients, function(client, id){
each(Keen.prototype, function(method, key){
loaded.prototype[key] = method;
});
each(["Query", "Request", "Dataset", "Dataviz"], function(name){
loaded[name] = (Keen[name]) ? Keen[name] : function(){};
});
if (client._config) {
client.configure.call(client, client._config);
}
if (client._setGlobalProperties) {
each(client._setGlobalProperties, function(fn){
client.setGlobalProperties.apply(client, fn);
});
}
if (client._addEvent) {
each(client._addEvent, function(obj){
client.addEvent.apply(client, obj);
});
}
var callback = client._on || [];
if (client._on) {
each(client._on, function(obj){
client.on.apply(client, obj);
});
client.trigger('ready');
}
each(["_config", "_setGlobalProperties", "_addEvent", "_on"], function(name){
if (client[name]) {
client[name] = undefined;
try{
delete client[name];
} catch(e){}
}
});
});
each(ready, function(cb, i){
Keen.once("ready", cb);
});
}
window['_' + 'Keen'] = undefined;
try {
delete window['_' + 'Keen']
} catch(e) {}
};
},{"./index":18,"./utils/each":31}],11:[function(require,module,exports){
module.exports = function(){
return "undefined" == typeof window ? "server" : "browser";
};
},{}],12:[function(require,module,exports){
var each = require('../utils/each'),
json = require('../utils/json-shim');
module.exports = function(params){
var query = [];
each(params, function(value, key){
if ('string' !== typeof value) {
value = json.stringify(value);
}
query.push(key + '=' + encodeURIComponent(value));
});
return '?' + query.join('&');
};
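// [Editor's sketch] Example output of the query-string helper above (required
// elsewhere in this bundle as getQueryString); non-string values are
// JSON-stringified before URI-encoding:
//
//   getQueryString({ api_key: 'KEY', data: { a: 1 } });
//   // => '?api_key=KEY&data=%7B%22a%22%3A1%7D'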
},{"../utils/each":31,"../utils/json-shim":34}],13:[function(require,module,exports){
module.exports = function(){
return new Date().getTimezoneOffset() * -60;
};
},{}],14:[function(require,module,exports){
module.exports = function(){
if ("undefined" !== typeof window) {
if (navigator.userAgent.indexOf('MSIE') !== -1 || navigator.appVersion.indexOf('Trident/') > 0) {
return 2000;
}
}
return 16000;
};
},{}],15:[function(require,module,exports){
module.exports = function() {
var root = "undefined" == typeof window ? this : window;
if (root.XMLHttpRequest && ("file:" != root.location.protocol || !root.ActiveXObject)) {
return new XMLHttpRequest;
} else {
try { return new ActiveXObject("Microsoft.XMLHTTP"); } catch(e) {}
try { return new ActiveXObject("Msxml2.XMLHTTP.6.0"); } catch(e) {}
try { return new ActiveXObject("Msxml2.XMLHTTP.3.0"); } catch(e) {}
try { return new ActiveXObject("Msxml2.XMLHTTP"); } catch(e) {}
}
return false;
};
},{}],16:[function(require,module,exports){
module.exports = function(err, res, callback) {
var cb = callback || function() {};
if (res && !res.ok) {
var is_err = res.body && res.body.error_code;
err = new Error(is_err ? res.body.message : 'Unknown error occurred');
err.code = is_err ? res.body.error_code : 'UnknownError';
}
if (err) {
cb(err, null);
}
else {
cb(null, res.body);
}
return;
};
},{}],17:[function(require,module,exports){
var superagent = require('superagent');
var each = require('../utils/each'),
getXHR = require('./get-xhr-object');
module.exports = function(type, opts){
return function(request) {
var __super__ = request.constructor.prototype.end;
if ( typeof window === 'undefined' ) return;
request.requestType = request.requestType || {};
request.requestType['type'] = type;
request.requestType['options'] = request.requestType['options'] || {
async: true,
success: {
responseText: '{ "created": true }',
status: 201
},
error: {
responseText: '{ "error_code": "ERROR", "message": "Request failed" }',
status: 404
}
};
if (opts) {
if ( typeof opts.async === 'boolean' ) {
request.requestType['options'].async = opts.async;
}
if ( opts.success ) {
extend(request.requestType['options'].success, opts.success);
}
if ( opts.error ) {
extend(request.requestType['options'].error, opts.error);
}
}
request.end = function(fn){
var self = this,
reqType = (this.requestType) ? this.requestType['type'] : 'xhr',
query,
timeout;
if ( ('GET' !== self['method'] || reqType === 'xhr' ) && self.requestType['options'].async ) {
__super__.call(self, fn);
return;
}
query = self._query.join('&');
timeout = self._timeout;
self._callback = fn || noop;
if (timeout && !self._timer) {
self._timer = setTimeout(function(){
abortRequest.call(self);
}, timeout);
}
if (query) {
query = superagent.serializeObject(query);
self.url += ~self.url.indexOf('?') ? '&' + query : '?' + query;
}
self.emit('request', self);
if ( !self.requestType['options'].async ) {
sendXhrSync.call(self);
}
else if ( reqType === 'jsonp' ) {
sendJsonp.call(self);
}
else if ( reqType === 'beacon' ) {
sendBeacon.call(self);
}
return self;
};
return request;
};
};
function sendXhrSync(){
var xhr = getXHR();
if (xhr) {
xhr.open('GET', this.url, false);
xhr.send(null);
}
return this;
}
function sendJsonp(){
var self = this,
timestamp = new Date().getTime(),
script = document.createElement('script'),
parent = document.getElementsByTagName('head')[0],
callbackName = 'keenJSONPCallback',
loaded = false;
callbackName += timestamp;
while (callbackName in window) {
callbackName += 'a';
}
window[callbackName] = function(response) {
if (loaded === true) return;
loaded = true;
handleSuccess.call(self, response);
cleanup();
};
script.src = self.url + '&jsonp=' + callbackName;
parent.appendChild(script);
script.onreadystatechange = function() {
    if (loaded === false && this.readyState === 'loaded') { // script element's readyState (old IE), not the request's
loaded = true;
handleError.call(self);
cleanup();
}
};
script.onerror = function() {
if (loaded === false) {
loaded = true;
handleError.call(self);
cleanup();
}
};
function cleanup(){
window[callbackName] = undefined;
try {
delete window[callbackName];
} catch(e){}
parent.removeChild(script);
}
}
function sendBeacon(){
var self = this,
img = document.createElement('img'),
loaded = false;
img.onload = function() {
loaded = true;
if ('naturalHeight' in this) {
if (this.naturalHeight + this.naturalWidth === 0) {
this.onerror();
return;
}
} else if (this.width + this.height === 0) {
this.onerror();
return;
}
handleSuccess.call(self);
};
img.onerror = function() {
loaded = true;
handleError.call(self);
};
img.src = self.url + '&c=clv1';
}
function handleSuccess(res){
var opts = this.requestType['options']['success'],
response = '';
xhrShim.call(this, opts);
if (res) {
try {
response = JSON.stringify(res);
} catch(e) {}
}
else {
response = opts['responseText'];
}
this.xhr.responseText = response;
this.xhr.status = opts['status'];
this.emit('end');
}
function handleError(){
var opts = this.requestType['options']['error'];
xhrShim.call(this, opts);
this.xhr.responseText = opts['responseText'];
this.xhr.status = opts['status'];
this.emit('end');
}
function abortRequest(){
this.aborted = true;
this.clearTimeout();
this.emit('abort');
}
function xhrShim(opts){
this.xhr = {
getAllResponseHeaders: function(){ return ''; },
getResponseHeader: function(){ return 'application/json'; },
responseText: opts['responseText'],
status: opts['status']
};
return this;
}
},{"../utils/each":31,"./get-xhr-object":15,"superagent":6}],18:[function(require,module,exports){
var root = 'undefined' !== typeof window ? window : this;
var previous_Keen = root.Keen;
var Emitter = require('./utils/emitter-shim');
function Keen(config) {
this.configure(config || {});
Keen.trigger('client', this);
}
Keen.debug = false;
Keen.enabled = true;
Keen.loaded = true;
Keen.version = '3.4.1';
Emitter(Keen);
Emitter(Keen.prototype);
Keen.prototype.configure = function(cfg){
var config = cfg || {};
if (config['host']) {
    config['host'] = config['host'].replace(/.*?:\/\//g, '');
}
if (config.protocol && config.protocol === 'auto') {
config['protocol'] = location.protocol.replace(/:/g, '');
}
this.config = {
projectId : config.projectId,
writeKey : config.writeKey,
readKey : config.readKey,
masterKey : config.masterKey,
requestType : config.requestType || 'jsonp',
host : config['host'] || 'api.keen.io/3.0',
protocol : config['protocol'] || 'https',
globalProperties: null
};
if (Keen.debug) {
this.on('error', Keen.log);
}
this.trigger('ready');
};
Keen.prototype.projectId = function(str){
if (!arguments.length) return this.config.projectId;
this.config.projectId = (str ? String(str) : null);
return this;
};
Keen.prototype.masterKey = function(str){
if (!arguments.length) return this.config.masterKey;
this.config.masterKey = (str ? String(str) : null);
return this;
};
Keen.prototype.readKey = function(str){
if (!arguments.length) return this.config.readKey;
this.config.readKey = (str ? String(str) : null);
return this;
};
Keen.prototype.writeKey = function(str){
if (!arguments.length) return this.config.writeKey;
this.config.writeKey = (str ? String(str) : null);
return this;
};
Keen.prototype.url = function(path){
if (!this.projectId()) {
this.trigger('error', 'Client is missing projectId property');
return;
}
return this.config.protocol + '://' + this.config.host + '/projects/' + this.projectId() + path;
};
Keen.log = function(message) {
if (Keen.debug && typeof console == 'object') {
console.log('[Keen IO]', message);
}
};
Keen.noConflict = function(){
root.Keen = previous_Keen;
return Keen;
};
Keen.ready = function(fn){
if (Keen.loaded) {
fn();
} else {
Keen.once('ready', fn);
}
};
module.exports = Keen;
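// [Editor's sketch] Minimal client configuration for the constructor above;
// the projectId/writeKey values are placeholders, not real credentials:
//
//   var client = new Keen({
//     projectId: 'YOUR_PROJECT_ID',
//     writeKey: 'YOUR_WRITE_KEY',
//     requestType: 'jsonp'  // default; 'xhr' and 'beacon' are also handled
//   });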
},{"./utils/emitter-shim":32}],19:[function(require,module,exports){
var json = require('../utils/json-shim');
var request = require('superagent');
var Keen = require('../index');
var base64 = require('../utils/base64'),
each = require('../utils/each'),
getContext = require('../helpers/get-context'),
getQueryString = require('../helpers/get-query-string'),
getUrlMaxLength = require('../helpers/get-url-max-length'),
getXHR = require('../helpers/get-xhr-object'),
requestTypes = require('../helpers/superagent-request-types'),
responseHandler = require('../helpers/superagent-handle-response');
module.exports = function(collection, payload, callback, async) {
var self = this,
urlBase = this.url('/events/' + encodeURIComponent(collection)),
reqType = this.config.requestType,
data = {},
cb = callback,
isAsync,
getUrl;
isAsync = ('boolean' === typeof async) ? async : true;
if (!Keen.enabled) {
handleValidationError.call(self, 'Keen.enabled = false');
return;
}
if (!self.projectId()) {
handleValidationError.call(self, 'Missing projectId property');
return;
}
if (!self.writeKey()) {
handleValidationError.call(self, 'Missing writeKey property');
return;
}
if (!collection || typeof collection !== 'string') {
handleValidationError.call(self, 'Collection name must be a string');
return;
}
if (self.config.globalProperties) {
data = self.config.globalProperties(collection);
}
each(payload, function(value, key){
data[key] = value;
});
if ( !getXHR() && 'xhr' === reqType ) {
reqType = 'jsonp';
}
if ( 'xhr' !== reqType || !isAsync ) {
getUrl = prepareGetRequest.call(self, urlBase, data);
}
if ( getUrl && getContext() === 'browser' ) {
request
.get(getUrl)
.use(requestTypes(reqType, { async: isAsync }))
.end(handleResponse);
}
else if ( getXHR() || getContext() === 'server' ) {
request
.post(urlBase)
.set('Content-Type', 'application/json')
.set('Authorization', self.writeKey())
.send(data)
.end(handleResponse);
}
else {
self.trigger('error', 'Request not sent: URL length exceeds current browser limit, and XHR (POST) is not supported.');
}
function handleResponse(err, res){
responseHandler(err, res, cb);
cb = callback = null;
}
function handleValidationError(msg){
var err = 'Event not recorded: ' + msg;
self.trigger('error', err);
if (cb) {
cb.call(self, err, null);
cb = callback = null;
}
}
return;
};
function prepareGetRequest(url, data){
url += getQueryString({
api_key : this.writeKey(),
data : base64.encode( json.stringify(data) ),
modified : new Date().getTime()
});
return ( url.length < getUrlMaxLength() ) ? url : false;
}
},{"../helpers/get-context":11,"../helpers/get-query-string":12,"../helpers/get-url-max-length":14,"../helpers/get-xhr-object":15,"../helpers/superagent-handle-response":16,"../helpers/superagent-request-types":17,"../index":18,"../utils/base64":29,"../utils/each":31,"../utils/json-shim":34,"superagent":6}],20:[function(require,module,exports){
var Keen = require('../index');
var request = require('superagent');
var each = require('../utils/each'),
getContext = require('../helpers/get-context'),
getXHR = require('../helpers/get-xhr-object'),
requestTypes = require('../helpers/superagent-request-types'),
responseHandler = require('../helpers/superagent-handle-response');
module.exports = function(payload, callback) {
var self = this,
urlBase = this.url('/events'),
data = {},
cb = callback;
if (!Keen.enabled) {
handleValidationError.call(self, 'Keen.enabled = false');
return;
}
if (!self.projectId()) {
handleValidationError.call(self, 'Missing projectId property');
return;
}
if (!self.writeKey()) {
handleValidationError.call(self, 'Missing writeKey property');
return;
}
if (arguments.length > 2) {
handleValidationError.call(self, 'Incorrect arguments provided to #addEvents method');
return;
}
if (typeof payload !== 'object' || payload instanceof Array) {
handleValidationError.call(self, 'Request payload must be an object');
return;
}
if (self.config.globalProperties) {
each(payload, function(events, collection){
each(events, function(body, index){
var base = self.config.globalProperties(collection);
each(body, function(value, key){
base[key] = value;
});
        if (!data[collection]) data[collection] = [];
        data[collection].push(base);
});
});
}
else {
data = payload;
}
if ( getXHR() || getContext() === 'server' ) {
request
.post(urlBase)
.set('Content-Type', 'application/json')
.set('Authorization', self.writeKey())
.send(data)
.end(function(err, res){
responseHandler(err, res, cb);
cb = callback = null;
});
}
else {
self.trigger('error', 'Events not recorded: XHR support is required for batch upload');
}
function handleValidationError(msg){
var err = 'Events not recorded: ' + msg;
self.trigger('error', err);
if (cb) {
cb.call(self, err, null);
cb = callback = null;
}
}
return;
};
},{"../helpers/get-context":11,"../helpers/get-xhr-object":15,"../helpers/superagent-handle-response":16,"../helpers/superagent-request-types":17,"../index":18,"../utils/each":31,"superagent":6}],21:[function(require,module,exports){
var request = require('superagent');
var getQueryString = require('../helpers/get-query-string'),
handleResponse = require('../helpers/superagent-handle-response'),
requestTypes = require('../helpers/superagent-request-types');
module.exports = function(url, params, api_key, callback){
var reqType = this.config.requestType,
data = params || {};
if (reqType === 'beacon') {
reqType = 'jsonp';
}
data['api_key'] = data['api_key'] || api_key;
request
.get(url+getQueryString(data))
.use(requestTypes(reqType))
.end(function(err, res){
handleResponse(err, res, callback);
callback = null;
});
};
},{"../helpers/get-query-string":12,"../helpers/superagent-handle-response":16,"../helpers/superagent-request-types":17,"superagent":6}],22:[function(require,module,exports){
var request = require('superagent');
var handleResponse = require('../helpers/superagent-handle-response');
module.exports = function(url, data, api_key, callback){
request
.post(url)
.set('Content-Type', 'application/json')
.set('Authorization', api_key)
.send(data || {})
.end(function(err, res) {
handleResponse(err, res, callback);
callback = null;
});
};
},{"../helpers/superagent-handle-response":16,"superagent":6}],23:[function(require,module,exports){
var Request = require("../request");
module.exports = function(query, callback) {
var queries = [],
cb = callback,
request;
if (!this.config.projectId || !this.config.projectId.length) {
handleConfigError.call(this, 'Missing projectId property');
}
if (!this.config.readKey || !this.config.readKey.length) {
handleConfigError.call(this, 'Missing readKey property');
}
function handleConfigError(msg){
var err = 'Query not sent: ' + msg;
this.trigger('error', err);
if (cb) {
cb.call(this, err, null);
cb = callback = null;
}
}
if (query instanceof Array) {
queries = query;
} else {
queries.push(query);
}
request = new Request(this, queries, cb).refresh();
cb = callback = null;
return request;
};
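// [Editor's sketch] This module presumably backs the client's query-run command
// in the unbundled library; a query may be a Query instance, a saved-query name
// string, or an array of either (the saved-query name below is hypothetical):
//
//   client.run('pageviews_last_week', function(err, res){
//     if (err) return console.error(err);
//     console.log(res);
//   });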
},{"../request":27}],24:[function(require,module,exports){
module.exports = function(newGlobalProperties) {
if (newGlobalProperties && typeof(newGlobalProperties) == "function") {
this.config.globalProperties = newGlobalProperties;
} else {
this.trigger("error", "Invalid value for global properties: " + newGlobalProperties);
}
};
},{}],25:[function(require,module,exports){
var addEvent = require("./addEvent");
module.exports = function(jsEvent, eventCollection, payload, timeout, timeoutCallback){
var evt = jsEvent,
target = (evt.currentTarget) ? evt.currentTarget : (evt.srcElement || evt.target),
timer = timeout || 500,
triggered = false,
targetAttr = "",
callback,
win;
if (target.getAttribute !== void 0) {
targetAttr = target.getAttribute("target");
} else if (target.target) {
targetAttr = target.target;
}
if ((targetAttr == "_blank" || targetAttr == "blank") && !evt.metaKey) {
win = window.open("about:blank");
win.document.location = target.href;
}
if (target.nodeName === "A") {
callback = function(){
if(!triggered && !evt.metaKey && (targetAttr !== "_blank" && targetAttr !== "blank")){
triggered = true;
window.location = target.href;
}
};
} else if (target.nodeName === "FORM") {
callback = function(){
if(!triggered){
triggered = true;
target.submit();
}
};
} else {
this.trigger("error", "#trackExternalLink method not attached to an <a> or <form> DOM element");
}
if (timeoutCallback) {
callback = function(){
if(!triggered){
triggered = true;
timeoutCallback();
}
};
}
addEvent.call(this, eventCollection, payload, callback);
setTimeout(callback, timer);
if (!evt.metaKey) {
return false;
}
};
},{"./addEvent":19}],26:[function(require,module,exports){
var each = require("./utils/each"),
extend = require("./utils/extend"),
getTimezoneOffset = require("./helpers/get-timezone-offset"),
getQueryString = require("./helpers/get-query-string");
var Emitter = require('./utils/emitter-shim');
function Query(){
this.configure.apply(this, arguments);
};
Emitter(Query.prototype);
Query.prototype.configure = function(analysisType, params) {
this.analysis = analysisType;
this.params = this.params || {};
this.set(params);
if (this.params.timezone === void 0) {
this.params.timezone = getTimezoneOffset();
}
return this;
};
Query.prototype.set = function(attributes) {
var self = this;
each(attributes, function(v, k){
var key = k, value = v;
if (k.match(new RegExp("[A-Z]"))) {
key = k.replace(/([A-Z])/g, function($1) { return "_"+$1.toLowerCase(); });
}
self.params[key] = value;
if (value instanceof Array) {
each(value, function(dv, index){
if (dv instanceof Array == false && typeof dv === "object") {
each(dv, function(deepValue, deepKey){
if (deepKey.match(new RegExp("[A-Z]"))) {
var _deepKey = deepKey.replace(/([A-Z])/g, function($1) { return "_"+$1.toLowerCase(); });
delete self.params[key][index][deepKey];
self.params[key][index][_deepKey] = deepValue;
}
});
}
});
}
});
return self;
};
Query.prototype.get = function(attribute) {
var key = attribute;
if (key.match(new RegExp("[A-Z]"))) {
key = key.replace(/([A-Z])/g, function($1) { return "_"+$1.toLowerCase(); });
}
if (this.params) {
return this.params[key] || null;
}
};
Query.prototype.addFilter = function(property, operator, value) {
this.params.filters = this.params.filters || [];
this.params.filters.push({
"property_name": property,
"operator": operator,
"property_value": value
});
return this;
};
module.exports = Query;
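// [Editor's sketch] Query construction; note that set() converts camelCase
// parameter names to snake_case (eventCollection -> event_collection) and a
// timezone offset is filled in by default (illustrative collection name):
//
//   var count = new Query('count', {
//     eventCollection: 'pageviews',
//     timeframe: 'this_7_days'
//   });
//   // count.params => { event_collection: 'pageviews', timeframe: 'this_7_days', timezone: ... }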
},{"./helpers/get-query-string":12,"./helpers/get-timezone-offset":13,"./utils/each":31,"./utils/emitter-shim":32,"./utils/extend":33}],27:[function(require,module,exports){
var each = require('./utils/each'),
extend = require('./utils/extend'),
sendQuery = require('./utils/sendQuery'),
sendSavedQuery = require('./utils/sendSavedQuery');
var Emitter = require('./utils/emitter-shim');
var Keen = require('./');
var Query = require('./query');
function Request(client, queries, callback){
var cb = callback;
this.config = {
timeout: 300 * 1000
};
this.configure(client, queries, cb);
cb = callback = null;
};
Emitter(Request.prototype);
Request.prototype.configure = function(client, queries, callback){
var cb = callback;
extend(this, {
'client' : client,
'queries' : queries,
'data' : {},
'callback' : cb
});
cb = callback = null;
return this;
};
Request.prototype.timeout = function(ms){
if (!arguments.length) return this.config.timeout;
this.config.timeout = (!isNaN(parseInt(ms)) ? parseInt(ms) : null);
return this;
};
Request.prototype.refresh = function(){
var self = this,
completions = 0,
response = [],
errored = false;
var handleResponse = function(err, res, index){
if (errored) {
return;
}
if (err) {
self.trigger('error', err);
if (self.callback) {
self.callback(err, null);
}
errored = true;
return;
}
response[index] = res;
completions++;
if (completions == self.queries.length && !errored) {
self.data = (self.queries.length == 1) ? response[0] : response;
self.trigger('complete', null, self.data);
if (self.callback) {
self.callback(null, self.data);
}
}
};
each(self.queries, function(query, index){
var cbSequencer = function(err, res){
handleResponse(err, res, index);
};
var path = '/queries';
if (typeof query === 'string') {
path += '/saved/' + query + '/result';
sendSavedQuery.call(self, path, {}, cbSequencer);
}
else if (query instanceof Query) {
path += '/' + query.analysis;
if (query.analysis === 'saved') {
path += '/' + query.params.query_name + '/result';
sendSavedQuery.call(self, path, {}, cbSequencer);
}
else {
sendQuery.call(self, path, query.params, cbSequencer);
}
}
else {
var res = {
statusText: 'Bad Request',
responseText: { message: 'Error: Query ' + (+index+1) + ' of ' + self.queries.length + ' for project ' + self.client.projectId() + ' is not a valid request' }
};
self.trigger('error', res.responseText.message);
if (self.callback) {
self.callback(res.responseText.message, null);
}
}
});
return this;
};
module.exports = Request;
},{"./":18,"./query":26,"./utils/each":31,"./utils/emitter-shim":32,"./utils/extend":33,"./utils/sendQuery":36,"./utils/sendSavedQuery":37}],28:[function(require,module,exports){
var request = require('superagent');
var responseHandler = require('./helpers/superagent-handle-response');
function savedQueries() {
var _this = this;
this.all = function(callback) {
var url = _this.url('/queries/saved');
request
.get(url)
.set('Content-Type', 'application/json')
.set('Authorization', _this.masterKey())
.end(handleResponse);
function handleResponse(err, res){
responseHandler(err, res, callback);
callback = null;
}
};
this.get = function(queryName, callback) {
var url = _this.url('/queries/saved/' + queryName);
request
.get(url)
.set('Content-Type', 'application/json')
.set('Authorization', _this.masterKey())
.end(handleResponse);
function handleResponse(err, res){
responseHandler(err, res, callback);
callback = null;
}
};
this.update = function(queryName, body, callback) {
var url = _this.url('/queries/saved/' + queryName);
request
.put(url)
.set('Content-Type', 'application/json')
.set('Authorization', _this.masterKey())
.send(body || {})
.end(handleResponse);
function handleResponse(err, res){
responseHandler(err, res, callback);
callback = null;
}
};
this.create = this.update;
this.destroy = function(queryName, callback) {
var url = _this.url('/queries/saved/' + queryName);
request
.del(url)
.set('Content-Type', 'application/json')
.set('Authorization', _this.masterKey())
.end(handleResponse);
function handleResponse(err, res){
responseHandler(err, res, callback);
callback = null;
}
};
return this;
}
module.exports = savedQueries;
},{"./helpers/superagent-handle-response":16,"superagent":6}],29:[function(require,module,exports){
module.exports = {
map: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
encode: function (n) {
"use strict";
var o = "", i = 0, m = this.map, i1, i2, i3, e1, e2, e3, e4;
n = this.utf8.encode(n);
while (i < n.length) {
i1 = n.charCodeAt(i++); i2 = n.charCodeAt(i++); i3 = n.charCodeAt(i++);
e1 = (i1 >> 2); e2 = (((i1 & 3) << 4) | (i2 >> 4)); e3 = (isNaN(i2) ? 64 : ((i2 & 15) << 2) | (i3 >> 6));
e4 = (isNaN(i2) || isNaN(i3)) ? 64 : i3 & 63;
o = o + m.charAt(e1) + m.charAt(e2) + m.charAt(e3) + m.charAt(e4);
} return o;
},
decode: function (n) {
"use strict";
var o = "", i = 0, m = this.map, cc = String.fromCharCode, e1, e2, e3, e4, c1, c2, c3;
n = n.replace(/[^A-Za-z0-9\+\/\=]/g, "");
while (i < n.length) {
e1 = m.indexOf(n.charAt(i++)); e2 = m.indexOf(n.charAt(i++));
e3 = m.indexOf(n.charAt(i++)); e4 = m.indexOf(n.charAt(i++));
c1 = (e1 << 2) | (e2 >> 4); c2 = ((e2 & 15) << 4) | (e3 >> 2);
c3 = ((e3 & 3) << 6) | e4;
o = o + (cc(c1) + ((e3 != 64) ? cc(c2) : "")) + (((e4 != 64) ? cc(c3) : ""));
} return this.utf8.decode(o);
},
utf8: {
encode: function (n) {
"use strict";
var o = "", i = 0, cc = String.fromCharCode, c;
while (i < n.length) {
c = n.charCodeAt(i++); o = o + ((c < 128) ? cc(c) : ((c > 127) && (c < 2048)) ?
(cc((c >> 6) | 192) + cc((c & 63) | 128)) : (cc((c >> 12) | 224) + cc(((c >> 6) & 63) | 128) + cc((c & 63) | 128)));
} return o;
},
decode: function (n) {
"use strict";
var o = "", i = 0, cc = String.fromCharCode, c2, c;
while (i < n.length) {
c = n.charCodeAt(i);
o = o + ((c < 128) ? [cc(c), i++][0] : ((c > 191) && (c < 224)) ?
[cc(((c & 31) << 6) | ((c2 = n.charCodeAt(i + 1)) & 63)), (i += 2)][0] :
[cc(((c & 15) << 12) | (((c2 = n.charCodeAt(i + 1)) & 63) << 6) | ((c3 = n.charCodeAt(i + 2)) & 63)), (i += 3)][0]);
} return o;
}
}
};
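// [Editor's sketch] Round-trip example for the helper above (assuming it is
// required as `base64`, as elsewhere in this bundle):
//
//   base64.encode('hi');   // => 'aGk='
//   base64.decode('aGk='); // => 'hi'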
},{}],30:[function(require,module,exports){
var json = require('./json-shim');
module.exports = function(target) {
return json.parse( json.stringify( target ) );
};
},{"./json-shim":34}],31:[function(require,module,exports){
module.exports = function(o, cb, s){
var n;
if (!o){
return 0;
}
s = !s ? o : s;
if (o instanceof Array){
for (n=0; n<o.length; n++) {
if (cb.call(s, o[n], n, o) === false){
return 0;
}
}
} else {
for (n in o){
if (o.hasOwnProperty(n)) {
if (cb.call(s, o[n], n, o) === false){
return 0;
}
}
}
}
return 1;
};
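// [Editor's sketch] Usage of the each() iterator above; returning false from
// the callback stops iteration:
//
//   each([10, 20, 30], function(value, index){
//     if (value > 20) return false; // stop early
//     console.log(index, value);
//   });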
},{}],32:[function(require,module,exports){
var Emitter = require('component-emitter');
Emitter.prototype.trigger = Emitter.prototype.emit;
module.exports = Emitter;
},{"component-emitter":1}],33:[function(require,module,exports){
module.exports = function(target){
for (var i = 1; i < arguments.length; i++) {
for (var prop in arguments[i]){
target[prop] = arguments[i][prop];
}
}
return target;
};
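// [Editor's sketch] Shallow merge with the extend() helper above; later sources
// overwrite earlier keys on the target:
//
//   extend({ a: 1 }, { b: 2 }, { a: 3 }); // => { a: 3, b: 2 }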
},{}],34:[function(require,module,exports){
module.exports = ('undefined' !== typeof window && window.JSON) ? window.JSON : require("json3");
},{"json3":3}],35:[function(require,module,exports){
function parseParams(str){
var urlParams = {},
match,
pl = /\+/g,
search = /([^&=]+)=?([^&]*)/g,
decode = function (s) { return decodeURIComponent(s.replace(pl, " ")); },
query = str.split("?")[1];
while (!!(match=search.exec(query))) {
urlParams[decode(match[1])] = decode(match[2]);
}
return urlParams;
};
module.exports = parseParams;
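// [Editor's sketch] parseParams() reads only the portion after '?' and decodes
// '+' as a space (illustrative URL):
//
//   parseParams('http://example.com/page?a=1&b=two+words');
//   // => { a: '1', b: 'two words' }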
},{}],36:[function(require,module,exports){
var request = require('superagent');
var getContext = require('../helpers/get-context'),
getXHR = require('../helpers/get-xhr-object'),
responseHandler = require('../helpers/superagent-handle-response');
module.exports = function(path, params, callback){
var url = this.client.url(path);
if (!this.client.projectId()) {
this.client.trigger('error', 'Query not sent: Missing projectId property');
return;
}
if (!this.client.readKey()) {
this.client.trigger('error', 'Query not sent: Missing readKey property');
return;
}
if (getContext() === 'server' || getXHR()) {
request
.post(url)
.set('Content-Type', 'application/json')
.set('Authorization', this.client.readKey())
.timeout(this.timeout())
.send(params || {})
.end(handleResponse);
}
function handleResponse(err, res){
responseHandler(err, res, callback);
callback = null;
}
return;
}
},{"../helpers/get-context":11,"../helpers/get-xhr-object":15,"../helpers/superagent-handle-response":16,"superagent":6}],37:[function(require,module,exports){
var request = require('superagent');
var responseHandler = require('../helpers/superagent-handle-response');
module.exports = function(path, params, callback){
var key;
if (this.client.readKey()) {
key = this.client.readKey();
}
else if (this.client.masterKey()) {
key = this.client.masterKey();
}
request
.get(this.client.url(path))
.set('Content-Type', 'application/json')
.set('Authorization', key)
.timeout(this.timeout())
.send()
.end(function(err, res) {
responseHandler(err, res, callback);
callback = null;
});
return;
}
},{"../helpers/superagent-handle-response":16,"superagent":6}],38:[function(require,module,exports){
var clone = require("../core/utils/clone"),
each = require("../core/utils/each"),
flatten = require("./utils/flatten"),
parse = require("./utils/parse");
var Emitter = require('../core/utils/emitter-shim');
function Dataset(){
this.data = {
input: {},
output: [['Index']]
};
this.meta = {
schema: {},
method: undefined
};
this.parser = undefined;
if (arguments.length > 0) {
this.parse.apply(this, arguments);
}
}
Dataset.defaults = {
delimeter: " -> "
};
Emitter(Dataset);
Emitter(Dataset.prototype);
Dataset.parser = require('./utils/parsers')(Dataset);
Dataset.prototype.input = function(obj){
if (!arguments.length) return this["data"]["input"];
this["data"]["input"] = (obj ? clone(obj) : null);
return this;
};
Dataset.prototype.output = function(arr){
if (!arguments.length) return this["data"].output;
this["data"].output = (arr instanceof Array ? arr : null);
return this;
}
Dataset.prototype.method = function(str){
if (!arguments.length) return this.meta["method"];
this.meta["method"] = (str ? String(str) : null);
return this;
};
Dataset.prototype.schema = function(obj){
if (!arguments.length) return this.meta.schema;
this.meta.schema = (obj ? obj : null);
return this;
};
Dataset.prototype.parse = function(raw, schema){
var options;
if (raw) this.input(raw);
if (schema) this.schema(schema);
this.output([[]]);
if (this.meta.schema.select) {
this.method("select");
options = extend({
records: "",
select: true
}, this.schema());
_select.call(this, _optHash(options));
}
else if (this.meta.schema.unpack) {
this.method("unpack");
options = extend({
records: "",
unpack: {
index: false,
value: false,
label: false
}
}, this.schema());
_unpack.call(this, _optHash(options));
}
return this;
};
function _select(cfg){
var self = this,
options = cfg || {},
target_set = [],
unique_keys = [];
var root, records_target;
if (options.records === "" || !options.records) {
root = [self.input()];
} else {
records_target = options.records.split(Dataset.defaults.delimeter);
root = parse.apply(self, [self.input()].concat(records_target))[0];
}
each(options.select, function(prop){
target_set.push(prop.path.split(Dataset.defaults.delimeter));
});
if (target_set.length == 0) {
each(root, function(record, interval){
var flat = flatten(record);
for (var key in flat) {
if (flat.hasOwnProperty(key) && unique_keys.indexOf(key) == -1) {
unique_keys.push(key);
target_set.push([key]);
}
}
});
}
var test = [[]];
each(target_set, function(props, i){
if (target_set.length == 1) {
test[0].push('label', 'value');
} else {
test[0].push(props.join("."));
}
});
each(root, function(record, i){
var flat = flatten(record);
if (target_set.length == 1) {
test.push([target_set.join("."), flat[target_set.join(".")]]);
} else {
test.push([]);
each(target_set, function(t, j){
var target = t.join(".");
test[i+1].push(flat[target]);
});
}
});
self.output(test);
self.format(options.select);
return self;
}
function _unpack(options){
var self = this, discovered_labels = [];
var value_set = (options.unpack.value) ? options.unpack.value.path.split(Dataset.defaults.delimeter) : false,
label_set = (options.unpack.label) ? options.unpack.label.path.split(Dataset.defaults.delimeter) : false,
index_set = (options.unpack.index) ? options.unpack.index.path.split(Dataset.defaults.delimeter) : false;
var value_desc = (value_set[value_set.length-1] !== "") ? value_set[value_set.length-1] : "Value",
label_desc = (label_set[label_set.length-1] !== "") ? label_set[label_set.length-1] : "Label",
index_desc = (index_set[index_set.length-1] !== "") ? index_set[index_set.length-1] : "Index";
var root = (function(){
var root;
if (options.records == "") {
root = [self.input()];
} else {
root = parse.apply(self, [self.input()].concat(options.records.split(Dataset.defaults.delimeter)));
}
return root[0];
})();
if (root instanceof Array == false) {
root = [root];
}
each(root, function(record, interval){
var labels = (label_set) ? parse.apply(self, [record].concat(label_set)) : [];
if (labels) {
discovered_labels = labels;
}
});
each(root, function(record, interval){
var plucked_value = (value_set) ? parse.apply(self, [record].concat(value_set)) : false,
plucked_index = (index_set) ? parse.apply(self, [record].concat(index_set)) : false;
if (plucked_index) {
each(plucked_index, function(){
self.data.output.push([]);
});
} else {
self.data.output.push([]);
}
if (plucked_index) {
if (interval == 0) {
self.data.output[0].push(index_desc);
if (discovered_labels.length > 0) {
each(discovered_labels, function(value, i){
self.data.output[0].push(value);
});
} else {
self.data.output[0].push(value_desc);
}
}
if (root.length < self.data.output.length-1) {
if (interval == 0) {
each(self.data.output, function(row, i){
if (i > 0) {
self.data.output[i].push(plucked_index[i-1]);
}
});
}
} else {
self.data.output[interval+1].push(plucked_index[0]);
}
}
if (!plucked_index && discovered_labels.length > 0) {
if (interval == 0) {
self.data.output[0].push(label_desc);
self.data.output[0].push(value_desc);
}
self.data.output[interval+1].push(discovered_labels[0]);
}
if (!plucked_index && discovered_labels.length == 0) {
self.data.output[0].push('');
}
if (plucked_value) {
if (root.length < self.data.output.length-1) {
if (interval == 0) {
each(self.data.output, function(row, i){
if (i > 0) {
self.data.output[i].push(plucked_value[i-1]);
}
});
}
} else {
each(plucked_value, function(value){
self.data.output[interval+1].push(value);
});
}
} else {
each(self.data.output[0], function(cell, i){
var offset = (plucked_index) ? 0 : -1;
if (i > offset) {
self.data.output[interval+1].push(null);
}
})
}
});
self.format(options.unpack);
return this;
}
function _optHash(options){
each(options.unpack, function(value, key, object){
if (value && is(value, 'string')) {
options.unpack[key] = { path: options.unpack[key] };
}
});
return options;
}
function is(o, t){
o = typeof(o);
if (!t){
return o != 'undefined';
}
return o == t;
}
function extend(o, e){
each(e, function(v, n){
if (is(o[n], 'object') && is(v, 'object')){
o[n] = extend(o[n], v);
} else if (v !== null) {
o[n] = v;
}
});
return o;
}
module.exports = Dataset;
},{"../core/utils/clone":30,"../core/utils/each":31,"../core/utils/emitter-shim":32,"./utils/flatten":51,"./utils/parse":52,"./utils/parsers":53}],39:[function(require,module,exports){
var extend = require("../core/utils/extend"),
Dataset = require("./dataset");
extend(Dataset.prototype, require("./lib/append"));
extend(Dataset.prototype, require("./lib/delete"));
extend(Dataset.prototype, require("./lib/filter"));
extend(Dataset.prototype, require("./lib/insert"));
extend(Dataset.prototype, require("./lib/select"));
extend(Dataset.prototype, require("./lib/set"));
extend(Dataset.prototype, require("./lib/sort"));
extend(Dataset.prototype, require("./lib/update"));
extend(Dataset.prototype, require("./lib/analyses"));
extend(Dataset.prototype, {
"format": require("./lib/format")
});
module.exports = Dataset;
},{"../core/utils/extend":33,"./dataset":38,"./lib/analyses":40,"./lib/append":41,"./lib/delete":42,"./lib/filter":43,"./lib/format":44,"./lib/insert":45,"./lib/select":46,"./lib/set":47,"./lib/sort":48,"./lib/update":49}],40:[function(require,module,exports){
var each = require("../../core/utils/each"),
arr = ["Average", "Maximum", "Minimum", "Sum"],
output = {};
output["average"] = function(arr, start, end){
var set = arr.slice(start||0, (end ? end+1 : arr.length)),
sum = 0,
avg = null;
each(set, function(val, i){
if (typeof val === "number" && !isNaN(parseFloat(val))) {
sum += parseFloat(val);
}
});
return sum / set.length;
};
output["maximum"] = function(arr, start, end){
var set = arr.slice(start||0, (end ? end+1 : arr.length)),
nums = [];
each(set, function(val, i){
if (typeof val === "number" && !isNaN(parseFloat(val))) {
nums.push(parseFloat(val));
}
});
return Math.max.apply(Math, nums);
};
output["minimum"] = function(arr, start, end){
var set = arr.slice(start||0, (end ? end+1 : arr.length)),
nums = [];
each(set, function(val, i){
if (typeof val === "number" && !isNaN(parseFloat(val))) {
nums.push(parseFloat(val));
}
});
return Math.min.apply(Math, nums);
};
output["sum"] = function(arr, start, end){
var set = arr.slice(start||0, (end ? end+1 : arr.length)),
sum = 0;
each(set, function(val, i){
if (typeof val === "number" && !isNaN(parseFloat(val))) {
sum += parseFloat(val);
}
});
return sum;
};
each(arr, function(v,i){
output["getColumn"+v] = output["getRow"+v] = function(arr){
return this[v.toLowerCase()](arr, 1);
};
});
output["getColumnLabel"] = output["getRowIndex"] = function(arr){
return arr[0];
};
module.exports = output;
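// [Editor's sketch] The getRow*/getColumn* helpers above skip the label cell at
// index 0, so for a row shaped like [label, v1, v2, ...]:
//
//   output.getRowSum(['Result', 2, 3, 4]);     // => 9
//   output.getRowAverage(['Result', 2, 3, 4]); // => 3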
},{"../../core/utils/each":31}],41:[function(require,module,exports){
var each = require("../../core/utils/each");
var createNullList = require('../utils/create-null-list');
module.exports = {
"appendColumn": appendColumn,
"appendRow": appendRow
};
function appendColumn(str, input){
var self = this,
args = Array.prototype.slice.call(arguments, 2),
label = (str !== undefined) ? str : null;
if (typeof input === "function") {
self.data.output[0].push(label);
each(self.output(), function(row, i){
var cell;
if (i > 0) {
cell = input.call(self, row, i);
if (typeof cell === "undefined") {
cell = null;
}
self.data.output[i].push(cell);
}
});
}
else if (!input || input instanceof Array) {
input = input || [];
if (input.length <= self.output().length - 1) {
input = input.concat( createNullList(self.output().length - 1 - input.length) );
}
else {
each(input, function(value, i){
if (self.data.output.length -1 < input.length) {
appendRow.call(self, String( self.data.output.length ));
}
});
}
self.data.output[0].push(label);
each(input, function(value, i){
self.data.output[i+1][self.data.output[0].length-1] = value;
});
}
return self;
}
function appendRow(str, input){
var self = this,
args = Array.prototype.slice.call(arguments, 2),
label = (str !== undefined) ? str : null,
newRow = [];
newRow.push(label);
if (typeof input === "function") {
each(self.data.output[0], function(label, i){
var col, cell;
if (i > 0) {
col = self.selectColumn(i);
cell = input.call(self, col, i);
if (typeof cell === "undefined") {
cell = null;
}
newRow.push(cell);
}
});
self.data.output.push(newRow);
}
else if (!input || input instanceof Array) {
input = input || [];
if (input.length <= self.data.output[0].length - 1) {
input = input.concat( createNullList( self.data.output[0].length - 1 - input.length ) );
}
else {
each(input, function(value, i){
if (self.data.output[0].length -1 < input.length) {
appendColumn.call(self, String( self.data.output[0].length ));
}
});
}
self.data.output.push( newRow.concat(input) );
}
return self;
}
},{"../../core/utils/each":31,"../utils/create-null-list":50}],42:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = {
"deleteColumn": deleteColumn,
"deleteRow": deleteRow
};
function deleteColumn(q){
var self = this,
index = (typeof q === 'number') ? q : this.data.output[0].indexOf(q);
if (index > -1) {
each(self.data.output, function(row, i){
self.data.output[i].splice(index, 1);
});
}
return self;
}
function deleteRow(q){
var index = (typeof q === 'number') ? q : this.selectColumn(0).indexOf(q);
if (index > -1) {
this.data.output.splice(index, 1);
}
return this;
}
},{"../../core/utils/each":31}],43:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = {
"filterColumns": filterColumns,
"filterRows": filterRows
};
function filterColumns(fn){
var self = this,
clone = new Array();
each(self.data.output, function(row, i){
clone.push([]);
});
each(self.data.output[0], function(col, i){
var selectedColumn = self.selectColumn(i);
if (i == 0 || fn.call(self, selectedColumn, i)) {
each(selectedColumn, function(cell, ri){
clone[ri].push(cell);
});
}
});
self.output(clone);
return self;
}
function filterRows(fn){
var self = this,
clone = [];
each(self.output(), function(row, i){
if (i == 0 || fn.call(self, row, i)) {
clone.push(row);
}
});
self.output(clone);
return self;
}
},{"../../core/utils/each":31}],44:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = function(options){
var self = this;
if (this.method() === 'select') {
each(self.output(), function(row, i){
if (i == 0) {
each(row, function(cell, j){
if (options[j] && options[j].label) {
self.data.output[i][j] = options[j].label;
}
});
} else {
each(row, function(cell, j){
self.data.output[i][j] = _applyFormat(self.data.output[i][j], options[j]);
});
}
});
}
if (this.method() === 'unpack') {
if (options.index) {
each(self.output(), function(row, i){
if (i == 0) {
if (options.index.label) {
self.data.output[i][0] = options.index.label;
}
} else {
self.data.output[i][0] = _applyFormat(self.data.output[i][0], options.index);
}
});
}
if (options.label) {
if (options.index) {
each(self.output(), function(row, i){
each(row, function(cell, j){
if (i == 0 && j > 0) {
self.data.output[i][j] = _applyFormat(self.data.output[i][j], options.label);
}
});
});
} else {
each(self.output(), function(row, i){
if (i > 0) {
self.data.output[i][0] = _applyFormat(self.data.output[i][0], options.label);
}
});
}
}
if (options.value) {
if (options.index) {
each(self.output(), function(row, i){
each(row, function(cell, j){
if (i > 0 && j > 0) {
self.data.output[i][j] = _applyFormat(self.data.output[i][j], options.value);
}
});
});
} else {
each(self.output(), function(row, i){
each(row, function(cell, j){
if (i > 0) {
self.data.output[i][j] = _applyFormat(self.data.output[i][j], options.value);
}
});
});
}
}
}
return self;
};
function _applyFormat(value, opts){
var output = value,
options = opts || {};
if (options.replace) {
each(options.replace, function(val, key){
if (output == key || String(output) == String(key) || parseFloat(output) == parseFloat(key)) {
output = val;
}
});
}
if (options.type && options.type == 'date') {
    if (options.format && 'undefined' !== typeof moment && moment(value).isValid()) {
output = moment(output).format(options.format);
} else {
output = new Date(output);
}
}
if (options.type && options.type == 'string') {
output = String(output);
}
if (options.type && options.type == 'number' && !isNaN(parseFloat(output))) {
output = parseFloat(output);
}
return output;
}
},{"../../core/utils/each":31}],45:[function(require,module,exports){
var each = require("../../core/utils/each");
var createNullList = require('../utils/create-null-list');
var append = require('./append');
var appendRow = append.appendRow,
appendColumn = append.appendColumn;
module.exports = {
"insertColumn": insertColumn,
"insertRow": insertRow
};
function insertColumn(index, str, input){
var self = this, label;
label = (str !== undefined) ? str : null;
if (typeof input === "function") {
self.data.output[0].splice(index, 0, label);
each(self.output(), function(row, i){
var cell;
if (i > 0) {
cell = input.call(self, row, i);
if (typeof cell === "undefined") {
cell = null;
}
self.data.output[i].splice(index, 0, cell);
}
});
}
else if (!input || input instanceof Array) {
input = input || [];
if (input.length <= self.output().length - 1) {
input = input.concat( createNullList(self.output().length - 1 - input.length) );
}
else {
each(input, function(value, i){
if (self.data.output.length -1 < input.length) {
appendRow.call(self, String( self.data.output.length ));
}
});
}
self.data.output[0].splice(index, 0, label);
each(input, function(value, i){
self.data.output[i+1].splice(index, 0, value);
});
}
return self;
}
function insertRow(index, str, input){
var self = this, label, newRow = [];
label = (str !== undefined) ? str : null;
newRow.push(label);
if (typeof input === "function") {
each(self.output()[0], function(label, i){
var col, cell;
if (i > 0) {
col = self.selectColumn(i);
cell = input.call(self, col, i);
if (typeof cell === "undefined") {
cell = null;
}
newRow.push(cell);
}
});
self.data.output.splice(index, 0, newRow);
}
else if (!input || input instanceof Array) {
input = input || [];
if (input.length <= self.data.output[0].length - 1) {
input = input.concat( createNullList( self.data.output[0].length - 1 - input.length ) );
}
else {
each(input, function(value, i){
if (self.data.output[0].length -1 < input.length) {
appendColumn.call(self, String( self.data.output[0].length ));
}
});
}
self.data.output.splice(index, 0, newRow.concat(input) );
}
return self;
}
},{"../../core/utils/each":31,"../utils/create-null-list":50,"./append":41}],46:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = {
"selectColumn": selectColumn,
"selectRow": selectRow
};
function selectColumn(q){
var result = new Array(),
index = (typeof q === 'number') ? q : this.data.output[0].indexOf(q);
if (index > -1 && 'undefined' !== typeof this.data.output[0][index]) {
each(this.data.output, function(row, i){
result.push(row[index]);
});
}
return result;
}
function selectRow(q){
var result = new Array(),
index = (typeof q === 'number') ? q : this.selectColumn(0).indexOf(q);
if (index > -1 && 'undefined' !== typeof this.data.output[index]) {
result = this.data.output[index];
}
return result;
}
},{"../../core/utils/each":31}],47:[function(require,module,exports){
var each = require("../../core/utils/each");
var append = require('./append');
var select = require('./select');
module.exports = {
"set": set
};
function set(coords, value){
if (arguments.length < 2 || coords.length < 2) {
throw Error('Incorrect arguments provided for #set method');
}
var colIndex = 'number' === typeof coords[0] ? coords[0] : this.data.output[0].indexOf(coords[0]),
rowIndex = 'number' === typeof coords[1] ? coords[1] : select.selectColumn.call(this, 0).indexOf(coords[1]);
var colResult = select.selectColumn.call(this, coords[0]),
rowResult = select.selectRow.call(this, coords[1]);
if (colResult.length < 1) {
append.appendColumn.call(this, coords[0]);
colIndex = this.data.output[0].length-1;
}
if (rowResult.length < 1) {
append.appendRow.call(this, coords[1]);
rowIndex = this.data.output.length-1;
}
this.data.output[ rowIndex ][ colIndex ] = value;
return this;
}
},{"../../core/utils/each":31,"./append":41,"./select":46}],48:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = {
"sortColumns": sortColumns,
"sortRows": sortRows
};
function sortColumns(str, comp){
var self = this,
head = this.output()[0].slice(1),
cols = [],
clone = [],
fn = comp || this.getColumnLabel;
each(head, function(cell, i){
cols.push(self.selectColumn(i+1).slice(0));
});
cols.sort(function(a,b){
var op = fn.call(self, a) > fn.call(self, b);
if (op) {
return (str === "asc" ? 1 : -1);
} else if (!op) {
return (str === "asc" ? -1 : 1);
} else {
return 0;
}
});
each(cols, function(col, i){
self
.deleteColumn(i+1)
.insertColumn(i+1, col[0], col.slice(1));
});
return self;
}
function sortRows(str, comp){
var self = this,
head = this.output().slice(0,1),
body = this.output().slice(1),
fn = comp || this.getRowIndex;
body.sort(function(a, b){
var op = fn.call(self, a) > fn.call(self, b);
if (op) {
return (str === "asc" ? 1 : -1);
} else if (!op) {
return (str === "asc" ? -1 : 1);
} else {
return 0;
}
});
self.output(head.concat(body));
return self;
}
},{"../../core/utils/each":31}],49:[function(require,module,exports){
var each = require("../../core/utils/each");
var createNullList = require('../utils/create-null-list');
var append = require('./append');
var appendRow = append.appendRow,
appendColumn = append.appendColumn;
module.exports = {
"updateColumn": updateColumn,
"updateRow": updateRow
};
function updateColumn(q, input){
var self = this,
index = (typeof q === 'number') ? q : this.data.output[0].indexOf(q);
if (index > -1) {
if (typeof input === "function") {
each(self.output(), function(row, i){
var cell;
if (i > 0) {
cell = input.call(self, row[index], i, row);
if (typeof cell !== "undefined") {
self.data.output[i][index] = cell;
}
}
});
} else if (!input || input instanceof Array) {
input = input || [];
if (input.length <= self.output().length - 1) {
input = input.concat( createNullList(self.output().length - 1 - input.length) );
}
else {
each(input, function(value, i){
if (self.data.output.length -1 < input.length) {
appendRow.call(self, String( self.data.output.length ));
}
});
}
each(input, function(value, i){
self.data.output[i+1][index] = value;
});
}
}
return self;
}
function updateRow(q, input){
var self = this,
index = (typeof q === 'number') ? q : this.selectColumn(0).indexOf(q);
if (index > -1) {
if (typeof input === "function") {
each(self.output()[index], function(value, i){
var col = self.selectColumn(i),
cell = input.call(self, value, i, col);
if (typeof cell !== "undefined") {
self.data.output[index][i] = cell;
}
});
} else if (!input || input instanceof Array) {
input = input || [];
if (input.length <= self.data.output[0].length - 1) {
input = input.concat( createNullList( self.data.output[0].length - 1 - input.length ) );
}
else {
each(input, function(value, i){
if (self.data.output[0].length -1 < input.length) {
appendColumn.call(self, String( self.data.output[0].length ));
}
});
}
each(input, function(value, i){
self.data.output[index][i+1] = value;
});
}
}
return self;
}
},{"../../core/utils/each":31,"../utils/create-null-list":50,"./append":41}],50:[function(require,module,exports){
module.exports = function(len){
var list = new Array();
  for (var i = 0; i < len; i++) {
list.push(null);
}
return list;
};
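// [Editor's sketch] Example for the helper above (required elsewhere in this
// bundle as createNullList):
//
//   createNullList(3); // => [null, null, null]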
},{}],51:[function(require,module,exports){
module.exports = flatten;
function flatten(ob) {
var toReturn = {};
for (var i in ob) {
if (!ob.hasOwnProperty(i)) continue;
if ((typeof ob[i]) == 'object' && ob[i] !== null) {
var flatObject = flatten(ob[i]);
for (var x in flatObject) {
if (!flatObject.hasOwnProperty(x)) continue;
toReturn[i + '.' + x] = flatObject[x];
}
} else {
toReturn[i] = ob[i];
}
}
return toReturn;
}
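// [Editor's sketch] flatten() joins nested keys with '.':
//
//   flatten({ user: { name: 'al', stats: { visits: 3 } } });
//   // => { 'user.name': 'al', 'user.stats.visits': 3 }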
},{}],52:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = function() {
var result = [];
var loop = function() {
var root = arguments[0];
var args = Array.prototype.slice.call(arguments, 1);
var target = args.pop();
if (args.length === 0) {
if (root instanceof Array) {
args = root;
} else if (typeof root === 'object') {
args.push(root);
}
}
each(args, function(el){
if (target == "") {
if (typeof el == "number" || el == null) {
return result.push(el);
}
}
if (el[target] || el[target] === 0 || el[target] !== void 0) {
if (el[target] === null) {
return result.push(null);
} else {
return result.push(el[target]);
}
} else if (root[el]){
if (root[el] instanceof Array) {
each(root[el], function(n, i) {
var splinter = [root[el]].concat(root[el][i]).concat(args.slice(1)).concat(target);
return loop.apply(this, splinter);
});
} else {
if (root[el][target]) {
return result.push(root[el][target]);
} else {
return loop.apply(this, [root[el]].concat(args.splice(1)).concat(target));
}
}
} else if (typeof root === 'object' && root instanceof Array === false && !root[target]) {
throw new Error("Target property does not exist", target);
} else {
return loop.apply(this, [el].concat(args.splice(1)).concat(target));
}
return;
});
if (result.length > 0) {
return result;
}
};
return loop.apply(this, arguments);
}
},{"../../core/utils/each":31}],53:[function(require,module,exports){
var Dataset; /* injected */
var each = require('../../core/utils/each'),
flatten = require('./flatten');
var parsers = {
'metric': parseMetric,
'interval': parseInterval,
'grouped-metric': parseGroupedMetric,
'grouped-interval': parseGroupedInterval,
'double-grouped-metric': parseDoubleGroupedMetric,
'double-grouped-interval': parseDoubleGroupedInterval,
'funnel': parseFunnel,
'list': parseList,
'extraction': parseExtraction
};
module.exports = initialize;
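// Stores the injected Dataset constructor and returns a dispatcher that looks up a parser by name and applies any extra options.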
function initialize(lib){
Dataset = lib;
return function(name){
var options = Array.prototype.slice.call(arguments, 1);
if (!parsers[name]) {
throw 'Requested parser does not exist';
}
else {
return parsers[name].apply(this, options);
}
};
}
function parseMetric(){
return function(res){
var dataset = new Dataset();
dataset.data.input = res;
dataset.parser = {
name: 'metric'
};
return dataset.set(['Value', 'Result'], res.result);
}
}
function parseInterval(){
var options = Array.prototype.slice.call(arguments);
return function(res){
var dataset = new Dataset();
each(res.result, function(record, i){
var index = options[0] && options[0] === 'timeframe.end' ? record.timeframe.end : record.timeframe.start;
dataset.set(['Result', index], record.value);
});
dataset.data.input = res;
dataset.parser = {
name: 'interval',
options: options
};
return dataset;
}
}
function parseGroupedMetric(){
return function(res){
var dataset = new Dataset();
each(res.result, function(record, i){
var label;
each(record, function(value, key){
if (key !== 'result') {
label = key;
}
});
dataset.set(['Result', String(record[label])], record.result);
});
dataset.data.input = res;
dataset.parser = {
name: 'grouped-metric'
};
return dataset;
}
}
function parseGroupedInterval(){
var options = Array.prototype.slice.call(arguments);
return function(res){
var dataset = new Dataset();
each(res.result, function(record, i){
var index = options[0] && options[0] === 'timeframe.end' ? record.timeframe.end : record.timeframe.start;
if (record.value.length) {
each(record.value, function(group, j){
var label;
each(group, function(value, key){
if (key !== 'result') {
label = key;
}
});
dataset.set([ String(group[label]) || '', index ], group.result);
});
}
else {
dataset.appendRow(index);
}
});
dataset.data.input = res;
dataset.parser = {
name: 'grouped-interval',
options: options
};
return dataset;
}
}
function parseDoubleGroupedMetric(){
var options = Array.prototype.slice.call(arguments);
if (!options[0]) throw 'Requested parser requires a sequential list (array) of properties to target as a second argument';
return function(res){
var dataset = new Dataset();
each(res.result, function(record, i){
dataset.set([ 'Result', record[options[0][0]] + ' ' + record[options[0][1]] ], record.result);
});
dataset.data.input = res;
dataset.parser = {
name: 'double-grouped-metric',
options: options
};
return dataset;
}
}
function parseDoubleGroupedInterval(){
var options = Array.prototype.slice.call(arguments);
if (!options[0]) throw 'Requested parser requires a sequential list (array) of properties to target as a second argument';
return function(res){
var dataset = new Dataset();
each(res.result, function(record, i){
var index = options[1] && options[1] === 'timeframe.end' ? record.timeframe.end : record.timeframe.start;
each(record['value'], function(value, j){
var label = String(value[options[0][0]]) + ' ' + String(value[options[0][1]]);
dataset.set([ label, index ], value.result);
});
});
dataset.data.input = res;
dataset.parser = {
name: 'double-grouped-interval',
options: options
};
return dataset;
}
}
function parseFunnel(){
return function(res){
var dataset = new Dataset();
dataset.appendColumn('Step Value');
each(res.result, function(value, i){
dataset.appendRow(res.steps[i].event_collection, [ value ]);
});
dataset.data.input = res;
dataset.parser = {
name: 'funnel'
};
return dataset;
}
}
function parseList(){
return function(res){
var dataset = new Dataset();
each(res.result, function(value, i){
dataset.set( [ 'Value', i+1 ], value );
});
dataset.data.input = res;
dataset.parser = {
name: 'list'
};
return dataset;
}
}
function parseExtraction(){
return function(res){
var dataset = new Dataset();
each(res.result, function(record, i){
each(flatten(record), function(value, key){
dataset.set([key, i+1], value);
});
});
dataset.deleteColumn(0);
dataset.data.input = res;
dataset.parser = {
name: 'extraction'
};
return dataset;
}
}
},{"../../core/utils/each":31,"./flatten":51}],54:[function(require,module,exports){
/*!
* ----------------------
* C3.js Adapter
* ----------------------
*/
var Dataviz = require('../dataviz'),
each = require('../../core/utils/each'),
  extend = require('../../core/utils/extend'),
  getSetupTemplate = require('./c3/get-setup-template');
module.exports = function(){
var dataTypes = {
'singular' : ['gauge'],
'categorical' : ['donut', 'pie'],
'cat-interval' : ['area-step', 'step', 'bar', 'area', 'area-spline', 'spline', 'line'],
'cat-ordinal' : ['bar', 'area', 'area-spline', 'spline', 'line', 'step', 'area-step'],
'chronological' : ['area', 'area-spline', 'spline', 'line', 'bar', 'step', 'area-step'],
'cat-chronological' : ['line', 'spline', 'area', 'area-spline', 'bar', 'step', 'area-step']
};
var charts = {};
each(['gauge', 'donut', 'pie', 'bar', 'area', 'area-spline', 'spline', 'line', 'step', 'area-step'], function(type, index){
charts[type] = {
render: function(){
if (this.data()[0].length === 1 || this.data().length === 1) {
this.error('No data to display');
return;
}
this.view._artifacts['c3'] = c3.generate(getSetupTemplate.call(this, type));
this.update();
},
update: function(){
var self = this, cols = [];
if (type === 'gauge') {
self.view._artifacts['c3'].load({
columns: [ [self.title(), self.data()[1][1]] ]
})
}
else if (type === 'pie' || type === 'donut') {
self.view._artifacts['c3'].load({
columns: self.dataset.data.output.slice(1)
});
}
else {
if (this.dataType().indexOf('chron') > -1) {
cols.push(self.dataset.selectColumn(0));
cols[0][0] = 'x';
}
each(self.data()[0], function(c, i){
if (i > 0) {
cols.push(self.dataset.selectColumn(i));
}
});
if (self.stacked()) {
self.view._artifacts['c3'].groups([self.labels()]);
}
self.view._artifacts['c3'].load({
columns: cols
});
}
},
destroy: function(){
_selfDestruct.call(this);
}
};
});
function _selfDestruct(){
if (this.view._artifacts['c3']) {
this.view._artifacts['c3'].destroy();
this.view._artifacts['c3'] = null;
}
}
Dataviz.register('c3', charts, { capabilities: dataTypes });
};
},{"../../core/utils/each":31,"../../core/utils/extend":33,"../dataviz":59,"./c3/get-setup-template":55}],55:[function(require,module,exports){
var extend = require('../../../core/utils/extend');
var clone = require('../../../core/utils/clone');
module.exports = function (type) {
var chartOptions = clone(this.chartOptions());
var setup = extend({
axis: {},
color: {},
data: {},
size: {}
}, chartOptions);
setup.bindto = this.el();
setup.color.pattern = this.colors();
setup.data.columns = [];
setup.size.height = this.height();
setup.size.width = this.width();
setup['data']['type'] = type;
if (type === 'gauge') {}
else if (type === 'pie' || type === 'donut') {
setup[type] = { title: this.title() };
}
else {
if (this.dataType().indexOf('chron') > -1) {
setup['data']['x'] = 'x';
setup['axis']['x'] = setup['axis']['x'] || {};
setup['axis']['x']['type'] = 'timeseries';
setup['axis']['x']['tick'] = setup['axis']['x']['tick'] || {
format: this.dateFormat() || getDateFormatDefault(this.data()[1][0], this.data()[2][0])
};
}
else {
if (this.dataType() === 'cat-ordinal') {
setup['axis']['x'] = setup['axis']['x'] || {};
setup['axis']['x']['type'] = 'category';
setup['axis']['x']['categories'] = setup['axis']['x']['categories'] || this.labels()
}
}
if (this.title()) {
setup['axis']['y'] = { label: this.title() };
}
}
return setup;
}
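// Picks an x-axis tick format from the gap between two consecutive timestamps: month+year for spans of 28+ days, month+day for 1+ day, otherwise a time-of-day format string.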
function getDateFormatDefault(a, b){
var d = Math.abs(new Date(a).getTime() - new Date(b).getTime());
var months = [
'Jan', 'Feb', 'Mar',
'Apr', 'May', 'June',
'July', 'Aug', 'Sept',
'Oct', 'Nov', 'Dec'
];
if (d >= 2419200000) {
return function(ms){
var date = new Date(ms);
return months[date.getMonth()] + ' ' + date.getFullYear();
};
}
else if (d >= 86400000) {
return function(ms){
var date = new Date(ms);
return months[date.getMonth()] + ' ' + date.getDate();
};
}
else if (d >= 3600000) {
return '%I:%M %p';
}
else {
return '%I:%M:%S %p';
}
}
},{"../../../core/utils/clone":30,"../../../core/utils/extend":33}],56:[function(require,module,exports){
/*!
* ----------------------
* Chart.js Adapter
* ----------------------
*/
var Dataviz = require("../dataviz"),
each = require("../../core/utils/each"),
extend = require("../../core/utils/extend");
module.exports = function(){
if (typeof Chart !== "undefined") {
Chart.defaults.global.responsive = true;
}
var dataTypes = {
"categorical" : ["doughnut", "pie", "polar-area", "radar"],
"cat-interval" : ["bar", "line"],
"cat-ordinal" : ["bar", "line"],
"chronological" : ["line", "bar"],
"cat-chronological" : ["line", "bar"]
};
var ChartNameMap = {
"radar": "Radar",
"polar-area": "PolarArea",
"pie": "Pie",
"doughnut": "Doughnut",
"line": "Line",
"bar": "Bar"
};
var dataTransformers = {
'doughnut': getCategoricalData,
'pie': getCategoricalData,
'polar-area': getCategoricalData,
'radar': getSeriesData,
'line': getSeriesData,
'bar': getSeriesData
};
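// Builds the {value, color, highlight, label} objects Chart.js expects for doughnut, pie and polar-area charts.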
function getCategoricalData(){
var self = this, result = [];
each(self.dataset.selectColumn(0).slice(1), function(label, i){
result.push({
value: self.dataset.selectColumn(1).slice(1)[i],
color: self.colors()[+i],
      highlight: self.colors()[+i+9],
label: label
});
});
return result;
}
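// Builds the {labels, datasets} structure Chart.js expects for radar, line and bar charts, deriving RGBA fills and strokes from the configured hex palette.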
function getSeriesData(){
var self = this,
labels,
result = {
labels: [],
datasets: []
};
labels = this.dataset.selectColumn(0).slice(1);
each(labels, function(l,i){
if (l instanceof Date) {
result.labels.push((l.getMonth()+1) + "-" + l.getDate() + "-" + l.getFullYear());
} else {
result.labels.push(l);
}
})
each(self.dataset.selectRow(0).slice(1), function(label, i){
var hex = {
r: hexToR(self.colors()[i]),
g: hexToG(self.colors()[i]),
b: hexToB(self.colors()[i])
};
result.datasets.push({
label: label,
fillColor : "rgba(" + hex.r + "," + hex.g + "," + hex.b + ",0.2)",
strokeColor : "rgba(" + hex.r + "," + hex.g + "," + hex.b + ",1)",
pointColor : "rgba(" + hex.r + "," + hex.g + "," + hex.b + ",1)",
pointStrokeColor: "#fff",
pointHighlightFill: "#fff",
pointHighlightStroke: "rgba(" + hex.r + "," + hex.g + "," + hex.b + ",1)",
data: self.dataset.selectColumn(+i+1).slice(1)
});
});
return result;
}
var charts = {};
each(["doughnut", "pie", "polar-area", "radar", "bar", "line"], function(type, index){
charts[type] = {
initialize: function(){
if (this.data()[0].length === 1 || this.data().length === 1) {
this.error('No data to display');
return;
}
if (this.el().nodeName.toLowerCase() !== "canvas") {
var canvas = document.createElement('canvas');
this.el().innerHTML = "";
this.el().appendChild(canvas);
this.view._artifacts["ctx"] = canvas.getContext("2d");
}
else {
this.view._artifacts["ctx"] = this.el().getContext("2d");
}
if (this.height()) {
this.view._artifacts["ctx"].canvas.height = this.height();
this.view._artifacts["ctx"].canvas.style.height = String(this.height() + "px");
}
if (this.width()) {
this.view._artifacts["ctx"].canvas.width = this.width();
this.view._artifacts["ctx"].canvas.style.width = String(this.width() + "px");
}
return this;
},
render: function(){
if(_isEmptyOutput(this.dataset)) {
this.error("No data to display");
return;
}
var method = ChartNameMap[type],
opts = extend({}, this.chartOptions()),
data = dataTransformers[type].call(this);
if (this.view._artifacts["chartjs"]) {
this.view._artifacts["chartjs"].destroy();
}
this.view._artifacts["chartjs"] = new Chart(this.view._artifacts["ctx"])[method](data, opts);
return this;
},
destroy: function(){
_selfDestruct.call(this);
}
};
});
function _selfDestruct(){
if (this.view._artifacts["chartjs"]) {
this.view._artifacts["chartjs"].destroy();
this.view._artifacts["chartjs"] = null;
}
}
function _isEmptyOutput(dataset) {
var flattened = dataset.output().reduce(function(a, b) {
return a.concat(b)
});
return flattened.length === 0
}
function hexToR(h) {return parseInt((cutHex(h)).substring(0,2),16)}
function hexToG(h) {return parseInt((cutHex(h)).substring(2,4),16)}
function hexToB(h) {return parseInt((cutHex(h)).substring(4,6),16)}
function cutHex(h) {return (h.charAt(0)=="#") ? h.substring(1,7):h}
Dataviz.register("chartjs", charts, { capabilities: dataTypes });
};
},{"../../core/utils/each":31,"../../core/utils/extend":33,"../dataviz":59}],57:[function(require,module,exports){
/*!
* ----------------------
* Google Charts Adapter
* ----------------------
*/
/*
TODO:
[ ] Build a more robust DataTable transformer
[ ] ^Expose date parser for google charts tooltips (#70)
[ ] ^Allow custom tooltips (#147)
*/
var Dataviz = require("../dataviz"),
each = require("../../core/utils/each"),
extend = require("../../core/utils/extend"),
Keen = require("../../core");
module.exports = function(){
Keen.loaded = false;
var errorMapping = {
"Data column(s) for axis #0 cannot be of type string": "No results to visualize"
};
var chartTypes = ['AreaChart', 'BarChart', 'ColumnChart', 'LineChart', 'PieChart', 'Table'];
var chartMap = {};
var dataTypes = {
'categorical': ['piechart', 'barchart', 'columnchart', 'table'],
'cat-interval': ['columnchart', 'barchart', 'table'],
'cat-ordinal': ['barchart', 'columnchart', 'areachart', 'linechart', 'table'],
'chronological': ['areachart', 'linechart', 'table'],
'cat-chronological': ['linechart', 'columnchart', 'barchart', 'areachart'],
'nominal': ['table'],
'extraction': ['table']
};
each(chartTypes, function (type) {
var name = type.toLowerCase();
chartMap[name] = {
initialize: function(){
},
render: function(){
if(typeof google === "undefined") {
this.error("The Google Charts library could not be loaded.");
return;
}
var self = this;
if (self.view._artifacts['googlechart']) {
this.destroy();
}
self.view._artifacts['googlechart'] = self.view._artifacts['googlechart'] || new google.visualization[type](self.el());
google.visualization.events.addListener(self.view._artifacts['googlechart'], 'error', function(stack){
_handleErrors.call(self, stack);
});
this.update();
},
update: function(){
var options = _getDefaultAttributes.call(this, type);
extend(options, this.chartOptions(), this.attributes());
options['isStacked'] = (this.stacked() || options['isStacked']);
this.view._artifacts['datatable'] = google.visualization.arrayToDataTable(this.data());
if (options.dateFormat) {
if (typeof options.dateFormat === 'function') {
options.dateFormat(this.view._artifacts['datatable']);
}
else if (typeof options.dateFormat === 'string') {
new google.visualization.DateFormat({
pattern: options.dateFormat
}).format(this.view._artifacts['datatable'], 0);
}
}
if (this.view._artifacts['googlechart']) {
this.view._artifacts['googlechart'].draw(this.view._artifacts['datatable'], options);
}
},
destroy: function(){
if (this.view._artifacts['googlechart']) {
google.visualization.events.removeAllListeners(this.view._artifacts['googlechart']);
this.view._artifacts['googlechart'].clearChart();
this.view._artifacts['googlechart'] = null;
this.view._artifacts['datatable'] = null;
}
}
};
});
Dataviz.register('google', chartMap, {
capabilities: dataTypes,
dependencies: [{
type: 'script',
url: 'https://www.google.com/jsapi',
cb: function(done) {
if (typeof google === 'undefined'){
this.trigger("error", "Problem loading Google Charts library. Please contact us!");
done();
}
else {
google.load('visualization', '1.1', {
packages: ['corechart', 'table'],
callback: function(){
done();
}
});
}
}
}]
});
function _handleErrors(stack){
var message = errorMapping[stack['message']] || stack['message'] || 'An error occurred';
this.error(message);
}
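// Returns per-chart-type Google Charts defaults (axis styling, legend visibility, date formats) that are later merged with user-supplied options.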
function _getDefaultAttributes(type){
var output = {};
switch (type.toLowerCase()) {
case "areachart":
output.lineWidth = 2;
output.hAxis = {
baselineColor: 'transparent',
gridlines: { color: 'transparent' }
};
output.vAxis = {
viewWindow: { min: 0 }
};
if (this.dataType() === "chronological" || this.dataType() === "cat-ordinal") {
output.legend = "none";
output.chartArea = {
width: "85%"
};
}
if (this.dateFormat() && typeof this.dateFormat() === 'string') {
output.hAxis.format = this.dateFormat();
}
break;
case "barchart":
output.hAxis = {
viewWindow: { min: 0 }
};
output.vAxis = {
baselineColor: 'transparent',
gridlines: { color: 'transparent' }
};
if (this.dataType() === "chronological" || this.dataType() === "cat-ordinal") {
output.legend = "none";
}
if (this.dateFormat() && typeof this.dateFormat() === 'string') {
output.vAxis.format = this.dateFormat();
}
break;
case "columnchart":
output.hAxis = {
baselineColor: 'transparent',
gridlines: { color: 'transparent' }
};
output.vAxis = {
viewWindow: { min: 0 }
};
if (this.dataType() === "chronological" || this.dataType() === "cat-ordinal") {
output.legend = "none";
output.chartArea = {
width: "85%"
};
}
if (this.dateFormat() && typeof this.dateFormat() === 'string') {
output.hAxis.format = this.dateFormat();
}
break;
case "linechart":
output.lineWidth = 2;
output.hAxis = {
baselineColor: 'transparent',
gridlines: { color: 'transparent' }
};
output.vAxis = {
viewWindow: { min: 0 }
};
if (this.dataType() === "chronological" || this.dataType() === "cat-ordinal") {
output.legend = "none";
output.chartArea = {
width: "85%"
};
}
if (this.dateFormat() && typeof this.dateFormat() === 'string') {
output.hAxis.format = this.dateFormat();
}
break;
case "piechart":
output.sliceVisibilityThreshold = 0.01;
break;
case "table":
break;
}
return output;
}
};
},{"../../core":18,"../../core/utils/each":31,"../../core/utils/extend":33,"../dataviz":59}],58:[function(require,module,exports){
/*!
* ----------------------
* Keen IO Adapter
* ----------------------
*/
var Keen = require("../../core"),
Dataviz = require("../dataviz");
var clone = require("../../core/utils/clone"),
each = require("../../core/utils/each"),
extend = require("../../core/utils/extend"),
prettyNumber = require("../utils/prettyNumber");
module.exports = function(){
var Metric, Error, Spinner;
Keen.Error = {
defaults: {
backgroundColor : "",
borderRadius : "4px",
color : "#ccc",
display : "block",
fontFamily : "Helvetica Neue, Helvetica, Arial, sans-serif",
fontSize : "21px",
fontWeight : "light",
textAlign : "center"
}
};
Keen.Spinner.defaults = {
height: 138,
lines: 10,
length: 8,
width: 3,
radius: 10,
corners: 1,
rotate: 0,
direction: 1,
color: '#4d4d4d',
speed: 1.67,
trail: 60,
shadow: false,
hwaccel: false,
className: 'keen-spinner',
zIndex: 2e9,
top: '50%',
left: '50%'
};
var dataTypes = {
'singular': ['metric']
};
Metric = {
initialize: function(){
var css = document.createElement("style"),
bgDefault = "#49c5b1";
css.id = "keen-widgets";
css.type = "text/css";
css.innerHTML = "\
.keen-metric { \n background: " + bgDefault + "; \n border-radius: 4px; \n color: #fff; \n font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; \n padding: 10px 0; \n text-align: center; \n} \
.keen-metric-value { \n display: block; \n font-size: 84px; \n font-weight: 700; \n line-height: 84px; \n} \
.keen-metric-title { \n display: block; \n font-size: 24px; \n font-weight: 200; \n}";
if (!document.getElementById(css.id)) {
document.body.appendChild(css);
}
},
render: function(){
var bgColor = (this.colors().length == 1) ? this.colors()[0] : "#49c5b1",
title = this.title() || "Result",
value = (this.data()[1] && this.data()[1][1]) ? this.data()[1][1] : 0,
width = this.width(),
opts = this.chartOptions() || {},
prefix = "",
suffix = "";
var styles = {
'width': (width) ? width + 'px' : 'auto'
};
var formattedNum = value;
if ( typeof opts.prettyNumber === 'undefined' || opts.prettyNumber == true ) {
if ( !isNaN(parseInt(value)) ) {
formattedNum = prettyNumber(value);
}
}
if (opts['prefix']) {
prefix = '<span class="keen-metric-prefix">' + opts['prefix'] + '</span>';
}
if (opts['suffix']) {
suffix = '<span class="keen-metric-suffix">' + opts['suffix'] + '</span>';
}
this.el().innerHTML = '' +
'<div class="keen-widget keen-metric" style="background-color: ' + bgColor + '; width:' + styles.width + ';" title="' + value + '">' +
'<span class="keen-metric-value">' + prefix + formattedNum + suffix + '</span>' +
'<span class="keen-metric-title">' + title + '</span>' +
'</div>';
}
};
Error = {
initialize: function(){},
render: function(text, style){
var err, msg;
var defaultStyle = clone(Keen.Error.defaults);
var currentStyle = extend(defaultStyle, style);
err = document.createElement("div");
err.className = "keen-error";
each(currentStyle, function(value, key){
err.style[key] = value;
});
err.style.height = String(this.height() + "px");
err.style.paddingTop = (this.height() / 2 - 15) + "px";
err.style.width = String(this.width() + "px");
msg = document.createElement("span");
msg.innerHTML = text || "Yikes! An error occurred!";
err.appendChild(msg);
this.el().innerHTML = "";
this.el().appendChild(err);
},
destroy: function(){
this.el().innerHTML = "";
}
};
Spinner = {
initialize: function(){},
render: function(){
var spinner = document.createElement("div");
var height = this.height() || Keen.Spinner.defaults.height;
spinner.className = "keen-loading";
spinner.style.height = String(height + "px");
spinner.style.position = "relative";
spinner.style.width = String(this.width() + "px");
this.el().innerHTML = "";
this.el().appendChild(spinner);
this.view._artifacts.spinner = new Keen.Spinner(Keen.Spinner.defaults).spin(spinner);
},
destroy: function(){
this.view._artifacts.spinner.stop();
this.view._artifacts.spinner = null;
}
};
Keen.Dataviz.register('keen-io', {
'metric': Metric,
'error': Error,
'spinner': Spinner
}, {
capabilities: dataTypes
});
};
},{"../../core":18,"../../core/utils/clone":30,"../../core/utils/each":31,"../../core/utils/extend":33,"../dataviz":59,"../utils/prettyNumber":98}],59:[function(require,module,exports){
var clone = require('../core/utils/clone'),
each = require('../core/utils/each'),
extend = require('../core/utils/extend'),
loadScript = require('./utils/loadScript'),
loadStyle = require('./utils/loadStyle');
var Keen = require('../core');
var Emitter = require('../core/utils/emitter-shim');
var Dataset = require('../dataset');
function Dataviz(){
this.dataset = new Dataset();
this.view = {
_prepared: false,
_initialized: false,
_rendered: false,
_artifacts: { /* state bin */ },
adapter: {
library: undefined,
chartOptions: {},
chartType: undefined,
defaultChartType: undefined,
dataType: undefined
},
attributes: clone(Dataviz.defaults),
defaults: clone(Dataviz.defaults),
el: undefined,
loader: { library: 'keen-io', chartType: 'spinner' }
};
Dataviz.visuals.push(this);
};
extend(Dataviz, {
dataTypeMap: {
'singular': { library: 'keen-io', chartType: 'metric' },
'categorical': { library: 'google', chartType: 'piechart' },
'cat-interval': { library: 'google', chartType: 'columnchart' },
'cat-ordinal': { library: 'google', chartType: 'barchart' },
'chronological': { library: 'google', chartType: 'areachart' },
'cat-chronological': { library: 'google', chartType: 'linechart' },
'extraction': { library: 'google', chartType: 'table' },
'nominal': { library: 'google', chartType: 'table' }
},
defaults: {
colors: [
/* teal red yellow purple orange mint blue green lavender */
'#00bbde', '#fe6672', '#eeb058', '#8a8ad6', '#ff855c', '#00cfbb', '#5a9eed', '#73d483', '#c879bb',
'#0099b6', '#d74d58', '#cb9141', '#6b6bb6', '#d86945', '#00aa99', '#4281c9', '#57b566', '#ac5c9e',
'#27cceb', '#ff818b', '#f6bf71', '#9b9be1', '#ff9b79', '#26dfcd', '#73aff4', '#87e096', '#d88bcb'
],
indexBy: 'timeframe.start',
stacked: false
},
dependencies: {
loading: 0,
loaded: 0,
urls: {}
},
libraries: {},
visuals: []
});
Emitter(Dataviz);
Emitter(Dataviz.prototype);
Dataviz.register = function(name, methods, config){
var self = this;
var loadHandler = function(st) {
st.loaded++;
if(st.loaded === st.loading) {
Keen.loaded = true;
Keen.trigger('ready');
}
};
Dataviz.libraries[name] = Dataviz.libraries[name] || {};
each(methods, function(method, key){
Dataviz.libraries[name][key] = method;
});
if (config && config.capabilities) {
Dataviz.libraries[name]._defaults = Dataviz.libraries[name]._defaults || {};
each(config.capabilities, function(typeSet, key){
Dataviz.libraries[name]._defaults[key] = typeSet;
});
}
if (config && config.dependencies) {
each(config.dependencies, function (dependency, index, collection) {
var status = Dataviz.dependencies;
if(!status.urls[dependency.url]) {
status.urls[dependency.url] = true;
status.loading++;
var method = dependency.type === 'script' ? loadScript : loadStyle;
method(dependency.url, function() {
if(dependency.cb) {
dependency.cb.call(self, function() {
loadHandler(status);
});
} else {
loadHandler(status);
}
});
}
});
}
};
Dataviz.find = function(target){
if (!arguments.length) return Dataviz.visuals;
var el = target.nodeName ? target : document.querySelector(target),
match;
each(Dataviz.visuals, function(visual){
if (el == visual.el()){
match = visual;
return false;
}
});
if (match) return match;
};
module.exports = Dataviz;
},{"../core":18,"../core/utils/clone":30,"../core/utils/each":31,"../core/utils/emitter-shim":32,"../core/utils/extend":33,"../dataset":39,"./utils/loadScript":96,"./utils/loadStyle":97}],60:[function(require,module,exports){
var clone = require("../../core/utils/clone"),
extend = require("../../core/utils/extend"),
Dataviz = require("../dataviz"),
Request = require("../../core/request");
module.exports = function(query, el, cfg) {
var DEFAULTS = clone(Dataviz.defaults),
visual = new Dataviz(),
request = new Request(this, [query]),
config = cfg || {};
visual
.attributes(extend(DEFAULTS, config))
.el(el)
.prepare();
request.refresh();
request.on("complete", function(){
visual
.parseRequest(this)
.call(function(){
if (config.labels) {
this.labels(config.labels);
}
})
.render();
});
request.on("error", function(res){
visual.error(res.message);
});
return visual;
};
},{"../../core/request":27,"../../core/utils/clone":30,"../../core/utils/extend":33,"../dataviz":59}],61:[function(require,module,exports){
var Dataviz = require("../dataviz"),
  extend = require("../../core/utils/extend");
module.exports = function(){
var map = extend({}, Dataviz.dataTypeMap),
dataType = this.dataType(),
library = this.library(),
chartType = this.chartType() || this.defaultChartType();
if (!library && map[dataType]) {
library = map[dataType].library;
}
if (library && !chartType && dataType) {
chartType = Dataviz.libraries[library]._defaults[dataType][0];
}
if (library && !chartType && map[dataType]) {
chartType = map[dataType].chartType;
}
if (library && chartType && Dataviz.libraries[library][chartType]) {
return Dataviz.libraries[library][chartType];
}
else {
return {};
}
};
},{"../../core/utils/extend":33,"../dataviz":59}],62:[function(require,module,exports){
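// Derives a human-readable default title from the first query, e.g. "Count Unique - purchases".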
module.exports = function(req){
var analysis = req.queries[0].analysis.replace("_", " "),
collection = req.queries[0].get('event_collection'),
output;
output = analysis.replace( /\b./g, function(a){
return a.toUpperCase();
});
if (collection) {
output += ' - ' + collection;
}
return output;
};
},{}],63:[function(require,module,exports){
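// Infers the visualization data type (singular, categorical, chronological, ...) from the query's interval/group_by shape and analysis type.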
module.exports = function(query){
var isInterval = typeof query.params.interval === "string",
isGroupBy = typeof query.params.group_by === "string",
is2xGroupBy = query.params.group_by instanceof Array,
dataType;
if (!isGroupBy && !isInterval) {
dataType = 'singular';
}
if (isGroupBy && !isInterval) {
dataType = 'categorical';
}
if (isInterval && !isGroupBy) {
dataType = 'chronological';
}
if (isInterval && isGroupBy) {
dataType = 'cat-chronological';
}
if (!isInterval && is2xGroupBy) {
dataType = 'categorical';
}
if (isInterval && is2xGroupBy) {
dataType = 'cat-chronological';
}
if (query.analysis === "funnel") {
dataType = 'cat-ordinal';
}
if (query.analysis === "extraction") {
dataType = 'extraction';
}
if (query.analysis === "select_unique") {
dataType = 'nominal';
}
return dataType;
};
},{}],64:[function(require,module,exports){
var extend = require('../core/utils/extend'),
Dataviz = require('./dataviz');
extend(Dataviz.prototype, {
'adapter' : require('./lib/adapter'),
'attributes' : require('./lib/attributes'),
'call' : require('./lib/call'),
'chartOptions' : require('./lib/chartOptions'),
'chartType' : require('./lib/chartType'),
'colorMapping' : require('./lib/colorMapping'),
'colors' : require('./lib/colors'),
'data' : require('./lib/data'),
'dataType' : require('./lib/dataType'),
'dateFormat' : require('./lib/dateFormat'),
'defaultChartType' : require('./lib/defaultChartType'),
'el' : require('./lib/el'),
'height' : require('./lib/height'),
'indexBy' : require('./lib/indexBy'),
'labelMapping' : require('./lib/labelMapping'),
'labels' : require('./lib/labels'),
'library' : require('./lib/library'),
'parseRawData' : require('./lib/parseRawData'),
'parseRequest' : require('./lib/parseRequest'),
'prepare' : require('./lib/prepare'),
'sortGroups' : require('./lib/sortGroups'),
'sortIntervals' : require('./lib/sortIntervals'),
'stacked' : require('./lib/stacked'),
'title' : require('./lib/title'),
'width' : require('./lib/width')
});
extend(Dataviz.prototype, {
'destroy' : require('./lib/actions/destroy'),
'error' : require('./lib/actions/error'),
'initialize' : require('./lib/actions/initialize'),
'render' : require('./lib/actions/render'),
'update' : require('./lib/actions/update')
});
module.exports = Dataviz;
},{"../core/utils/extend":33,"./dataviz":59,"./lib/actions/destroy":65,"./lib/actions/error":66,"./lib/actions/initialize":67,"./lib/actions/render":68,"./lib/actions/update":69,"./lib/adapter":70,"./lib/attributes":71,"./lib/call":72,"./lib/chartOptions":73,"./lib/chartType":74,"./lib/colorMapping":75,"./lib/colors":76,"./lib/data":77,"./lib/dataType":78,"./lib/dateFormat":79,"./lib/defaultChartType":80,"./lib/el":81,"./lib/height":82,"./lib/indexBy":83,"./lib/labelMapping":84,"./lib/labels":85,"./lib/library":86,"./lib/parseRawData":87,"./lib/parseRequest":88,"./lib/prepare":89,"./lib/sortGroups":90,"./lib/sortIntervals":91,"./lib/stacked":92,"./lib/title":93,"./lib/width":94}],65:[function(require,module,exports){
var getAdapterActions = require("../../helpers/getAdapterActions");
module.exports = function(){
var actions = getAdapterActions.call(this);
if (actions.destroy) {
actions.destroy.apply(this, arguments);
}
if (this.el()) {
this.el().innerHTML = "";
}
this.view._prepared = false;
this.view._initialized = false;
this.view._rendered = false;
this.view._artifacts = {};
return this;
};
},{"../../helpers/getAdapterActions":61}],66:[function(require,module,exports){
var getAdapterActions = require("../../helpers/getAdapterActions"),
Dataviz = require("../../dataviz");
module.exports = function(){
var actions = getAdapterActions.call(this);
if (this.el()) {
if (actions['error']) {
actions['error'].apply(this, arguments);
} else {
Dataviz.libraries['keen-io']['error'].render.apply(this, arguments);
}
}
else {
this.emit('error', 'No DOM element provided');
}
return this;
};
},{"../../dataviz":59,"../../helpers/getAdapterActions":61}],67:[function(require,module,exports){
var getAdapterActions = require("../../helpers/getAdapterActions"),
Dataviz = require("../../dataviz");
module.exports = function(){
var actions = getAdapterActions.call(this);
var loader = Dataviz.libraries[this.view.loader.library][this.view.loader.chartType];
if (this.view._prepared) {
if (loader.destroy) loader.destroy.apply(this, arguments);
} else {
if (this.el()) this.el().innerHTML = "";
}
if (actions.initialize) {
actions.initialize.apply(this, arguments);
}
else {
this.error('Incorrect chartType');
this.emit('error', 'Incorrect chartType');
}
this.view._initialized = true;
return this;
};
},{"../../dataviz":59,"../../helpers/getAdapterActions":61}],68:[function(require,module,exports){
var getAdapterActions = require("../../helpers/getAdapterActions"),
applyTransforms = require("../../utils/applyTransforms");
module.exports = function(){
var actions = getAdapterActions.call(this);
applyTransforms.call(this);
if (!this.view._initialized) {
this.initialize();
}
if (this.el() && actions.render) {
actions.render.apply(this, arguments);
this.view._rendered = true;
}
return this;
};
},{"../../helpers/getAdapterActions":61,"../../utils/applyTransforms":95}],69:[function(require,module,exports){
var getAdapterActions = require("../../helpers/getAdapterActions"),
applyTransforms = require("../../utils/applyTransforms");
module.exports = function(){
var actions = getAdapterActions.call(this);
applyTransforms.call(this);
if (actions.update) {
actions.update.apply(this, arguments);
} else if (actions.render) {
this.render();
}
return this;
};
},{"../../helpers/getAdapterActions":61,"../../utils/applyTransforms":95}],70:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = function(obj){
if (!arguments.length) return this.view.adapter;
var self = this;
each(obj, function(prop, key){
self.view.adapter[key] = (prop ? prop : null);
});
return this;
};
},{"../../core/utils/each":31}],71:[function(require,module,exports){
var each = require("../../core/utils/each");
var chartOptions = require("./chartOptions"),
chartType = require("./chartType"),
library = require("./library");
module.exports = function(obj){
if (!arguments.length) return this.view["attributes"];
var self = this;
each(obj, function(prop, key){
if (key === "library") {
library.call(self, prop);
}
else if (key === "chartType") {
chartType.call(self, prop);
}
else if (key === "chartOptions") {
chartOptions.call(self, prop);
}
else {
self.view["attributes"][key] = prop;
}
});
return this;
};
},{"../../core/utils/each":31,"./chartOptions":73,"./chartType":74,"./library":86}],72:[function(require,module,exports){
module.exports = function(fn){
fn.call(this);
return this;
};
},{}],73:[function(require,module,exports){
var extend = require('../../core/utils/extend');
module.exports = function(obj){
if (!arguments.length) return this.view.adapter.chartOptions;
if (typeof obj === 'object' && obj !== null) {
extend(this.view.adapter.chartOptions, obj);
}
else {
this.view.adapter.chartOptions = {};
}
return this;
};
},{"../../core/utils/extend":33}],74:[function(require,module,exports){
module.exports = function(str){
if (!arguments.length) return this.view.adapter.chartType;
this.view.adapter.chartType = (str ? String(str) : null);
return this;
};
},{}],75:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = function(obj){
if (!arguments.length) return this.view["attributes"].colorMapping;
this.view["attributes"].colorMapping = (obj ? obj : null);
colorMapping.call(this);
return this;
};
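// Splices user-mapped colors into the default palette so each mapped series/group label keeps its configured color.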
function colorMapping(){
var self = this,
schema = this.dataset.schema,
data = this.dataset.output(),
colorSet = this.view.defaults.colors.slice(),
colorMap = this.colorMapping(),
dt = this.dataType() || "";
if (colorMap) {
if (dt.indexOf("chronological") > -1 || (schema.unpack && data[0].length > 2)) {
each(data[0].slice(1), function(label, i){
var color = colorMap[label];
if (color && colorSet[i] !== color) {
colorSet.splice(i, 0, color);
}
});
}
else {
each(self.dataset.selectColumn(0).slice(1), function(label, i){
var color = colorMap[label];
if (color && colorSet[i] !== color) {
colorSet.splice(i, 0, color);
}
});
}
self.view.attributes.colors = colorSet;
}
}
},{"../../core/utils/each":31}],76:[function(require,module,exports){
module.exports = function(arr){
if (!arguments.length) return this.view["attributes"].colors;
this.view["attributes"].colors = (arr instanceof Array ? arr : null);
this.view.defaults.colors = (arr instanceof Array ? arr : null);
return this;
};
},{}],77:[function(require,module,exports){
var Dataset = require("../../dataset"),
Request = require("../../core/request");
module.exports = function(data){
if (!arguments.length) return this.dataset.output();
if (data instanceof Dataset) {
this.dataset = data;
} else if (data instanceof Request) {
this.parseRequest(data);
} else {
this.parseRawData(data);
}
return this;
};
},{"../../core/request":27,"../../dataset":39}],78:[function(require,module,exports){
module.exports = function(str){
if (!arguments.length) return this.view.adapter.dataType;
this.view.adapter.dataType = (str ? String(str) : null);
return this;
};
},{}],79:[function(require,module,exports){
module.exports = function(val){
if (!arguments.length) return this.view.attributes.dateFormat;
if (typeof val === 'string' || typeof val === 'function') {
this.view.attributes.dateFormat = val;
}
else {
this.view.attributes.dateFormat = undefined;
}
return this;
};
},{}],80:[function(require,module,exports){
module.exports = function(str){
if (!arguments.length) return this.view.adapter.defaultChartType;
this.view.adapter.defaultChartType = (str ? String(str) : null);
return this;
};
},{}],81:[function(require,module,exports){
module.exports = function(el){
if (!arguments.length) return this.view.el;
this.view.el = el;
return this;
};
},{}],82:[function(require,module,exports){
module.exports = function(num){
if (!arguments.length) return this.view["attributes"]["height"];
this.view["attributes"]["height"] = (!isNaN(parseInt(num)) ? parseInt(num) : null);
return this;
};
},{}],83:[function(require,module,exports){
var Dataset = require('../../dataset'),
Dataviz = require('../dataviz'),
each = require('../../core/utils/each');
module.exports = function(str){
if (!arguments.length) return this.view['attributes'].indexBy;
this.view['attributes'].indexBy = (str ? String(str) : Dataviz.defaults.indexBy);
indexBy.call(this);
return this;
};
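// When the index column holds dates, re-parses the raw input against the requested timeframe property (start or end) and coerces the index values to Date objects.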
function indexBy(){
var parser, options;
if (this.dataset.output().length > 1
&& !isNaN(new Date(this.dataset.output()[1][0]).getTime())) {
if (this.dataset.parser
&& this.dataset.parser.name
&& this.dataset.parser.options) {
if (this.dataset.parser.options.length === 1) {
parser = Dataset.parser(this.dataset.parser.name, this.indexBy());
this.dataset.parser.options[0] = this.indexBy();
}
else {
parser = Dataset.parser(this.dataset.parser.name, this.dataset.parser.options[0], this.indexBy());
this.dataset.parser.options[1] = this.indexBy();
}
}
else if (this.dataset.output()[0].length === 2) {
parser = Dataset.parser('interval', this.indexBy());
this.dataset.parser = {
name: 'interval',
options: [this.indexBy()]
};
}
else {
parser = Dataset.parser('grouped-interval', this.indexBy());
this.dataset.parser = {
name: 'grouped-interval',
options: [this.indexBy()]
};
}
this.dataset = parser(this.dataset.input());
this.dataset.updateColumn(0, function(value){
return (typeof value === 'string') ? new Date(value) : value;
});
}
}
},{"../../core/utils/each":31,"../../dataset":39,"../dataviz":59}],84:[function(require,module,exports){
var each = require("../../core/utils/each");
module.exports = function(obj){
if (!arguments.length) return this.view["attributes"].labelMapping;
this.view["attributes"].labelMapping = (obj ? obj : null);
applyLabelMapping.call(this);
return this;
};
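// Replaces series labels (header row for wide/chronological data) or group labels (first column otherwise) with their user-provided display names.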
function applyLabelMapping(){
var self = this,
labelMap = this.labelMapping(),
dt = this.dataType() || "";
if (labelMap) {
if (dt.indexOf("chronological") > -1 || (self.dataset.output()[0].length > 2)) {
each(self.dataset.output()[0], function(c, i){
if (i > 0) {
self.dataset.data.output[0][i] = labelMap[c] || c;
}
});
}
else if (self.dataset.output()[0].length === 2) {
self.dataset.updateColumn(0, function(c, i){
return labelMap[c] || c;
});
}
}
}
},{"../../core/utils/each":31}],85:[function(require,module,exports){
var each = require('../../core/utils/each');
module.exports = function(arr){
if (!arguments.length) {
if (!this.view['attributes'].labels || !this.view['attributes'].labels.length) {
return getLabels.call(this);
}
else {
return this.view['attributes'].labels;
}
}
else {
this.view['attributes'].labels = (arr instanceof Array ? arr : null);
setLabels.call(this);
return this;
}
};
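// Writes custom labels into the header row (wide/chronological data) or the first column (categorical data).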
function setLabels(){
var self = this,
labelSet = this.labels() || null,
data = this.dataset.output(),
dt = this.dataType() || '';
if (labelSet) {
if (dt.indexOf('chronological') > -1 || (data[0].length > 2)) {
each(data[0], function(cell,i){
if (i > 0 && labelSet[i-1]) {
self.dataset.data.output[0][i] = labelSet[i-1];
}
});
}
else {
each(data, function(row,i){
if (i > 0 && labelSet[i-1]) {
self.dataset.data.output[i][0] = labelSet[i-1];
}
});
}
}
}
function getLabels(){
var data = this.dataset.output(),
dt = this.dataType() || '',
labels;
if (dt.indexOf('chron') > -1 || (data[0].length > 2)) {
labels = this.dataset.selectRow(0).slice(1);
}
else {
labels = this.dataset.selectColumn(0).slice(1);
}
return labels;
}
},{"../../core/utils/each":31}],86:[function(require,module,exports){
module.exports = function(str){
if (!arguments.length) return this.view.adapter.library;
this.view.adapter.library = (str ? String(str) : null);
return this;
};
},{}],87:[function(require,module,exports){
var Dataset = require('../../dataset');
var extend = require('../../core/utils/extend');
module.exports = function(response){
var dataType,
    indexBy = this.indexBy() ? this.indexBy() : 'timeframe.start',
parser,
parserArgs = [],
query = (typeof response.query !== 'undefined') ? response.query : {};
query = extend({
analysis_type: null,
event_collection: null,
filters: [],
group_by: null,
interval: null,
timeframe: null,
timezone: null
}, query);
if (query.analysis_type === 'funnel') {
dataType = 'cat-ordinal';
parser = 'funnel';
}
else if (query.analysis_type === 'extraction'){
dataType = 'extraction';
parser = 'extraction';
}
else if (query.analysis_type === 'select_unique') {
if (!query.group_by && !query.interval) {
dataType = 'nominal';
parser = 'list';
}
}
else if (query.analysis_type) {
if (!query.group_by && !query.interval) {
dataType = 'singular';
parser = 'metric';
}
else if (query.group_by && !query.interval) {
if (query.group_by instanceof Array && query.group_by.length > 1) {
dataType = 'categorical';
parser = 'double-grouped-metric';
parserArgs.push(query.group_by);
}
else {
dataType = 'categorical';
parser = 'grouped-metric';
}
}
else if (query.interval && !query.group_by) {
dataType = 'chronological';
parser = 'interval';
parserArgs.push(indexBy);
}
else if (query.group_by && query.interval) {
if (query.group_by instanceof Array && query.group_by.length > 1) {
dataType = 'cat-chronological';
parser = 'double-grouped-interval';
parserArgs.push(query.group_by);
parserArgs.push(indexBy);
}
else {
dataType = 'cat-chronological';
parser = 'grouped-interval';
parserArgs.push(indexBy);
}
}
}
if (!parser) {
if (typeof response.result === 'number'){
dataType = 'singular';
parser = 'metric';
}
if (response.result instanceof Array && response.result.length > 0){
if (response.result[0].timeframe && (typeof response.result[0].value == 'number' || response.result[0].value == null)) {
dataType = 'chronological';
parser = 'interval';
parserArgs.push(indexBy)
}
if (typeof response.result[0].result == 'number'){
dataType = 'categorical';
parser = 'grouped-metric';
}
if (response.result[0].value instanceof Array){
dataType = 'cat-chronological';
parser = 'grouped-interval';
parserArgs.push(indexBy)
}
if (typeof response.result[0] == 'number' && typeof response.steps !== "undefined"){
dataType = 'cat-ordinal';
parser = 'funnel';
}
if ((typeof response.result[0] == 'string' || typeof response.result[0] == 'number') && typeof response.steps === "undefined"){
dataType = 'nominal';
parser = 'list';
}
if (dataType === void 0) {
dataType = 'extraction';
parser = 'extraction';
}
}
}
if (dataType) {
this.dataType(dataType);
}
this.dataset = Dataset.parser.apply(this, [parser].concat(parserArgs))(response);
if (parser.indexOf('interval') > -1) {
this.dataset.updateColumn(0, function(value, i){
return new Date(value);
});
}
return this;
};
},{"../../core/utils/extend":33,"../../dataset":39}],88:[function(require,module,exports){
var Query = require('../../core/query');
var dataType = require('./dataType'),
extend = require('../../core/utils/extend'),
getDefaultTitle = require('../helpers/getDefaultTitle'),
getQueryDataType = require('../helpers/getQueryDataType'),
parseRawData = require('./parseRawData'),
title = require('./title');
module.exports = function(req){
var response = req.data instanceof Array ? req.data[0] : req.data;
if (req.queries[0] instanceof Query) {
response.query = extend({
analysis_type: req.queries[0].analysis
}, req.queries[0].params);
dataType.call(this, getQueryDataType(req.queries[0]));
this.view.defaults.title = getDefaultTitle.call(this, req);
if (!title.call(this)) {
title.call(this, this.view.defaults.title);
}
}
parseRawData.call(this, response);
return this;
};
},{"../../core/query":26,"../../core/utils/extend":33,"../helpers/getDefaultTitle":62,"../helpers/getQueryDataType":63,"./dataType":78,"./parseRawData":87,"./title":93}],89:[function(require,module,exports){
var Dataviz = require("../dataviz");
module.exports = function(){
var loader;
if (this.view._rendered) {
this.destroy();
}
if (this.el()) {
this.el().innerHTML = "";
loader = Dataviz.libraries[this.view.loader.library][this.view.loader.chartType];
if (loader.initialize) {
loader.initialize.apply(this, arguments);
}
if (loader.render) {
loader.render.apply(this, arguments);
}
this.view._prepared = true;
}
return this;
};
},{"../dataviz":59}],90:[function(require,module,exports){
module.exports = function(str){
if (!arguments.length) return this.view["attributes"].sortGroups;
this.view["attributes"].sortGroups = (str ? String(str) : null);
runSortGroups.call(this);
return this;
};
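// Sorts dataset columns by their sums for interval/wide data, or rows by their sums for categorical data, in the requested direction.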
function runSortGroups(){
var dt = this.dataType();
if (!this.sortGroups()) return;
if ((dt && dt.indexOf("chronological") > -1) || this.data()[0].length > 2) {
this.dataset.sortColumns(this.sortGroups(), this.dataset.getColumnSum);
}
else if (dt && (dt.indexOf("cat-") > -1 || dt.indexOf("categorical") > -1)) {
this.dataset.sortRows(this.sortGroups(), this.dataset.getRowSum);
}
return;
}
},{}],91:[function(require,module,exports){
module.exports = function(str){
if (!arguments.length) return this.view["attributes"].sortIntervals;
this.view["attributes"].sortIntervals = (str ? String(str) : null);
runSortIntervals.call(this);
return this;
};
function runSortIntervals(){
if (!this.sortIntervals()) return;
this.dataset.sortRows(this.sortIntervals());
return;
}
},{}],92:[function(require,module,exports){
module.exports = function(bool){
if (!arguments.length) return this.view['attributes']['stacked'];
this.view['attributes']['stacked'] = bool ? true : false;
return this;
};
},{}],93:[function(require,module,exports){
module.exports = function(str){
if (!arguments.length) return this.view["attributes"]["title"];
this.view["attributes"]["title"] = (str ? String(str) : null);
return this;
};
},{}],94:[function(require,module,exports){
module.exports = function(num){
if (!arguments.length) return this.view["attributes"]["width"];
this.view["attributes"]["width"] = (!isNaN(parseInt(num)) ? parseInt(num) : null);
return this;
};
},{}],95:[function(require,module,exports){
module.exports = function(){
if (this.labelMapping()) {
this.labelMapping(this.labelMapping());
}
if (this.colorMapping()) {
this.colorMapping(this.colorMapping());
}
if (this.sortGroups()) {
this.sortGroups(this.sortGroups());
}
if (this.sortIntervals()) {
this.sortIntervals(this.sortIntervals());
}
};
},{}],96:[function(require,module,exports){
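// Injects a <script> tag for the given URL into <head> and fires cb once it loads (with readyState handling for older browsers).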
module.exports = function(url, cb) {
var doc = document;
var handler;
var head = doc.head || doc.getElementsByTagName("head");
setTimeout(function () {
if ('item' in head) {
if (!head[0]) {
setTimeout(arguments.callee, 25);
return;
}
head = head[0];
}
var script = doc.createElement("script"),
scriptdone = false;
script.onload = script.onreadystatechange = function () {
if ((script.readyState && script.readyState !== "complete" && script.readyState !== "loaded") || scriptdone) {
return false;
}
script.onload = script.onreadystatechange = null;
scriptdone = true;
cb();
};
script.src = url;
head.insertBefore(script, head.firstChild);
}, 0);
if (doc.readyState === null && doc.addEventListener) {
doc.readyState = "loading";
doc.addEventListener("DOMContentLoaded", handler = function () {
doc.removeEventListener("DOMContentLoaded", handler, false);
doc.readyState = "complete";
}, false);
}
};
},{}],97:[function(require,module,exports){
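// Injects a stylesheet <link> for the given URL; note that cb fires immediately rather than waiting for the stylesheet to load.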
module.exports = function(url, cb) {
var link = document.createElement('link');
link.setAttribute('rel', 'stylesheet');
link.type = 'text/css';
link.href = url;
cb();
document.head.appendChild(link);
};
},{}],98:[function(require,module,exports){
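// Abbreviates large numbers for the metric widget, e.g. 1234 becomes "1.23k".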
module.exports = function(_input) {
var input = Number(_input),
sciNo = input.toPrecision(3),
prefix = "",
suffixes = ["", "k", "M", "B", "T"];
if (Number(sciNo) == input && String(input).length <= 4) {
return String(input);
}
if(input >= 1 || input <= -1) {
if(input < 0){
input = -input;
prefix = "-";
}
return prefix + recurse(input, 0);
} else {
return input.toPrecision(3);
}
function recurse(input, iteration) {
var input = String(input);
var split = input.split(".");
if(split.length > 1) {
input = split[0];
var rhs = split[1];
if (input.length == 2 && rhs.length > 0) {
if (rhs.length > 0) {
input = input + "." + rhs.charAt(0);
}
else {
input += "0";
}
}
else if (input.length == 1 && rhs.length > 0) {
input = input + "." + rhs.charAt(0);
if(rhs.length > 1) {
input += rhs.charAt(1);
}
else {
input += "0";
}
}
}
var numNumerals = input.length;
if (input.split(".").length > 1) {
numNumerals--;
}
if(numNumerals <= 3) {
return String(input) + suffixes[iteration];
}
else {
return recurse(Number(input) / 1000, iteration + 1);
}
}
};
},{}],99:[function(require,module,exports){
(function (global){
;(function (f) {
if (typeof define === "function" && define.amd) {
define("keen", [], function(){ return f(); });
}
if (typeof exports === "object" && typeof module !== "undefined") {
module.exports = f();
}
var g = null;
if (typeof window !== "undefined") {
g = window;
} else if (typeof global !== "undefined") {
g = global;
} else if (typeof self !== "undefined") {
g = self;
}
if (g) {
g.Keen = f();
}
})(function() {
"use strict";
var Keen = require("./core"),
extend = require("./core/utils/extend");
extend(Keen.prototype, {
"addEvent" : require("./core/lib/addEvent"),
"addEvents" : require("./core/lib/addEvents"),
"setGlobalProperties" : require("./core/lib/setGlobalProperties"),
"trackExternalLink" : require("./core/lib/trackExternalLink"),
"get" : require("./core/lib/get"),
"post" : require("./core/lib/post"),
"put" : require("./core/lib/post"),
"run" : require("./core/lib/run"),
"savedQueries" : require("./core/saved-queries"),
"draw" : require("./dataviz/extensions/draw")
});
Keen.Query = require("./core/query");
Keen.Request = require("./core/request");
Keen.Dataset = require("./dataset");
Keen.Dataviz = require("./dataviz");
Keen.Base64 = require("./core/utils/base64");
Keen.Spinner = require("spin.js");
Keen.utils = {
"domready" : require("domready"),
"each" : require("./core/utils/each"),
"extend" : extend,
"parseParams" : require("./core/utils/parseParams"),
"prettyNumber" : require("./dataviz/utils/prettyNumber")
};
require("./dataviz/adapters/keen-io")();
require("./dataviz/adapters/google")();
require("./dataviz/adapters/c3")();
require("./dataviz/adapters/chartjs")();
if (Keen.loaded) {
setTimeout(function(){
Keen.utils.domready(function(){
Keen.emit("ready");
});
}, 0);
}
require("./core/async")();
module.exports = Keen;
return Keen;
});
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./core":18,"./core/async":10,"./core/lib/addEvent":19,"./core/lib/addEvents":20,"./core/lib/get":21,"./core/lib/post":22,"./core/lib/run":23,"./core/lib/setGlobalProperties":24,"./core/lib/trackExternalLink":25,"./core/query":26,"./core/request":27,"./core/saved-queries":28,"./core/utils/base64":29,"./core/utils/each":31,"./core/utils/extend":33,"./core/utils/parseParams":35,"./dataset":39,"./dataviz":64,"./dataviz/adapters/c3":54,"./dataviz/adapters/chartjs":56,"./dataviz/adapters/google":57,"./dataviz/adapters/keen-io":58,"./dataviz/extensions/draw":60,"./dataviz/utils/prettyNumber":98,"domready":2,"spin.js":5}]},{},[99]);<|fim▁end|> | }; |
<|file_name|>trival.cc<|end_file_name|><|fim▁begin|>// -*- coding: utf-8 -*-
// Copyright (C) 2016 Laboratoire de Recherche et Developpement de
// l'Epita (LRDE).
//
// This file is part of Spot, a model checking library.
//
// Spot is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 3 of the License, or
// (at your option) any later version.<|fim▁hole|>// Spot is distributed in the hope that it will be useful, but WITHOUT
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
// or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
// License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#undef NDEBUG
#include <spot/misc/trival.hh>
#include <cassert>
int main()
{
spot::trival v1;
spot::trival v2(false);
spot::trival v3(true);
spot::trival v4 = spot::trival::maybe();
assert(v1 != v2);
assert(v1 != v3);
assert(v2 != v3);
assert(v4 != v2);
assert(v4 != v3);
assert(v2 == false);
assert(true == v3);
assert(v4 == spot::trival::maybe());
assert((bool)v3);
assert(!(bool)v2);
assert(!(bool)!v1);
assert(!(bool)v1);
assert(!(bool)!v3);
for (auto u : {v2, v1, v3})
for (auto v : {v2, v1, v3})
std::cout << u << " && " << v << " == " << (u && v) << '\n';
for (auto u : {v2, v1, v3})
for (auto v : {v2, v1, v3})
std::cout << u << " || " << v << " == " << (u || v) << '\n';
}<|fim▁end|> | // |
<|file_name|>gamepad.js<|end_file_name|><|fim▁begin|><|fim▁hole|> * This file is part of huborcid.
*
* huborcid is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* huborcid is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with huborcid. If not, see <http://www.gnu.org/licenses/>.
*/
// GamePad API
// https://dvcs.w3.org/hg/gamepad/raw-file/default/gamepad.html
// By Eric Bidelman
// FF has Gamepad API support only in special builds, but not in any release (even behind a flag)
// Their current implementation has no way to feature detect, only events to bind to.
// http://www.html5rocks.com/en/tutorials/doodles/gamepad/#toc-featuredetect
// but a patch will bring them up to date with the spec when it lands (and they'll pass this test)
// https://bugzilla.mozilla.org/show_bug.cgi?id=690935
Modernizr.addTest('gamepads', !!Modernizr.prefixed('getGamepads', navigator));<|fim▁end|> | /* |
<|file_name|>shim.rs<|end_file_name|><|fim▁begin|>use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItem;
use rustc_middle::mir::*;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::subst::{InternalSubsts, Subst};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_target::abi::VariantIdx;
use rustc_index::vec::{Idx, IndexVec};
use rustc_span::Span;
use rustc_target::spec::abi::Abi;
use std::fmt;
use std::iter;
use crate::util::expand_aggregate;
use crate::{
abort_unwinding_calls, add_call_guards, add_moves_for_packed_drops, remove_noop_landing_pads,
run_passes, simplify,
};
use rustc_middle::mir::patch::MirPatch;
use rustc_mir_dataflow::elaborate_drops::{self, DropElaborator, DropFlagMode, DropStyle};
pub fn provide(providers: &mut Providers) {
providers.mir_shims = make_shim;
}
fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<'tcx> {
debug!("make_shim({:?})", instance);
let mut result = match instance {
ty::InstanceDef::Item(..) => bug!("item {:?} passed to make_shim", instance),
ty::InstanceDef::VtableShim(def_id) => {
build_call_shim(tcx, instance, Some(Adjustment::Deref), CallKind::Direct(def_id))
}
ty::InstanceDef::FnPtrShim(def_id, ty) => {
let trait_ = tcx.trait_of_item(def_id).unwrap();
let adjustment = match tcx.fn_trait_kind_from_lang_item(trait_) {
Some(ty::ClosureKind::FnOnce) => Adjustment::Identity,
Some(ty::ClosureKind::FnMut | ty::ClosureKind::Fn) => Adjustment::Deref,
None => bug!("fn pointer {:?} is not an fn", ty),
};
build_call_shim(tcx, instance, Some(adjustment), CallKind::Indirect(ty))
}
// We are generating a call back to our def-id, which the
// codegen backend knows to turn to an actual call, be it
// a virtual call, or a direct call to a function for which
// indirect calls must be codegen'd differently than direct ones
// (such as `#[track_caller]`).<|fim▁hole|> ty::InstanceDef::ReifyShim(def_id) => {
build_call_shim(tcx, instance, None, CallKind::Direct(def_id))
}
ty::InstanceDef::ClosureOnceShim { call_once: _, track_caller: _ } => {
let fn_mut = tcx.require_lang_item(LangItem::FnMut, None);
let call_mut = tcx
.associated_items(fn_mut)
.in_definition_order()
.find(|it| it.kind == ty::AssocKind::Fn)
.unwrap()
.def_id;
build_call_shim(tcx, instance, Some(Adjustment::RefMut), CallKind::Direct(call_mut))
}
ty::InstanceDef::DropGlue(def_id, ty) => build_drop_shim(tcx, def_id, ty),
ty::InstanceDef::CloneShim(def_id, ty) => build_clone_shim(tcx, def_id, ty),
ty::InstanceDef::Virtual(..) => {
bug!("InstanceDef::Virtual ({:?}) is for direct calls only", instance)
}
ty::InstanceDef::Intrinsic(_) => {
bug!("creating shims from intrinsics ({:?}) is unsupported", instance)
}
};
debug!("make_shim({:?}) = untransformed {:?}", instance, result);
run_passes(
tcx,
&mut result,
MirPhase::Const,
&[&[
&add_moves_for_packed_drops::AddMovesForPackedDrops,
&remove_noop_landing_pads::RemoveNoopLandingPads,
&simplify::SimplifyCfg::new("make_shim"),
&add_call_guards::CriticalCallEdges,
&abort_unwinding_calls::AbortUnwindingCalls,
]],
);
debug!("make_shim({:?}) = {:?}", instance, result);
result
}
#[derive(Copy, Clone, Debug, PartialEq)]
enum Adjustment {
/// Pass the receiver as-is.
Identity,
/// We get passed `&[mut] self` and call the target with `*self`.
///
/// This either copies `self` (if `Self: Copy`, eg. for function items), or moves out of it
/// (for `VtableShim`, which effectively is passed `&own Self`).
Deref,
/// We get passed `self: Self` and call the target with `&mut self`.
///
/// In this case we need to ensure that the `Self` is dropped after the call, as the callee
/// won't do it for us.
RefMut,
}
#[derive(Copy, Clone, Debug, PartialEq)]
enum CallKind<'tcx> {
/// Call the `FnPtr` that was passed as the receiver.
Indirect(Ty<'tcx>),
/// Call a known `FnDef`.
Direct(DefId),
}
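/// Local 0 is the return place and locals 1..=n are the arguments, matching
/// MIR's calling convention; all argument locals are marked immutable.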
fn local_decls_for_sig<'tcx>(
sig: &ty::FnSig<'tcx>,
span: Span,
) -> IndexVec<Local, LocalDecl<'tcx>> {
iter::once(LocalDecl::new(sig.output(), span))
.chain(sig.inputs().iter().map(|ity| LocalDecl::new(ity, span).immutable()))
.collect()
}
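/// Builds the `drop_in_place` shim: a two-block skeleton (goto -> return)
/// that, when a concrete `ty` is supplied, is patched with fully elaborated
/// drop code for that type. Generators return their precomputed drop glue
/// instead.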
fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>) -> Body<'tcx> {
debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty);
// Check if this is a generator, if so, return the drop glue for it
if let Some(&ty::Generator(gen_def_id, substs, _)) = ty.map(|ty| ty.kind()) {
let body = tcx.optimized_mir(gen_def_id).generator_drop().unwrap();
return body.clone().subst(tcx, substs);
}
let substs = if let Some(ty) = ty {
tcx.intern_substs(&[ty.into()])
} else {
InternalSubsts::identity_for_item(tcx, def_id)
};
let sig = tcx.fn_sig(def_id).subst(tcx, substs);
let sig = tcx.erase_late_bound_regions(sig);
let span = tcx.def_span(def_id);
let source_info = SourceInfo::outermost(span);
let return_block = BasicBlock::new(1);
let mut blocks = IndexVec::with_capacity(2);
let block = |blocks: &mut IndexVec<_, _>, kind| {
blocks.push(BasicBlockData {
statements: vec![],
terminator: Some(Terminator { source_info, kind }),
is_cleanup: false,
})
};
block(&mut blocks, TerminatorKind::Goto { target: return_block });
block(&mut blocks, TerminatorKind::Return);
let source = MirSource::from_instance(ty::InstanceDef::DropGlue(def_id, ty));
let mut body =
new_body(tcx, source, blocks, local_decls_for_sig(&sig, span), sig.inputs().len(), span);
if ty.is_some() {
// The first argument (index 0), but add 1 for the return value.
let dropee_ptr = Place::from(Local::new(1 + 0));
if tcx.sess.opts.debugging_opts.mir_emit_retag {
// Function arguments should be retagged, and we make this one raw.
body.basic_blocks_mut()[START_BLOCK].statements.insert(
0,
Statement {
source_info,
kind: StatementKind::Retag(RetagKind::Raw, Box::new(dropee_ptr)),
},
);
}
let patch = {
let param_env = tcx.param_env_reveal_all_normalized(def_id);
let mut elaborator =
DropShimElaborator { body: &body, patch: MirPatch::new(&body), tcx, param_env };
let dropee = tcx.mk_place_deref(dropee_ptr);
let resume_block = elaborator.patch.resume_block();
elaborate_drops::elaborate_drop(
&mut elaborator,
source_info,
dropee,
(),
return_block,
elaborate_drops::Unwind::To(resume_block),
START_BLOCK,
);
elaborator.patch
};
patch.apply(&mut body);
}
body
}
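/// Convenience wrapper around `Body::new` for shims: a single outermost
/// source scope, no user type annotations and no variable debug info.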
fn new_body<'tcx>(
tcx: TyCtxt<'tcx>,
source: MirSource<'tcx>,
basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
local_decls: IndexVec<Local, LocalDecl<'tcx>>,
arg_count: usize,
span: Span,
) -> Body<'tcx> {
Body::new(
tcx,
source,
basic_blocks,
IndexVec::from_elem_n(
SourceScopeData {
span,
parent_scope: None,
inlined: None,
inlined_parent_scope: None,
local_data: ClearCrossCrate::Clear,
},
1,
),
local_decls,
IndexVec::new(),
arg_count,
vec![],
span,
None,
)
}
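/// Drop elaborator used for drop shims: there are no drop flags to track, so
/// every value is dropped unconditionally and most path queries return `None`.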
pub struct DropShimElaborator<'a, 'tcx> {
pub body: &'a Body<'tcx>,
pub patch: MirPatch<'tcx>,
pub tcx: TyCtxt<'tcx>,
pub param_env: ty::ParamEnv<'tcx>,
}
impl<'a, 'tcx> fmt::Debug for DropShimElaborator<'a, 'tcx> {
fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
Ok(())
}
}
impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> {
type Path = ();
fn patch(&mut self) -> &mut MirPatch<'tcx> {
&mut self.patch
}
fn body(&self) -> &'a Body<'tcx> {
self.body
}
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
fn drop_style(&self, _path: Self::Path, mode: DropFlagMode) -> DropStyle {
match mode {
DropFlagMode::Shallow => {
// Drops for the contained fields are "shallow" and "static" - they will simply call
// the field's own drop glue.
DropStyle::Static
}
DropFlagMode::Deep => {
// The top-level drop is "deep" and "open" - it will be elaborated to a drop ladder
// dropping each field contained in the value.
DropStyle::Open
}
}
}
fn get_drop_flag(&mut self, _path: Self::Path) -> Option<Operand<'tcx>> {
None
}
fn clear_drop_flag(&mut self, _location: Location, _path: Self::Path, _mode: DropFlagMode) {}
fn field_subpath(&self, _path: Self::Path, _field: Field) -> Option<Self::Path> {
None
}
fn deref_subpath(&self, _path: Self::Path) -> Option<Self::Path> {
None
}
fn downcast_subpath(&self, _path: Self::Path, _variant: VariantIdx) -> Option<Self::Path> {
Some(())
}
fn array_subpath(&self, _path: Self::Path, _index: u64, _size: u64) -> Option<Self::Path> {
None
}
}
/// Builds a `Clone::clone` shim for `self_ty`. Here, `def_id` is `Clone::clone`.
fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Body<'tcx> {
debug!("build_clone_shim(def_id={:?})", def_id);
let param_env = tcx.param_env(def_id);
let mut builder = CloneShimBuilder::new(tcx, def_id, self_ty);
let is_copy = self_ty.is_copy_modulo_regions(tcx.at(builder.span), param_env);
let dest = Place::return_place();
let src = tcx.mk_place_deref(Place::from(Local::new(1 + 0)));
match self_ty.kind() {
_ if is_copy => builder.copy_shim(),
ty::Closure(_, substs) => {
builder.tuple_like_shim(dest, src, substs.as_closure().upvar_tys())
}
ty::Tuple(..) => builder.tuple_like_shim(dest, src, self_ty.tuple_fields()),
_ => bug!("clone shim for `{:?}` which is not `Copy` and is not an aggregate", self_ty),
};
builder.into_mir()
}
struct CloneShimBuilder<'tcx> {
tcx: TyCtxt<'tcx>,
def_id: DefId,
local_decls: IndexVec<Local, LocalDecl<'tcx>>,
blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
span: Span,
sig: ty::FnSig<'tcx>,
}
impl CloneShimBuilder<'tcx> {
fn new(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Self {
// we must subst the self_ty because it's
// otherwise going to be TySelf and we can't index
// or access fields of a Place of type TySelf.
let substs = tcx.mk_substs_trait(self_ty, &[]);
let sig = tcx.fn_sig(def_id).subst(tcx, substs);
let sig = tcx.erase_late_bound_regions(sig);
let span = tcx.def_span(def_id);
CloneShimBuilder {
tcx,
def_id,
local_decls: local_decls_for_sig(&sig, span),
blocks: IndexVec::new(),
span,
sig,
}
}
fn into_mir(self) -> Body<'tcx> {
let source = MirSource::from_instance(ty::InstanceDef::CloneShim(
self.def_id,
self.sig.inputs_and_output[0],
));
new_body(
self.tcx,
source,
self.blocks,
self.local_decls,
self.sig.inputs().len(),
self.span,
)
}
fn source_info(&self) -> SourceInfo {
SourceInfo::outermost(self.span)
}
fn block(
&mut self,
statements: Vec<Statement<'tcx>>,
kind: TerminatorKind<'tcx>,
is_cleanup: bool,
) -> BasicBlock {
let source_info = self.source_info();
self.blocks.push(BasicBlockData {
statements,
terminator: Some(Terminator { source_info, kind }),
is_cleanup,
})
}
/// Gives the index of an upcoming BasicBlock, with an offset.
/// offset=0 will give you the index of the next BasicBlock,
/// offset=1 will give the index of the next-to-next block,
/// offset=-1 will give you the index of the last-created block
fn block_index_offset(&mut self, offset: usize) -> BasicBlock {
BasicBlock::new(self.blocks.len() + offset)
}
fn make_statement(&self, kind: StatementKind<'tcx>) -> Statement<'tcx> {
Statement { source_info: self.source_info(), kind }
}
fn copy_shim(&mut self) {
let rcvr = self.tcx.mk_place_deref(Place::from(Local::new(1 + 0)));
let ret_statement = self.make_statement(StatementKind::Assign(Box::new((
Place::return_place(),
Rvalue::Use(Operand::Copy(rcvr)),
))));
self.block(vec![ret_statement], TerminatorKind::Return, false);
}
fn make_place(&mut self, mutability: Mutability, ty: Ty<'tcx>) -> Place<'tcx> {
let span = self.span;
let mut local = LocalDecl::new(ty, span);
if mutability == Mutability::Not {
local = local.immutable();
}
Place::from(self.local_decls.push(local))
}
fn make_clone_call(
&mut self,
dest: Place<'tcx>,
src: Place<'tcx>,
ty: Ty<'tcx>,
next: BasicBlock,
cleanup: BasicBlock,
) {
let tcx = self.tcx;
let substs = tcx.mk_substs_trait(ty, &[]);
// `func == Clone::clone(&ty) -> ty`
let func_ty = tcx.mk_fn_def(self.def_id, substs);
let func = Operand::Constant(Box::new(Constant {
span: self.span,
user_ty: None,
literal: ty::Const::zero_sized(tcx, func_ty).into(),
}));
let ref_loc = self.make_place(
Mutability::Not,
tcx.mk_ref(tcx.lifetimes.re_erased, ty::TypeAndMut { ty, mutbl: hir::Mutability::Not }),
);
// `let ref_loc: &ty = &src;`
let statement = self.make_statement(StatementKind::Assign(Box::new((
ref_loc,
Rvalue::Ref(tcx.lifetimes.re_erased, BorrowKind::Shared, src),
))));
// `let loc = Clone::clone(ref_loc);`
self.block(
vec![statement],
TerminatorKind::Call {
func,
args: vec![Operand::Move(ref_loc)],
destination: Some((dest, next)),
cleanup: Some(cleanup),
from_hir_call: true,
fn_span: self.span,
},
false,
);
}
fn tuple_like_shim<I>(&mut self, dest: Place<'tcx>, src: Place<'tcx>, tys: I)
where
I: Iterator<Item = Ty<'tcx>>,
{
let mut previous_field = None;
for (i, ity) in tys.enumerate() {
let field = Field::new(i);
let src_field = self.tcx.mk_place_field(src, field, ity);
let dest_field = self.tcx.mk_place_field(dest, field, ity);
// #(2i + 1) is the cleanup block for the previous clone operation
let cleanup_block = self.block_index_offset(1);
// #(2i + 2) is the next cloning block
// (or the Return terminator if this is the last block)
let next_block = self.block_index_offset(2);
// BB #(2i)
// `dest.i = Clone::clone(&src.i);`
// Goto #(2i + 2) if ok, #(2i + 1) if unwinding happens.
self.make_clone_call(dest_field, src_field, ity, next_block, cleanup_block);
// BB #(2i + 1) (cleanup)
if let Some((previous_field, previous_cleanup)) = previous_field.take() {
// Drop previous field and goto previous cleanup block.
self.block(
vec![],
TerminatorKind::Drop {
place: previous_field,
target: previous_cleanup,
unwind: None,
},
true,
);
} else {
// Nothing to drop, just resume.
self.block(vec![], TerminatorKind::Resume, true);
}
previous_field = Some((dest_field, cleanup_block));
}
self.block(vec![], TerminatorKind::Return, false);
}
}
/// Builds a "call" shim for `instance`. The shim calls the function specified by `call_kind`,
/// first adjusting its first argument according to `rcvr_adjustment`.
fn build_call_shim<'tcx>(
tcx: TyCtxt<'tcx>,
instance: ty::InstanceDef<'tcx>,
rcvr_adjustment: Option<Adjustment>,
call_kind: CallKind<'tcx>,
) -> Body<'tcx> {
debug!(
"build_call_shim(instance={:?}, rcvr_adjustment={:?}, call_kind={:?})",
instance, rcvr_adjustment, call_kind
);
// `FnPtrShim` contains the fn pointer type that a call shim is being built for - this is used
// to substitute into the signature of the shim. It is not necessary for users of this
// MIR body to perform further substitutions (see `InstanceDef::has_polymorphic_mir_body`).
let (sig_substs, untuple_args) = if let ty::InstanceDef::FnPtrShim(_, ty) = instance {
let sig = tcx.erase_late_bound_regions(ty.fn_sig(tcx));
let untuple_args = sig.inputs();
// Create substitutions for the `Self` and `Args` generic parameters of the shim body.
let arg_tup = tcx.mk_tup(untuple_args.iter());
let sig_substs = tcx.mk_substs_trait(ty, &[ty::subst::GenericArg::from(arg_tup)]);
(Some(sig_substs), Some(untuple_args))
} else {
(None, None)
};
let def_id = instance.def_id();
let sig = tcx.fn_sig(def_id);
let mut sig = tcx.erase_late_bound_regions(sig);
assert_eq!(sig_substs.is_some(), !instance.has_polymorphic_mir_body());
if let Some(sig_substs) = sig_substs {
sig = sig.subst(tcx, sig_substs);
}
if let CallKind::Indirect(fnty) = call_kind {
// `sig` determines our local decls, and thus the callee type in the `Call` terminator. This
// can only be an `FnDef` or `FnPtr`, but currently will be `Self` since the types come from
// the implemented `FnX` trait.
// Apply the opposite adjustment to the MIR input.
let mut inputs_and_output = sig.inputs_and_output.to_vec();
// Initial signature is `fn(&? Self, Args) -> Self::Output` where `Args` is a tuple of the
// fn arguments. `Self` may be passed via (im)mutable reference or by-value.
assert_eq!(inputs_and_output.len(), 3);
// `Self` is always the original fn type `ty`. The MIR call terminator is only defined for
// `FnDef` and `FnPtr` callees, not the `Self` type param.
let self_arg = &mut inputs_and_output[0];
*self_arg = match rcvr_adjustment.unwrap() {
Adjustment::Identity => fnty,
Adjustment::Deref => tcx.mk_imm_ptr(fnty),
Adjustment::RefMut => tcx.mk_mut_ptr(fnty),
};
sig.inputs_and_output = tcx.intern_type_list(&inputs_and_output);
}
// FIXME(eddyb) avoid having this snippet both here and in
// `Instance::fn_sig` (introduce `InstanceDef::fn_sig`?).
if let ty::InstanceDef::VtableShim(..) = instance {
// Modify fn(self, ...) to fn(self: *mut Self, ...)
let mut inputs_and_output = sig.inputs_and_output.to_vec();
let self_arg = &mut inputs_and_output[0];
debug_assert!(tcx.generics_of(def_id).has_self && *self_arg == tcx.types.self_param);
*self_arg = tcx.mk_mut_ptr(*self_arg);
sig.inputs_and_output = tcx.intern_type_list(&inputs_and_output);
}
let span = tcx.def_span(def_id);
debug!("build_call_shim: sig={:?}", sig);
let mut local_decls = local_decls_for_sig(&sig, span);
let source_info = SourceInfo::outermost(span);
let rcvr_place = || {
assert!(rcvr_adjustment.is_some());
Place::from(Local::new(1 + 0))
};
let mut statements = vec![];
let rcvr = rcvr_adjustment.map(|rcvr_adjustment| match rcvr_adjustment {
Adjustment::Identity => Operand::Move(rcvr_place()),
Adjustment::Deref => Operand::Move(tcx.mk_place_deref(rcvr_place())),
Adjustment::RefMut => {
// let rcvr = &mut rcvr;
let ref_rcvr = local_decls.push(
LocalDecl::new(
tcx.mk_ref(
tcx.lifetimes.re_erased,
ty::TypeAndMut { ty: sig.inputs()[0], mutbl: hir::Mutability::Mut },
),
span,
)
.immutable(),
);
let borrow_kind = BorrowKind::Mut { allow_two_phase_borrow: false };
statements.push(Statement {
source_info,
kind: StatementKind::Assign(Box::new((
Place::from(ref_rcvr),
Rvalue::Ref(tcx.lifetimes.re_erased, borrow_kind, rcvr_place()),
))),
});
Operand::Move(Place::from(ref_rcvr))
}
});
let (callee, mut args) = match call_kind {
// `FnPtr` call has no receiver. Args are untupled below.
CallKind::Indirect(_) => (rcvr.unwrap(), vec![]),
// `FnDef` call with optional receiver.
CallKind::Direct(def_id) => {
let ty = tcx.type_of(def_id);
(
Operand::Constant(Box::new(Constant {
span,
user_ty: None,
literal: ty::Const::zero_sized(tcx, ty).into(),
})),
rcvr.into_iter().collect::<Vec<_>>(),
)
}
};
let mut arg_range = 0..sig.inputs().len();
// Take the `self` ("receiver") argument out of the range (it's adjusted above).
if rcvr_adjustment.is_some() {
arg_range.start += 1;
}
// Take the last argument, if we need to untuple it (handled below).
if untuple_args.is_some() {
arg_range.end -= 1;
}
// Pass all of the non-special arguments directly.
args.extend(arg_range.map(|i| Operand::Move(Place::from(Local::new(1 + i)))));
// Untuple the last argument, if we have to.
if let Some(untuple_args) = untuple_args {
let tuple_arg = Local::new(1 + (sig.inputs().len() - 1));
args.extend(untuple_args.iter().enumerate().map(|(i, ity)| {
Operand::Move(tcx.mk_place_field(Place::from(tuple_arg), Field::new(i), *ity))
}));
}
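    // Block layout: BB0 performs the call. For `RefMut` receivers, BB1 drops
    // the receiver, BB2 returns, BB3 drops it on unwind and BB4 resumes;
    // otherwise BB1 is simply the return block.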
let n_blocks = if let Some(Adjustment::RefMut) = rcvr_adjustment { 5 } else { 2 };
let mut blocks = IndexVec::with_capacity(n_blocks);
let block = |blocks: &mut IndexVec<_, _>, statements, kind, is_cleanup| {
blocks.push(BasicBlockData {
statements,
terminator: Some(Terminator { source_info, kind }),
is_cleanup,
})
};
// BB #0
block(
&mut blocks,
statements,
TerminatorKind::Call {
func: callee,
args,
destination: Some((Place::return_place(), BasicBlock::new(1))),
cleanup: if let Some(Adjustment::RefMut) = rcvr_adjustment {
Some(BasicBlock::new(3))
} else {
None
},
from_hir_call: true,
fn_span: span,
},
false,
);
if let Some(Adjustment::RefMut) = rcvr_adjustment {
// BB #1 - drop for Self
block(
&mut blocks,
vec![],
TerminatorKind::Drop { place: rcvr_place(), target: BasicBlock::new(2), unwind: None },
false,
);
}
// BB #1/#2 - return
block(&mut blocks, vec![], TerminatorKind::Return, false);
if let Some(Adjustment::RefMut) = rcvr_adjustment {
// BB #3 - drop if closure panics
block(
&mut blocks,
vec![],
TerminatorKind::Drop { place: rcvr_place(), target: BasicBlock::new(4), unwind: None },
true,
);
// BB #4 - resume
block(&mut blocks, vec![], TerminatorKind::Resume, true);
}
let mut body = new_body(
tcx,
MirSource::from_instance(instance),
blocks,
local_decls,
sig.inputs().len(),
span,
);
if let Abi::RustCall = sig.abi {
body.spread_arg = Some(Local::new(sig.inputs().len()));
}
body
}
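/// Builds the MIR body of an ADT constructor function (tuple struct or tuple
/// enum variant): each argument is moved into the corresponding field of the
/// return place, then the body returns.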
pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> {
debug_assert!(tcx.is_constructor(ctor_id));
let span =
tcx.hir().span_if_local(ctor_id).unwrap_or_else(|| bug!("no span for ctor {:?}", ctor_id));
let param_env = tcx.param_env(ctor_id);
// Normalize the sig.
let sig = tcx.fn_sig(ctor_id).no_bound_vars().expect("LBR in ADT constructor signature");
let sig = tcx.normalize_erasing_regions(param_env, sig);
let (adt_def, substs) = match sig.output().kind() {
ty::Adt(adt_def, substs) => (adt_def, substs),
_ => bug!("unexpected type for ADT ctor {:?}", sig.output()),
};
debug!("build_ctor: ctor_id={:?} sig={:?}", ctor_id, sig);
let local_decls = local_decls_for_sig(&sig, span);
let source_info = SourceInfo::outermost(span);
let variant_index = if adt_def.is_enum() {
adt_def.variant_index_with_ctor_id(ctor_id)
} else {
VariantIdx::new(0)
};
// Generate the following MIR:
//
// (return as Variant).field0 = arg0;
// (return as Variant).field1 = arg1;
//
// return;
debug!("build_ctor: variant_index={:?}", variant_index);
let statements = expand_aggregate(
Place::return_place(),
adt_def.variants[variant_index].fields.iter().enumerate().map(|(idx, field_def)| {
(Operand::Move(Place::from(Local::new(idx + 1))), field_def.ty(tcx, substs))
}),
AggregateKind::Adt(adt_def, variant_index, substs, None, None),
source_info,
tcx,
)
.collect();
let start_block = BasicBlockData {
statements,
terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }),
is_cleanup: false,
};
let source = MirSource::item(ctor_id);
let body = new_body(
tcx,
source,
IndexVec::from_elem_n(start_block, 1),
local_decls,
sig.inputs().len(),
span,
);
rustc_middle::mir::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));
body
}<|fim▁end|> | |
<|file_name|>SpecialTautiMonsters.java<|end_file_name|><|fim▁begin|>package l2s.gameserver.ai;
import l2s.gameserver.model.Creature;
import l2s.gameserver.model.instances.NpcInstance;
import l2s.gameserver.model.instances.MonsterInstance;
/**
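 * Fighter AI for the special Tauti monsters: NPC ids 33679/33680 attack only
 * monsters (never playables), while the rest attack anything except NPCs and
 * the listed special monster ids (19262-19266).
 *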
* @author Bonux
**/
public class SpecialTautiMonsters extends Fighter
{
public SpecialTautiMonsters(NpcInstance actor)
{
super(actor);
}
@Override
protected void onEvtAttacked(Creature attacker, int damage)
{
NpcInstance actor = getActor();
if(!canAttack(actor.getNpcId(), attacker))
return;
super.onEvtAttacked(attacker, damage);
}
@Override
public boolean canAttackCharacter(Creature target)
{
NpcInstance actor = getActor();
return canAttack(actor.getNpcId(), target);
}
@Override
public int getMaxAttackTimeout()
{
return 0;
}
private boolean canAttack(int selfId, Creature target)
{
        if(selfId == 33680 || selfId == 33679) // peaceful NPCs: attack monsters only
{
if(target.isPlayable())
return false;
else
return target.isMonster();
}
        else // non-peaceful NPCs
{
if(target.isMonster())
{
MonsterInstance monster = (MonsterInstance) target;
if(monster.getNpcId() == 19262 || monster.getNpcId() == 19263 || monster.getNpcId() == 19264 || monster.getNpcId() == 19265 || monster.getNpcId() == 19266)
return false;
else
return true;
}
else
return !target.isNpc();
} <|fim▁hole|><|fim▁end|> |
}
} |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# sanpera documentation build configuration file, created by
# sphinx-quickstart2 on Sat May 12 21:24:07 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------<|fim▁hole|>
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'sanpera'
copyright = u'2012, Eevee'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'sanperadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'sanpera.tex', u'sanpera Documentation',
u'Eevee', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'sanpera', u'sanpera Documentation',
[u'Eevee'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'sanpera', u'sanpera Documentation',
u'Eevee', 'sanpera', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'<|fim▁end|> | |
<|file_name|>posix_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""File used to unit test the pacifica archive interface."""
import unittest
import os
from stat import ST_MODE
from six import PY2
from pacifica.archiveinterface.archive_utils import bytes_type
from pacifica.archiveinterface.backends.posix.archive import PosixBackendArchive
import pacifica.archiveinterface.config as pa_config
from .common_setup_test import SetupTearDown
class TestPosixBackendArchive(unittest.TestCase, SetupTearDown):
"""Test the Posix backend archive."""
def test_posix_backend_create(self):
"""Test creating a posix backend."""
backend = PosixBackendArchive('/tmp')
self.assertTrue(isinstance(backend, PosixBackendArchive))
# easiest way to unit test is look at class variable
# pylint: disable=protected-access
self.assertEqual(backend._prefix, '/tmp')
# pylint: enable=protected-access
def test_posix_backend_open(self):
"""Test opening a file from posix backend."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp')
my_file = backend.open(filepath, mode)
self.assertTrue(isinstance(my_file, PosixBackendArchive))
# easiest way to unit test is look at class variable
# pylint: disable=protected-access
self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
# pylint: enable=protected-access
my_file.close()
def test_posix_backend_stage(self):
"""Test staging a file from posix backend."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp')
my_file = backend.open(filepath, mode)
my_file.stage()
# pylint: disable=protected-access
self.assertTrue(my_file._file._staged)
# pylint: enable=protected-access
my_file.close()
def test_posix_backend_open_twice(self):
"""Test opening a file from posix backend twice."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp')
my_file = backend.open(filepath, mode)
my_file = backend.open(filepath, mode)
self.assertTrue(isinstance(my_file, PosixBackendArchive))
# easiest way to unit test is look at class variable
# pylint: disable=protected-access
self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
# pylint: enable=protected-access
my_file.close()
def test_posix_backend_open_id2f(self):
"""Test opening a file from posix backend twice."""
backend = PosixBackendArchive('/tmp')
mode = 'w'
my_file = backend.open('/a/b/d', mode)
temp_cfg_file = pa_config.CONFIG_FILE
pa_config.CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'test_configs', 'posix-id2filename.cfg')
backend = PosixBackendArchive('/tmp')
my_file = backend.open(12345, mode)
my_file.write('this is file 12345')
my_file.close()
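        # patch() re-homes the stored file under a new id, so the bytes written
        # as id 12345 must afterwards be readable as id 123456789.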
# pylint: disable=protected-access
my_file.patch(123456789, '/tmp{}'.format(my_file._id2filename(12345)))
# pylint: enable=protected-access
my_file = backend.open(123456789, 'r')
text = my_file.read(-1)
pa_config.CONFIG_FILE = temp_cfg_file
self.assertTrue(isinstance(my_file, PosixBackendArchive))
self.assertEqual(bytes_type('this is file 12345'), text)
my_file.close()
def test_posix_backend_close(self):
"""Test closing a file from posix backend."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
# easiest way to unit test is look at class variable
# pylint: disable=protected-access
self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
my_file.close()
self.assertEqual(backend._file, None)
# pylint: enable=protected-access
def test_posix_backend_write(self):
"""Test writing a file from posix backend."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
error = my_file.write('i am a test string')
if PY2:
self.assertEqual(error, None)
else:
self.assertEqual(error, 18)
my_file.close()
def test_posix_file_mod_time(self):
"""Test the correct setting of a file mod time."""<|fim▁hole|> my_file.close()
my_file.set_mod_time(1000000)
my_file = backend.open(filepath, 'r')
status = my_file.status()
my_file.close()
self.assertEqual(status.mtime, 1000000)
def test_posix_file_permissions(self):
"""Test the correct setting of a file mod time."""
filepath = '12345'
mode = 'w'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
my_file.close()
my_file.set_file_permissions()
statinfo = oct(os.stat('/tmp/12345')[ST_MODE])[-3:]
self.assertEqual(statinfo, '444')
def test_posix_backend_read(self):
"""Test reading a file from posix backend."""
self.test_posix_backend_write()
filepath = '1234'
mode = 'r'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
buf = my_file.read(-1)
self.assertEqual(buf, bytes_type('i am a test string'))
my_file.close()
def test_patch(self):
"""Test patching file."""
old_path = '/tmp/1234'
backend = PosixBackendArchive('/tmp')
my_file = backend.open('1234', 'w')
my_file.close()
backend.patch('5678', '/tmp/1234')
        # An error would have been raised by patch() on failure, so there is nothing to assert
self.assertEqual(old_path, '/tmp/1234')
def test_seek(self):
"""Test patching file."""
backend = PosixBackendArchive('/tmp')
my_file = backend.open('1234', 'w')
my_file.write('something')
my_file.close()
my_file = backend.open('1234', 'r')
my_file.seek(4)
data = my_file.read(-1).decode('utf8')
self.assertEqual(data, 'thing')<|fim▁end|> | filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode) |
<|file_name|>fullscreen.rs<|end_file_name|><|fim▁begin|>#[cfg(target_os = "android")]
#[macro_use]
extern crate android_glue;
extern crate glutin;
use std::io;
mod support;
#[cfg(target_os = "android")]
android_start!(main);
#[cfg(not(feature = "window"))]
fn main() { println!("This example requires glutin to be compiled with the `window` feature"); }
#[cfg(feature = "window")]
fn main() {
// enumerating monitors
let monitor = {
for (num, monitor) in glutin::get_available_monitors().enumerate() {
println!("Monitor #{}: {:?}", num, monitor.get_name());
}
print!("Please write the number of the monitor to use: ");
let mut num = String::new();
io::stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let monitor = glutin::get_available_monitors().nth(num).expect("Please enter a valid ID");
println!("Using {:?}", monitor.get_name());<|fim▁hole|>
let window = glutin::WindowBuilder::new()
.with_title("Hello world!".to_string())
.with_fullscreen(monitor)
.build()
.unwrap();
unsafe { window.make_current() };
let context = support::load(&window);
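    // Event loop: redraw, swap buffers and log every event until the window
    // reports `Closed`.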
for event in window.wait_events() {
context.draw_frame((0.0, 1.0, 0.0, 1.0));
window.swap_buffers();
println!("{:?}", event);
match event {
glutin::Event::Closed => break,
_ => ()
}
}
}<|fim▁end|> |
monitor
}; |
<|file_name|>llvm_pr32379.rs<|end_file_name|><|fim▁begin|>pub fn pr32379(mut data: u64, f1: bool, f2: bool) -> u64 {<|fim▁hole|> if f2 { data |= 2; }
data
}<|fim▁end|> | if f1 { data &= !2; } |
<|file_name|>iconres.rs<|end_file_name|><|fim▁begin|>#include <windows.h><|fim▁hole|><|fim▁end|> | 1234 ICON "icon.ico" |