prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>run_raw_file_io.py<|end_file_name|><|fim▁begin|># Copyright 2017 The Vispek Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==========================================================================
""" Example code about how to run raw_file_io
python3 -m vispek.examples.run_raw_file_io \
--in_path /Users/huaminli/Downloads/data \
--out_path /Users/huaminli/Desktop/vispek/data
"""
import argparse
from vispek.lib.io.raw_file_io import RawFileIO
def run_file_io(args):
my_file_io = RawFileIO(args.in_path, args.out_path)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Example code about how tun run raw_file_io')
parser.add_argument(
'--in_path', type=str,
help='absolute path to the directories that contains raw csv files')
parser.add_argument(
'--out_path', type=str,
help='absolute path to the directories that contains ' +
'preproceed files')
args = parser.parse_args()
print(args.in_path)
print(args.out_path)
<|fim▁hole|><|fim▁end|>
|
run_file_io(args)
|
<|file_name|>BoatWithSupport.cpp<|end_file_name|><|fim▁begin|>/*
* boatWithSupport.cpp
*
* Created on: 16 de Abr de 2013
* Author: Windows
*/
#include "BoatWithSupport.h"
BoatWithSupport::BoatWithSupport(int extraCapacity) :
Boat() {
extraCap = extraCapacity;
lastMaxCap = 0;
lastTransported = 0;
}
<|fim▁hole|> lastTransported = 0;
}
int BoatWithSupport::getExtraCapacity() {
return extraCap;
}
int BoatWithSupport::getMaxCapacity() {
return this->maxCapacity + this->extraCap;
}
BoatWithSupport::~BoatWithSupport() {
// TODO Auto-generated destructor stub
}
void BoatWithSupport::reset() {
this->transportedQuantity = lastTransported;
this->maxCapacity = lastMaxCap;
}
void BoatWithSupport::resize() {
this->lastMaxCap = this->maxCapacity;
this->maxCapacity = this->extraCap;
lastTransported = this->transportedQuantity;
this->transportedQuantity = this->extraCap;
}<|fim▁end|>
|
BoatWithSupport::BoatWithSupport(int capacity, int extraCapacity) :
Boat(capacity) {
extraCap = extraCapacity;
lastMaxCap = 0;
|
<|file_name|>argument-passing.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-fast<|fim▁hole|> x: int
}
fn f1(a: &mut X, b: &mut int, c: int) -> int {
let r = a.x + *b + c;
a.x = 0;
*b = 10;
return r;
}
fn f2(a: int, f: &fn(int)) -> int { f(1); return a; }
pub fn main() {
let mut a = X {x: 1};
let mut b = 2;
let mut c = 3;
assert_eq!(f1(&mut a, &mut b, c), 6);
assert_eq!(a.x, 0);
assert_eq!(b, 10);
assert_eq!(f2(a.x, |x| a.x = 50), 0);
assert_eq!(a.x, 50);
}<|fim▁end|>
|
struct X {
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""
Django settings for eproweb project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import django.conf.global_settings as DEFAULT_SETTINGS
from django.contrib.messages import constants as message
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'REPLACED_IN_LOCAL_SETTINGS_FILE'
# SECURITY WARNING: don't run with debug turned on in production!<|fim▁hole|>
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'rest_framework',
'rest_framework_swagger',
'crispy_forms',
'djangocosign',
'epro',
'feedback',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.auth.middleware.RemoteUserMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'eproweb.middleware.AjaxMessaging',
'eproweb.middleware.TimingMiddleware',
)
ROOT_URLCONF = 'eproweb.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR + "/templates/",],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'eproweb.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Where to get redirected after logging in.
LOGIN_REDIRECT_URL = '/'
#https://docs.djangoproject.com/en/1.9/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = [os.path.join(BASE_DIR, 'fixtures'),]
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# STATIC_ROOT = BASE_DIR + "/static/"
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
CRISPY_TEMPLATE_PACK='bootstrap3'
### This is to map Django message levels to Boostrap3 alert levels ########
MESSAGE_TAGS = {message.DEBUG: 'debug',
message.INFO: 'info',
message.SUCCESS: 'success',
message.WARNING: 'warning',
message.ERROR: 'danger',}
# Using the Cosgin Authentication backend first.
AUTHENTICATION_BACKENDS = (
'djangocosign.cosign.CosignBackend',
'django.contrib.auth.backends.ModelBackend',
)
# Email setup
EMAIL_HOST = 'localhost'
EMAIL_PORT = 25
# formatting options reference: https://docs.python.org/2/library/logging.html#formatter-objects
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(name)s %(funcName)s %(lineno)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
'file': {
'class': 'logging.FileHandler',
'filename': "/var/log/httpd/eproweb_app.log",
'formatter': 'verbose',
},
'mail_admins': {
'class': 'django.utils.log.AdminEmailHandler',
},
},
'loggers': {
'django': {
'handlers': ['console', 'file'],
'level': 'INFO',
'propagate': True,
},
'epro': {
'handlers': ['file'],
'level': 'WARNING',
'propagate': True,
},
'feedback': {
'handlers': ['file'],
'level': 'WARNING',
'propagate': True,
},
'app_admins': {
'handlers': ['file', 'mail_admins',],
'level': 'ERROR',
'propagate': True,
'include_html': True,
},
},
}<|fim▁end|>
|
DEBUG = True
ALLOWED_HOSTS = ["localhost", ".mercycorps.org"]
|
<|file_name|>update_find_and_modify_id.js<|end_file_name|><|fim▁begin|>// SERVER-4516 and SERVER-6913: test that update and findAndModify tolerate
// an _id in the update document, as long as the _id will not be modified
var t = db.jstests_server4516;
var startingDoc = {_id: 1, a: 1};
function prepare() {
t.drop();
t.save(startingDoc);
}
function update_succeeds(updateDoc, qid, resultDoc) {
prepare();
t.update({_id: qid}, updateDoc, true);
assert.eq(t.findOne({_id: qid}), resultDoc);
prepare();
t.findAndModify({query: {_id: qid}, update: updateDoc, upsert: true});<|fim▁hole|>}
update_succeeds({_id: 1, a: 2}, 1, {_id: 1, a: 2});
update_succeeds({$set: {_id: 1}}, 1, {_id: 1, a: 1});
update_succeeds({_id: 1, b: "a"}, 1, {_id: 1, b: "a"});
update_succeeds({_id: 2, a: 3}, 2, {_id: 2, a: 3});
function update_fails(updateDoc, qid) {
prepare();
var res = t.update({_id: qid}, updateDoc, true);
assert.writeError(res);
assert.eq(t.count(), 1);
assert.eq(t.findOne(), startingDoc);
prepare();
assert.throws(function() {
t.findAndModify({query: {_id: qid}, update: updateDoc, upsert: true});
});
assert.eq(t.count(), 1);
assert.eq(t.findOne(), startingDoc);
}
update_fails({$set: {_id: 2}}, 1);
update_fails({_id: 2, a: 3}, 1);
update_fails({_id: 2, a: 3}, 3);<|fim▁end|>
|
assert.eq(t.findOne({_id: qid}), resultDoc);
|
<|file_name|>blast2matrix.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
"""Parses multiple blast files (for the same target) and print table
combining best matches to all target sequences from all query file.
Author:
[email protected]
Barcelona, 24/05/2012
"""
import os, sys
from optparse import OptionParser
from datetime import datetime
from genome_annotation import get_contig2size,parse_blast
def main():
usage = "usage: %prog [options] blastout1 [blastout2 ... blastoutN] [ > out ]"
desc = """Blast has to be run with -m8."""
epilog = ""
parser = OptionParser( usage=usage,version="%prog 1.0",description=desc,epilog=epilog )
parser.add_option("-k", dest="target", default="",
help="target fasta")
parser.add_option("-e", dest="evalue", default=1e-05, type=float,
help="E-value cut-off [%default]" )
parser.add_option("-q", dest="qcov", default=0, type=float,
help="query coverage [%default]")
parser.add_option("-t", dest="tcov", default=0, type=float,
help="target coverage [%default]")
parser.add_option("-s", dest="fnsplit", default=True, action="store_false",
help="split fnames [%default]")
parser.add_option("-v", dest="verbose", default=False, action="store_true" )
( o, fnames ) = parser.parse_args()
if o.verbose:
sys.stderr.write( "Options: %s\nArgs: %s\n" % ( o,fnames ) )
#check files
for fn in fnames + [ o.target, ]:
if not fn:
parser.error( "Provide input file!" )
if not os.path.isfile( fn ):
parser.error( "No such file: %s" % fn )
#get sizes of targets
t2len = get_contig2size( o.target )
#dict to store matches and list of targets
s2matches = []
targets = sorted( t2len.keys() )
#process all files
samples = []
for fn in fnames:
#define sample name
s = fn<|fim▁hole|> samples.append( s )
#define empty matches
smatch = []
for i in range( len(targets) ):
smatch.append( [] )
#get sizes of queries
q2len = {}#get_contig2size( fn )
#get significant matches
matches = parse_blast( fn,q2len,t2len,o.evalue,o.qcov,o.tcov,o.verbose )
#parse matches
for qlocus,tlocus,identity,algLen,mismatches,gaps,qstart,qend,tstart,tend,e,score,qcov,tcov in matches:
i = targets.index( tlocus )
#add match info if not match for given target
if not smatch[i]:
smatch[i] = ( qlocus,e,score,identity,tcov )
#or better match found
elif score > smatch[i][2]:
smatch[i] = ( qlocus,e,score,identity,tcov )
#store matches
s2matches.append( smatch )
#write header
header = "Target"
for s in samples:
header += "\t%s\t" % s
print header
print "\t" + "identity [%]\tcoverage [%]\t" * len(samples)
#write data
for i in range( len(targets) ):
line = targets[i]
for smatch in s2matches:
if smatch[i]:
qlocus,e,score,identity,tcov = smatch[i]
else:
identity=tcov=0
line += "\t%6.2f\t%6.2f" % ( identity,tcov*100 )
print line
if __name__=='__main__':
t0=datetime.now()
main()
dt=datetime.now()-t0
sys.stderr.write( "#Time elapsed: %s\n" % dt )<|fim▁end|>
|
#split by dot if requested
if o.fnsplit:
s = fn.split(".")[0]
|
<|file_name|>production.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'<|fim▁end|>
|
from .base import *
|
<|file_name|>linux.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010-2013 Claudio Guarnieri.
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import logging
import datetime
import re
import dateutil.parser
from lib.cuckoo.common.abstracts import BehaviorHandler
log = logging.getLogger(__name__)
class FilteredProcessLog(list):
def __init__(self, eventstream, **kwfilters):
self.eventstream = eventstream
self.kwfilters = kwfilters
def __iter__(self):
for event in self.eventstream:<|fim▁hole|> continue
del event["type"]
yield event
def __nonzero__(self):
return True
class LinuxSystemTap(BehaviorHandler):
"""Parses systemtap generated plaintext logs (see data/strace.stp)."""
key = "processes"
def __init__(self, *args, **kwargs):
super(LinuxSystemTap, self).__init__(*args, **kwargs)
self.processes = []
self.pids_seen = set()
self.forkmap = {}
self.matched = False
self._check_for_probelkm()
def _check_for_probelkm(self):
path_lkm = os.path.join(self.analysis.logs_path, "all.lkm")
if os.path.exists(path_lkm):
lines = open(path_lkm).readlines()
forks = [re.findall("task (\d+)@0x[0-9a-f]+ forked to (\d+)@0x[0-9a-f]+", line) for line in lines]
self.forkmap = dict((j, i) for i, j in reduce(lambda x, y: x+y, forks, []))
# self.results["source"].append("probelkm")
def handles_path(self, path):
if path.endswith(".stap"):
self.matched = True
return True
def parse(self, path):
parser = StapParser(open(path))
for event in parser:
pid = event["pid"]
if pid not in self.pids_seen:
self.pids_seen.add(pid)
ppid = self.forkmap.get(pid, -1)
process = {
"pid": pid,
"ppid": ppid,
"process_name": event["process_name"],
"first_seen": event["time"],
}
# create a process event as we don't have those with linux+systemtap
pevent = dict(process)
pevent["type"] = "process"
yield pevent
process["calls"] = FilteredProcessLog(parser, pid=pid)
self.processes.append(process)
yield event
def run(self):
if not self.matched:
return
self.processes.sort(key=lambda process: process["first_seen"])
return self.processes
class StapParser(object):
"""Handle .stap logs from the Linux analyzer."""
def __init__(self, fd):
self.fd = fd
def __iter__(self):
self.fd.seek(0)
for line in self.fd:
# 'Thu May 7 14:58:43 2015.390178 python@7f798cb95240[2114] close(6) = 0\n'
# datetime is 31 characters
datetimepart, rest = line[:31], line[32:]
# incredibly sophisticated date time handling
dtms = datetime.timedelta(0, 0, int(datetimepart.split(".", 1)[1]))
dt = dateutil.parser.parse(datetimepart.split(".", 1)[0]) + dtms
parts = re.match("^(.+)@([a-f0-9]+)\[(\d+)\] (\w+)\((.*)\) = (\S+){0,1}\s{0,1}(\(\w+\)){0,1}$", rest)
if not parts:
log.warning("Could not parse syscall trace line: %s", line)
continue
pname, ip, pid, fn, arguments, retval, ecode = parts.groups()
argsplit = arguments.split(", ")
arguments = dict(("p%u" % pos, argsplit[pos]) for pos in range(len(argsplit)))
pid = int(pid) if pid.isdigit() else -1
yield {
"time": dt, "process_name": pname, "pid": pid,
"instruction_pointer": ip, "api": fn, "arguments": arguments,
"return_value": retval, "status": ecode,
"type": "apicall", "raw": line,
}<|fim▁end|>
|
for k, v in self.kwfilters.items():
if event[k] != v:
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import logging
try:
from configparser import ConfigParser
except ImportError:<|fim▁hole|>
logger = logging.getLogger("packges.knightos.org")
logger.setLevel(logging.DEBUG)
sh = logging.StreamHandler()
sh.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
sh.setFormatter(formatter)
logger.addHandler(sh)
# scss logger
logging.getLogger("scss").addHandler(sh)
config = ConfigParser()
config.readfp(open('config.ini'))
env = 'dev'
_cfg = lambda k: config.get(env, k)
_cfgi = lambda k: int(_cfg(k))<|fim▁end|>
|
# Python 2 support
from ConfigParser import ConfigParser
|
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>/*******************************************************************************
Add to .git/hooks/pre-commit (and chmod +x) to enable auto-linting/uglifying:
#!/bin/sh
grunt build
if [ $? -ne 0 ]; then
exit 1
fi
git add deflector.min.js
exit 0
*******************************************************************************/
module.exports = function(grunt) {
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
connect: {
all: { options: { base: '.', port: 9999 }}
},
jshint: {
all: ['deflector.js', 'deflector.test.js', 'Gruntfile.js']
},
qunit: {
all: ['index.html']
},
'saucelabs-qunit': {
all: {<|fim▁hole|> tags: ['master'],
urls: ['http://127.0.0.1:9999/'],
build: process.env.TRAVIS_JOB_ID,
browsers: [
{ browserName: "internet explorer", version: "11" },
{ browserName: "android", version: "4.4" },
{ browserName: "iphone", version: "7.1" }
],
tunnelTimeout: 5,
concurrency: 3
}
}
},
uglify: {
all: { files: { 'deflector.min.js': 'deflector.js' }}
},
watch: {
all: {
files: ['deflector.js', 'deflector.test.js'],
tasks: ['build']
}
}
});
for (var key in grunt.file.readJSON('package.json').devDependencies) {
if (key !== 'grunt' && key.indexOf('grunt') === 0) {
grunt.loadNpmTasks(key);
}
}
grunt.registerTask('build', ['jshint', 'uglify', 'qunit']);
grunt.registerTask('test', ['build', 'connect', 'saucelabs-qunit']);
grunt.registerTask('default', ['build', 'connect', 'watch']);
};<|fim▁end|>
|
options: {
testname: '<%= pkg.name %> tests',
|
<|file_name|>kmz_test.go<|end_file_name|><|fim▁begin|>package kmz
import (
"bytes"
"log"
"github.com/twpayne/go-kml"
)
func ExampleNewKMZ() {
kmz := NewKMZ(<|fim▁hole|> kml.Name("Simple placemark"),
kml.Description("Attached to the ground. Intelligently places itself at the height of the underlying terrain."),
kml.Point(
kml.Coordinates(kml.Coordinate{Lon: -122.0822035425683, Lat: 37.42228990140251}),
),
),
)
w := &bytes.Buffer{}
if err := kmz.WriteIndent(w, "", "\t"); err != nil {
log.Fatal(err)
}
}<|fim▁end|>
|
kml.Placemark(
|
<|file_name|>runner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import argparse
import logging
import json
import subprocess
import sys
import os.path
import urllib2
from base64 import b64decode
from distutils.dir_util import mkpath
from tempfile import TemporaryFile
from shutil import copyfileobj
from urlparse import urlparse
from urllib2 import urlopen
from StringIO import StringIO
from boto import connect_s3
from boto.kms.layer1 import KMSConnection
def download_to_file(s3, src, f):
logging.debug('download_to_file: %s -> %s', src, f)
url = urlparse(src)
if url.scheme == 's3':
bucket = s3.get_bucket(url.netloc, validate=False)
key = bucket.get_key(url.path, validate=False)
key.get_contents_to_file(f)
else:
response = urlopen(src)
copyfileobj(response, f, 16 * 1024)
def download_to_filename(s3, src, dst, mode=None):
dirname, os.path.dirname = os.path.split(dst)
mkpath(dirname)
with open(dst, 'wb') as f:
download_to_file(s3, src, f)
if mode is not None:
os.chmod(dst, mode)
def download_to_string(s3, src):
logging.debug('download_to_string: %s', src)
f = StringIO()
download_to_file(s3, src, f)
s = f.getvalue()
logging.debug('download_to_string: %s: %s', src, s)
f.close()
return s
def download(s3, src=None, dst=None, mode=None):
assert src and dst
logging.info('download: %s -> %s', src, dst)
download_to_filename(s3, src, dst, mode=mode)
def process_parameter(kms, parameter):
logging.debug('process_parameter: %s', parameter)
t = parameter['type']
value = parameter['value']
if t == 'plain':
return value
elif t == 'kms_encrypted':
decrypted = kms.decrypt(value.decode('base64'))
return decrypted['Plaintext']
else:
raise Exception("Unexpected parameter type: '%s'" % (t, ))
def debug_parameter(kms, parameter):
t = parameter['type']
if t == 'plain':
return parameter['value']
elif t == 'kms_encrypted':
return '***'
else:
return '<unknown:%s>' % (t, )
def process_env(kms, parameters):
return dict((key, process_parameter(kms, parameter)) for key, parameter in parameters.iteritems())
def main():
parser = argparse.ArgumentParser()
parser.add_argument('config_uri')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--aws-credentials', type=file)
args = parser.parse_args()
logging.basicConfig(
format='%(asctime)s %(levelname)s %(message)s',
level=(logging.DEBUG if args.verbose else logging.INFO))
aws_credentials = {}
if args.aws_credentials:
aws_credentials = json.loads(args.aws_credentials.read())
s3 = connect_s3(host='s3.amazonaws.com', **aws_credentials)
# Fetch config json
config_filename = os.path.basename(args.config_uri)
download_to_filename(s3, args.config_uri, config_filename)
with open(config_filename, 'rb') as f:
config = json.load(f)
# Compile environment variables
env_parameters = {}
kms = KMSConnection(**aws_credentials)
env_parameters = process_env(kms, config.get('env', {}))
# Create working directory
working_directory = config['working_directory']
mkpath(working_directory)
# Download staging files
for item in config.get('download', []):
download(s3, **item)
# Execute command
env = dict(os.environ)
env.update(env_parameters)
debug_command = [debug_parameter(kms, parameter) for parameter in config['command']]
command = [process_parameter(kms, parameter) for parameter in config['command']]
logging.info('executing command: %s', debug_command)
logging.debug('Popen: command=%s, env=%s', command, env)
process = subprocess.Popen(command, env=env, cwd=working_directory)<|fim▁hole|> status = main()
sys.exit(status)<|fim▁end|>
|
return process.wait()
if __name__ == '__main__':
|
<|file_name|>struct_zone.go<|end_file_name|><|fim▁begin|>package hsm
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// Zone is a nested struct in hsm response
type Zone struct {
ZoneId string `json:"ZoneId" xml:"ZoneId"`
}<|fim▁end|>
|
//See the License for the specific language governing permissions and
//limitations under the License.
//
|
<|file_name|>bitcoin_gu_IN.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="gu_IN" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About TLGCoin</source>
<translation>બીટકોઈન વિષે</translation>
</message>
<message>
<location line="+39"/>
<source><b>TLGCoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The TLGCoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your TLGCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a TLGCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified TLGCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your TLGCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR TLGCOINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>TLGCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your tlgcoins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Show information about TLGCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a TLGCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for TLGCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>TLGCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About TLGCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your TLGCoin addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified TLGCoin addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>TLGCoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to TLGCoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid TLGCoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. TLGCoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid TLGCoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>TLGCoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start TLGCoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start TLGCoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the TLGCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the TLGCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting TLGCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show TLGCoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting TLGCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the TLGCoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start tlgcoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the TLGCoin-Qt help message to get a list with possible TLGCoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>TLGCoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>TLGCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the TLGCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the TLGCoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a TLGCoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this TLGCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified TLGCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a TLGCoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter TLGCoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The TLGCoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 20 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>TLGCoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or tlgcoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: tlgcoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: tlgcoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 23069 or testnet: 33069)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 23070 or testnet: 33070)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=tlgcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "TLGCoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. TLGCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong TLGCoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
    <message>
        <location line="+1"/>
        <source>Failed to write block</source>
        <translation type="unfinished"/>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the TLGCoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of TLGCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart TLGCoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. TLGCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
|
<source>Failed to write block</source>
<translation type="unfinished"/>
|
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>import sys
import time
import os.path
from collections import Counter
from vial import vfunc, vim, dref
from vial.utils import redraw, focus_window
from vial.widgets import make_scratch
collector = None
def get_collector():
    """Return the process-wide ResultCollector, creating it on first use."""
    global collector
    if collector is None:
        collector = ResultCollector()
    return collector
def run_test(project_dir, executable=None, match=None, files=None, env=None):
    """Launch py.test (via the bundled ``pt.py`` driver) in *project_dir*.

    Returns ``(proc, conn)``: the child process and a multiprocessing
    connection over which the driver streams test results back (consumed
    by ``ResultCollector.collect``).

    :param executable: python interpreter to use (default: sys.executable).
    :param match: expression forwarded to py.test's ``-k`` option.
    :param files: optional list of test files to restrict the run to.
    :param env: extra environment variables overlaid on os.environ.
    """
    from subprocess import Popen
    from multiprocessing.connection import Client, arbitrary_address
    # Unix-domain socket path over which pt.py reports results back to us.
    addr = arbitrary_address('AF_UNIX')
    filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'pt.py')
    executable = executable or sys.executable
    args = [executable, filename, addr, '-q']
    if match:
        # NOTE(review): this passes '-k <match>' as ONE argv element, so the
        # -k expression carries a leading space; py.test tolerates it, but
        # confirm this was intended rather than two separate arguments.
        args.append('-k %s' % match)
    environ = None
    if env:
        environ = os.environ.copy()
        environ.update(env)
    # NOTE(review): the log handle is never closed here; it lives as long as
    # the child process holds it, but an explicit close would be cleaner.
    log = open('/tmp/vial-pytest.log', 'w')
    if files:
        args.extend(files)
    proc = Popen(args, cwd=project_dir, env=environ, stdout=log, stderr=log, close_fds=True)
    start = time.time()
    # Wait (max 5 s) for pt.py to create the listening socket file.
    while not os.path.exists(addr):
        if time.time() - start > 5:
            raise Exception('py.test launching timeout exceed')
        time.sleep(0.01)
    conn = Client(addr)
    return proc, conn
def indent(width, lines):
    """Return *lines* with *width* leading spaces prepended to each line."""
    pad = ' ' * width
    return [pad + line for line in lines]
@dref
def goto_file():
    """Open the file referenced by the WORD under the cursor.

    The word is expected to look like ``filename:lineno[:...]`` -- the
    format ``add_test_result`` writes into the results buffer.
    """
    filename, line = vfunc.expand('<cWORD>').split(':')[:2]
    for win in vim.windows:
        if vfunc.buflisted(win.buffer.number):
            focus_window(win)
            vim.command('e +{} {}'.format(line, filename))
            # NOTE(review): there is no break here, so the edit command is
            # applied in EVERY window showing a listed buffer; confirm a
            # break after the first match was intended.
class ResultCollector(object):
    """Receives py.test progress messages and renders them into a vim
    scratch buffer (``__vial_pytest__``)."""

    def init(self, win, buf):
        # Called by make_scratch() when the scratch buffer is (re)created:
        # set the result syntax and map `gf` to jump to the file under cursor.
        vim.command('setlocal syntax=vialpytest')
        vim.command('nnoremap <buffer> gf :python {}()<cr>'.format(goto_file.ref))

    def reset(self):
        """(Re)create and clear the scratch buffer, restoring focus to the
        window the user was in."""
        cwin = vim.current.window
        _, self.buf = make_scratch('__vial_pytest__', self.init, 'pytest')
        vim.command('normal! ggdG')
        focus_window(cwin)
        redraw()

    def add_test_result(self, rtype, name, result):
        """Append one test outcome to the buffer.

        :param rtype: outcome tag ('PASS', 'FAIL', ...).
        :param name: test identifier (``filename:line ...``).
        :param result: ``(trace, out)`` where *out* is a list of
            (section-name, text) pairs of captured output.
        """
        self.counts[rtype] += 1
        lines = ['{} {}'.format(name, rtype)]
        trace, out = result
        for k, v in out:
            lines.append(' ----======= {} =======----'.format(k))
            lines.extend(indent(1, v.splitlines()))
            lines.append('')
        if trace:
            lines.extend(indent(1, trace.splitlines()))
            lines.append('')
        lines.append('')
        # Replace the trailing (empty) line so results accumulate in order.
        buflen = len(self.buf)
        self.buf[buflen-1:] = lines
        redraw()

    def collect(self, conn):
        """Consume messages from *conn* until an END message arrives."""
        self.tests = []
        self.counts = Counter()
        self.reset()
        while True:
            msg = conn.recv()
            cmd = msg[0]
            if cmd == 'END':
                return
            elif cmd == 'COLLECTED_TESTS':
                # BUGFIX: the payload is msg[1]; the old code indexed
                # cmd[1], i.e. the second *character* of the command string.
                self.tests[:] = msg[1]
            elif cmd in ('PASS', 'ERROR', 'FAIL', 'SKIP', 'FAILED_COLLECT'):
                self.add_test_result(*msg)
def run(*args):<|fim▁hole|> project = os.getcwd()
files = None
if args:
files = [vfunc.expand(r) for r in args]
try:
f = vfunc.VialPythonGetExecutable
except vim.error:
executable = None
else:
executable = f()
proc, conn = run_test(project, files=files, executable=executable)
get_collector().collect(conn)<|fim▁end|>
| |
<|file_name|>_quicktable.py<|end_file_name|><|fim▁begin|>#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2008 Donald N. Allingham
# Copyright (C) 2009 Douglas S. Blank
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Provide a simplified table creation interface
"""
#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
import pickle
#-------------------------------------------------------------------------
#
# GNOME modules
#
#-------------------------------------------------------------------------
from gi.repository import Gdk
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
from gramps.gen.simple import SimpleTable
from gramps.gen.errors import WindowActiveError
from ...utils import model_to_text, text_to_clipboard
from ...widgets.multitreeview import MultiTreeView
from ...ddtargets import DdTargets
from ..quick import run_quick_report_by_name
from ...editors import (EditPerson, EditEvent, EditFamily, EditCitation,
EditSource, EditPlace, EditRepository, EditNote,
EditMedia)
#-------------------------------------------------------------------------
#
# QuickTable class
#
#-------------------------------------------------------------------------
class QuickTable(SimpleTable):
"""
Provide a simplified table creation interface.
"""
def set_callback(self, which, callback):
"""
Override (or add) a function for click/double-click
"""
if which == "leftclick":
self._callback_leftclick = callback
elif which == "leftdouble":
self._callback_leftdouble = callback
    def button_press_event(self, treeview, event):
        """Show the context menu for the table row under the cursor.

        *event* is either a Gdk button event (right-click) or a bool
        (keyboard "enter" path, see the connect() calls in write()).
        Returns True when a menu was shown, False otherwise.
        """
        wid = treeview.get_toplevel()
        try:
            winmgr = self.simpledoc.doc.uistate.gwm
            self.track = winmgr.get_item_from_window(wid).track
        except:
            self.track = []
        index = None
        button_code = None
        event_time = None
        func = None
        if type(event) == bool: # enter
            # Keyboard activation: synthesize a "button 3" popup on the
            # currently selected row.
            button_code = 3
            event_time = 0
            selection = treeview.get_selection()
            store, paths = selection.get_selected_rows()
            tpath = paths[0] if len(paths) > 0 else None
            node = store.get_iter(tpath) if tpath else None
            if node:
                treeview.grab_focus()
                index = store.get_value(node, 0)
                # FIXME: make popup come where cursor is
                #rectangle = treeview.get_visible_rect()
                #column = treeview.get_column(0)
                #rectangle = treeview.get_cell_area("0:0",
                #x, y = rectangle.x, rectangle.y
                #func = lambda menu: (x, y, True)
        elif event.button == 3:
            # Mouse right-click: resolve the row under the pointer.
            button_code = 3
            event_time = event.time
            x = int(event.x)
            y = int(event.y)
            path_info = treeview.get_path_at_pos(x, y)
            func = None
            if path_info is not None:
                path, col, cellx, celly = path_info
                selection = treeview.get_selection()
                store, paths = selection.get_selected_rows()
                tpath = paths[0] if len(paths) > 0 else None
                node = store.get_iter(tpath) if tpath else None
                if path:
                    treeview.grab_focus()
                    treeview.set_cursor(path, col, 0)
                if store and node:
                    index = store.get_value(node, 0) # index Below,
                    # you need index, treeview, path, button_code,
                    # func, and event_time
        if index is not None:
            if self._link[index]:
                objclass, handle = self._link[index]
            else:
                return False
            if (self.simpledoc.doc.uistate.get_export_mode() and
                    objclass != 'Filter'):
                return False # avoid edition during export
            self.popup = Gtk.Menu()
            popup = self.popup
            menu_item = Gtk.MenuItem(label=_("Copy all"))
            menu_item.connect("activate", lambda widget: text_to_clipboard(
                model_to_text(treeview.get_model())))
            popup.append(menu_item)
            menu_item.show()
            # Now add more items to popup menu, if available
            # See details (edit, etc):
            menu_item = Gtk.MenuItem(label=_("the object|See %s details") %
                                     glocale.trans_objclass(objclass))
            menu_item.connect(
                "activate", lambda widget: self.on_table_doubleclick(treeview))
            popup.append(menu_item)
            menu_item.show()
            # Add other items to menu:
            if objclass == 'Person':
                menu_item = Gtk.MenuItem(label=_("the object|Make %s active")
                                         % glocale.trans_objclass('Person'))
                menu_item.connect("activate",
                                  lambda widget: self.on_table_click(treeview))
                popup.append(menu_item)
                menu_item.show()
            # Only offer "not in filter" when viewing a proxy (filtered) db.
            if (self.simpledoc.doc.dbstate.db !=
                    self.simpledoc.doc.dbstate.db.basedb):
                if (objclass == 'Filter' and
                        handle[0] in ['Person', 'Family', 'Place', 'Event',
                                      'Repository', 'Note', 'Media',
                                      'Citation', 'Source']):
                    menu_item = Gtk.MenuItem(label=_("See data not in Filter"))
                    menu_item.connect(
                        "activate",
                        lambda widget: self.show_not_in_filter(handle[0]))
                    popup.append(menu_item)
                    menu_item.show()
            # Show the popup menu:
            popup.popup(None, None, func, None, button_code, event_time)
            return True
        return False
def show_not_in_filter(self, obj_class):
run_quick_report_by_name(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate,
'filterbyname',
'Inverse %s' % obj_class,
track=self.track)
def on_table_doubleclick(self, obj):
"""
Handle events on tables. obj is a treeview
"""
selection = obj.get_selection()
store, paths = selection.get_selected_rows()
tpath = paths[0] if len(paths) > 0 else None
node = store.get_iter(tpath) if tpath else None
if not node:
return
index = store.get_value(node, 0) # index
if self._callback_leftdouble:
self._callback_leftdouble(store.get_value(node, 1))
return True
elif self._link[index]:
objclass, handle = self._link[index]
if isinstance(handle, list):
handle = handle[0]
if objclass == 'Person':
person = self.access.dbase.get_person_from_handle(handle)
if person:
try:
EditPerson(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], person)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'Event':
event = self.access.dbase.get_event_from_handle(handle)
if event:
try:
EditEvent(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], event)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'Family':
ref = self.access.dbase.get_family_from_handle(handle)
if ref:
try:
EditFamily(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], ref)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'Citation':
ref = self.access.dbase.get_citation_from_handle(handle)
if ref:
try:
EditCitation(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], ref)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'Source':
ref = self.access.dbase.get_source_from_handle(handle)
if ref:
try:
EditSource(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], ref)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'Place':
ref = self.access.dbase.get_place_from_handle(handle)
if ref:
try:
EditPlace(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], ref)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'Repository':
ref = self.access.dbase.get_repository_from_handle(handle)
if ref:
try:
EditRepository(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], ref)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'Note':
ref = self.access.dbase.get_note_from_handle(handle)
if ref:
try:
EditNote(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], ref)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'Media':
ref = self.access.dbase.get_media_from_handle(handle)
if ref:
try:
EditMedia(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate, [], ref)
return True # handled event
except WindowActiveError:
pass
elif objclass == 'PersonList':
run_quick_report_by_name(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate,
'filterbyname',
'list of people',
handle=handle,
track=self.track)
elif objclass == 'Filter':
if isinstance(handle, list):
handle = handle[0]
run_quick_report_by_name(self.simpledoc.doc.dbstate,
self.simpledoc.doc.uistate,
'filterbyname',
handle, track=self.track)
return False # didn't handle event
def on_table_click(self, obj):
"""
Handle events on tables. obj is a treeview
"""
selection = obj.get_selection()
store, paths = selection.get_selected_rows()
tpath = paths[0] if len(paths) > 0 else None
node = store.get_iter(tpath)
if not node:
return
index = store.get_value(node, 0) # index
if self._callback_leftclick:
self._callback_leftclick(store.get_value(node, 1))
return True
elif self._link[index]:
objclass, handle = self._link[index]
if isinstance(handle, list):
handle = handle[0]
if objclass == 'Person':
from gi.repository import GLib
# If you emmit the signal here and it causes this table to be deleted,
# then you'll crash Python:
#self.simpledoc.doc.uistate.set_active(handle, 'Person')
# So, let's return from this, then change the active person:
return GLib.timeout_add(100, self.simpledoc.doc.uistate.set_active, handle, 'Person')<|fim▁hole|> return True
return False # didn't handle event
    def object_drag_data_get(self, widget, context, sel_data, info, time):
        """Drag-and-drop source handler: serialise the (objclass, handle)
        links of all selected rows into the drag selection data."""
        tree_selection = widget.get_selection()
        model, paths = tree_selection.get_selected_rows()
        retval = []
        for path in paths:
            node = model.get_iter(path)
            index = model.get_value(node,0)
            # Only rows backed by a link participate in the drag.
            if (index is not None and self._link[index]):
                retval.append(self._link[index])
        # 8 = bits per data unit, the format used for raw byte payloads.
        sel_data.set(DdTargets.HANDLE_LIST.atom_drag_type, 8, pickle.dumps(retval))
        return True
def toggle(self, obj, path, col):
"""
obj - column widget
path - row
col - column
"""
self.treeview.get_model()[path][col] = not \
self.treeview.get_model()[path][col]
def write(self, document):
self.simpledoc = document
buffer = self.simpledoc.doc.buffer
text_view = self.simpledoc.doc.text_view
model_index = 1 # start after index
if self._sort_col:
sort_index = self._columns.index(self._sort_col)
else:
sort_index = 0
treeview = MultiTreeView()
treeview.enable_model_drag_source(Gdk.ModifierType.BUTTON1_MASK,
[],
Gdk.DragAction.COPY)
tglist = Gtk.TargetList.new([])
tglist.add(DdTargets.HANDLE_LIST.atom_drag_type, Gtk.TargetFlags.SAME_WIDGET,
0)
treeview.drag_source_set_target_list(tglist)
#treeview.enable_model_drag_dest(DdTargets.all_targets(),
# Gdk.DragAction.DEFAULT)
treeview.connect('drag_data_get', self.object_drag_data_get)
treeview.set_grid_lines(Gtk.TreeViewGridLines.BOTH)
#treeview.connect('row-activated', on_table_doubleclick, self)
#treeview.connect('cursor-changed', on_table_click, self)
treeview.connect('button-press-event', self.button_press_event)
treeview.connect('select-cursor-row', self.button_press_event)
renderer = Gtk.CellRendererText()
types = [int] # index
cnt = 0
sort_data = []
sort_data_types = []
for col in self._columns:
if self.get_cell_type(cnt) == "text":
types.append(str)
if self.get_cell_markup(cnt):
column = Gtk.TreeViewColumn(col,renderer,markup=model_index)
else:
column = Gtk.TreeViewColumn(col,renderer,text=model_index)
elif self.get_cell_type(cnt) == "checkbox":
types.append(bool)
toggle_renderer = Gtk.CellRendererToggle()
toggle_renderer.set_property('activatable', True)
toggle_renderer.connect("toggled", self.toggle, model_index)
column = Gtk.TreeViewColumn(col, toggle_renderer)
column.add_attribute(toggle_renderer, "active", model_index)
column.set_resizable(True)
if self._sort_vals[cnt] != []:
sort_data.append(self._sort_vals[cnt])
column.set_sort_column_id(len(self._columns) +
len(sort_data))
sort_data_types.append(int)
else:
column.set_sort_column_id(model_index)
treeview.append_column(column)
self.model_index_of_column[col] = model_index
#if model_index == sort_index:
# FIXME: what to set here?
model_index += 1
cnt += 1
if self.title:
self.simpledoc.paragraph(self.title)
# Make a GUI to put the tree view in
types += sort_data_types
model = Gtk.ListStore(*types)
treeview.set_model(model)
treeview.get_selection().set_mode(Gtk.SelectionMode.MULTIPLE)
iter = buffer.get_end_iter()
anchor = buffer.create_child_anchor(iter)
text_view.add_child_at_anchor(treeview, anchor)
self.treeview= treeview
count = 0
for data in self._rows:
col = 0
rowdata = []
for cell in data:
rowdata.append(self.get_cell_markup(col, count, cell))
col += 1
try:
model.append(row=([count] + list(rowdata) + [col[count] for col in sort_data]))
except KeyError as msg:
print (msg)
if sort_data:
print("Quicktable: error in row %d: data: %s, sort data: %d" % (count, rowdata, len(sort_data[0])))
else:
print("Quicktable: error in row %d: data: %s" % (count, rowdata))
count += 1
text_view.show_all()
self.simpledoc.paragraph("")
self.simpledoc.paragraph("")<|fim▁end|>
| |
<|file_name|>file.rs<|end_file_name|><|fim▁begin|>// "Tifflin" Kernel
// - By John Hodge (thePowersGang)
//
// Modules/fs_fat/dir.rs
use kernel::prelude::*;
use kernel::lib::mem::aref::ArefBorrow;
use kernel::vfs::node;
const ERROR_SHORTCHAIN: node::IoError = node::IoError::Unknown("Cluster chain terminated early");
pub type FilesystemInner = super::FilesystemInner;
/// An open file on the FAT filesystem: the head of its cluster chain plus
/// its byte length (FAT keeps the size in the directory entry, not in the
/// chain itself).
pub struct FileNode
{
	fs: ArefBorrow<FilesystemInner>,
	//parent_dir: u32,
	// First cluster of this file's chain (chain walked via ClusterList).
	first_cluster: u32,
	// File size in bytes, as recorded in the directory entry.
	size: u32,
}
impl FileNode
{
	/// Allocate a new boxed file node.
	///
	/// `_parent` (the owning directory) is currently unused but kept in the
	/// signature so the directory code's call sites stay stable.
	pub fn new_boxed(fs: ArefBorrow<FilesystemInner>, _parent: u32, first_cluster: u32, size: u32) -> Box<FileNode> {
		Box::new(FileNode {
			fs: fs,
			//parent_dir: parent,
			first_cluster: first_cluster,
			size: size,
			})
	}
}
// VFS node basics. FAT has no persistent inode numbers, hence the todo.
impl node::NodeBase for FileNode {
	fn get_id(&self) -> node::InodeId {
		todo!("FileNode::get_id")
	}
}
impl node::File for FileNode {
fn size(&self) -> u64 {
self.size as u64
}
fn truncate(&self, newsize: u64) -> node::Result<u64> {
todo!("FileNode::truncate({:#x})", newsize);
}
fn clear(&self, ofs: u64, size: u64) -> node::Result<()> {
todo!("FileNode::clear({:#x}+{:#x}", ofs, size);
}
fn read(&self, ofs: u64, buf: &mut [u8]) -> node::Result<usize> {
// Sanity check and bound parameters
if ofs >= self.size as u64 {
// out of range
return Err( node::IoError::OutOfRange );
}
let maxread = (self.size as u64 - ofs) as usize;
let buf = if buf.len() > maxread { &mut buf[..maxread] } else { buf };
let read_length = buf.len();
// Seek to correct position in the cluster chain
let mut clusters = super::ClusterList::chained(self.fs.reborrow(), self.first_cluster)
.skip( (ofs/self.fs.cluster_size as u64) as usize);
let ofs = (ofs % self.fs.cluster_size as u64) as usize;
// First incomplete cluster
let chunks = if ofs != 0 {
let cluster = match clusters.next()
{
Some(v) => v,
None => return Err( ERROR_SHORTCHAIN ),
};
let short_count = ::core::cmp::min(self.fs.cluster_size-ofs, buf.len());
let c = try!(self.fs.load_cluster(cluster));
let n = buf[..short_count].clone_from_slice( &c[ofs..] );
assert_eq!(n, short_count);
buf[short_count..].chunks_mut(self.fs.cluster_size)
}
else {
buf.chunks_mut(self.fs.cluster_size)
};
// The rest of the clusters
for dst in chunks
{
let cluster = match clusters.next()
{
Some(v) => v,
None => return Err(ERROR_SHORTCHAIN),
};
if dst.len() == self.fs.cluster_size {
// Read directly
try!(self.fs.read_cluster(cluster, dst));
}
else {
// Bounce (could leave the bouncing up to read_cluster I guess...)
let c = try!(self.fs.load_cluster(cluster));<|fim▁hole|>
Ok( read_length )
}
/// Write data to the file, can only grow the file if ofs==size
fn write(&self, ofs: u64, buf: &mut [u8]) -> node::Result<usize> {
todo!("FileNode::write({:#x}, {:p})", ofs, ::kernel::lib::SlicePtr(buf));
}
}<|fim▁end|>
|
let n = dst.clone_from_slice( &c );
assert_eq!(n, dst.len());
}
}
|
<|file_name|>db.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3.4
#############################################################################
#
# Dictionnary DB managing script. Add/Del/Search definitions
# Copyright (C) 2014 bertrand
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#############################################################################
###############
### Imports ###
import sys
import psycopg2 as PSQL
import textwrap as txtwrp
#####################
### Configuration ###
config = {
'VERSION_MAJOR' : '0',
'VERSION_MINOR' : '1',
'dbname' : 'bertrand',
'user' : 'bertrand'
}
#############
### USAGE ###
def usage():
    """Print the general usage/help message for the tool."""
    # Fixed garbled word "dicotionnnary" in the user-visible text.
    print("Tool to insert/remove entries in the dictionary.")
    print("Version: " + config['VERSION_MAJOR'] + "." + config['VERSION_MINOR'])
    print("Usage: " + sys.argv[0] + " <command> <options>")
    print("")
    print("Commands:")
    print("   add      Add definition to dictionary.")
    print("   del      Remove definition from dictionary.")
    print("   help     Print general help or command specific help.")
    print("   search   Search definition in dictionary.")
    print("")
### ADD ###
def add():
    """Handle the 'add' command: parse options from sys.argv and insert a
    new definition row into the 'dico' table.

    Required options: -f (fields), -n (name), -d (definition).
    Optional: -u (url).
    """
    argc = len(sys.argv)
    if argc < 3:
        __help_cmd(sys.argv[1])
        return
    req = {
        'fields' : '',
        'name'   : '',
        'def'    : '',
        'url'    : ''
    }
    i = 2
    while i < argc:
        if sys.argv[i] == "-d":
            i += 1
            req['def'] = sys.argv[i]
        elif sys.argv[i] == "-f":
            i += 1
            req['fields'] = sys.argv[i]
        elif sys.argv[i] == '-n':
            i += 1
            req['name'] = sys.argv[i]
        elif sys.argv[i] == "-u":
            i += 1
            req['url'] = sys.argv[i]
        else:
            print("Unknown option '" + sys.argv[i] + "'")
            __help_cmd(sys.argv[1])
            return
        i += 1
    if req['fields'] == '':
        print("Please specify fields with option '-f'.")
        __help_cmd(sys.argv[1])
        return
    elif req['name'] == '':
        # BUGFIX: this message previously told the user to use '-f' (fields)
        # instead of '-n' (name).
        print("Please specify name with option '-n'.")
        __help_cmd(sys.argv[1])
        return
    elif req['def'] == '':
        print("Please specify definition with option '-d'.")
        __help_cmd(sys.argv[1])
        return
    conn = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cur = conn.cursor()
    # mogrify() quotes all values safely (no SQL injection here).
    sql = cur.mogrify("INSERT INTO dico (fields,name,def,url) VALUES (%s, %s, %s, %s)",
                      ("{" + req['fields'] + "}", req['name'], req['def'], req['url']))
    print(sql)
    cur.execute(sql)
    conn.commit()
    cur.close()
    conn.close()
###########
### DEL ###
def delete():
    """Handle the 'del' command: remove the definition whose id is given
    as the command-line argument (argv[2])."""
    if len(sys.argv) <= 2:
        print("Missing argument.")
        __help_cmd(sys.argv[1])
        return
    defid = sys.argv[2]
    conn = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cur = conn.cursor()
    sql = cur.mogrify("DELETE FROM dico WHERE id=%s", (defid,))
    print(sql)
    cur.execute(sql)
    conn.commit()
    cur.close()
    conn.close()
#####################
### HELP COMMANDS ###
def help_cmd():
    """Handle the 'help' command: dispatch to __help_cmd with the optional
    sub-command name (empty string means general help)."""
    try:
        cmd = sys.argv[2]
    except IndexError:
        # BUGFIX: was a bare 'except:', which would also swallow unrelated
        # errors (including KeyboardInterrupt); only a missing argv[2] is
        # expected here.
        cmd = ''
    __help_cmd(cmd)
def __help_cmd(cmd):
    """Print help for *cmd*; '' prints the general usage, an unknown
    command prints a diagnostic followed by the general usage."""
    if cmd == '' :
        usage()
    elif cmd == "add" :
        print("Command '" + cmd + "': Add definition to dictionary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <options>")
        print("")
        print("Options:")
        print("   -d <str>          Definition.")
        print("   -f <str,str,..>   List of fields.")
        print("   -n <str>          Name of the entry")
        print("   -u <url>          One url to a more complete definition.")
        print("")
    elif cmd == "del" :
        print("Command '" + cmd + "': Delete definition from dictionary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <id>")
        print("")
        print("Param:")
        print("   id    ID of the definition to delete.")
        print("")
    elif cmd == "help" :
        print("Command '" + cmd + "': Print help.")
        print("Usage: " + sys.argv[0] + " " + cmd + " [command]")
        print("")
        print("Giving NO 'command' this will print the general help.")
        print("Giving 'command' this will print the command specific help. ")
        print("")
    elif cmd == "search" :
        print("Command '" + cmd + "': Search definition in dictionary.")
        print("Usage: " + sys.argv[0] + " " + cmd + " <options>")
        print("")
        print("Options:")
        print("   -a                 Print all definitions in the table.")
        print("   -f <str,str,...>   Print definitions matching the set of given fields.")
        print("   -i <id>            Print definition matching the given ID.")
        # BUGFIX: fixed typo "mathing" -> "matching" in the help text.
        print("   -n <str>           Print definition matching the given entry name.")
        print("")
    else:
        print("Unknown command: '" + cmd + "'")
        usage()
##############
### SEARCH ###
def search():
    """Handle the 'search' command: build a SELECT from the option in
    argv[2] (-a/-f/-i/-n), run it and pretty-print the matching rows."""
    try:
        opt = sys.argv[2]
    except IndexError:
        __help_cmd(sys.argv[1])
        return
    else:
        if not opt in ('-a', '-f', '-i', '-n'):
            print("Unknown option '" + sys.argv[2] + "'")
            __help_cmd(sys.argv[1])
            return
    conn = PSQL.connect("dbname=" + config['dbname'] + " user=" + config['user'])
    cur = conn.cursor()
    try:
        if opt == "-a":
            # All definitions in the table.
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico")
        elif opt == "-f":
            # Match any of the comma-separated fields; quoting handled in
            # __search_build_req_fields.
            optarg = sys.argv[3]
            req = __search_build_req_fields(optarg.split(','))
        elif opt == '-i':
            optarg = sys.argv[3]
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico WHERE id=%s", (optarg,))
        elif opt == "-n":
            optarg = sys.argv[3]
            req = cur.mogrify("SELECT id,fields,name,def,url FROM dico WHERE name=%s", (optarg,))
    except IndexError:
        # -f/-i/-n require an argument in argv[3].
        print("Missing argument.")
        __help_cmd(sys.argv[1])
    else:
        print(req)
        cur.execute(req)
        print_rows(cur.fetchall())
        conn.commit()
    finally:
        cur.close()
        conn.close()
def __search_build_req_fields(fields):
    """Return a SELECT statement matching rows whose 'fields' array
    contains any of the given *fields* values.

    SECURITY FIX: each value is now quoted through psycopg2's adapter, so
    user input can no longer break out of the SQL literal.  The previous
    version concatenated raw strings -- a SQL injection its own comments
    joked about (xkcd 327).
    """
    def _quote(value):
        # adapt() wraps the value in a QuotedString; getquoted() returns
        # the escaped SQL literal as bytes, e.g. b"'foo'".
        return PSQL.extensions.adapt(str(value)).getquoted().decode()
    where = " OR ".join(_quote(f) + "=ANY(fields)" for f in fields)
    return "SELECT id,fields,name,def,url FROM dico WHERE " + where
###################################
### PRINT PSQL REQUESTS RESULTS ###
def print_rows(rows):
    """Pretty-print a sequence of 'dico' rows (id, fields, name, def, url)."""
    labels = ("FIELDS : ", "NAME : ", "DEF : ", "URL : ")
    for row in rows:
        print("---------------------")
        print("ID : ", row[0])
        for label, value in zip(labels, row[1:5]):
            __print_row_wrapped(label, value)
        print("")
def __print_row_wrapped(label, value):
    """Print ``label value``, wrapping *value* at textwrap's default width;
    continuation lines are padded to line up under the first chunk.

    BUGFIX: ``textwrap.wrap('')`` returns ``[]``, so the old ``wrapped[0]``
    raised IndexError for empty values (e.g. a row whose url is '').
    """
    wrapped = txtwrp.wrap(value) or ['']
    print(label, wrapped[0])
    pad = ' ' * len(label)
    for chunk in wrapped[1:]:
        print(pad, chunk)
<|fim▁hole|>commands = {
'add' : add,
'del' : delete,
'help' : help_cmd,
'search' : search
}
try:
cmd = sys.argv[1]
except KeyError:
print("Unknown command: " + cmd)
usage()
sys.exit()
except IndexError:
usage()
sys.exit()
else:
commands[cmd]()<|fim▁end|>
|
############
### MAIN ###
|
<|file_name|>api.js<|end_file_name|><|fim▁begin|>export default {
queryRouteList: '/routes',
queryUserInfo: '/user',
logoutUser: '/user/logout',
loginUser: 'POST /user/login',
queryUser: '/user/:id',
queryUserList: '/users',
updateUser: 'Patch /user/:id',
createUser: 'POST /user',
removeUser: 'DELETE /user/:id',
removeUserList: 'POST /users/delete',<|fim▁hole|>
queryPostList: '/posts',
queryDashboard: '/dashboard',
}<|fim▁end|>
| |
<|file_name|>test_vmware_vmdk.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suite for VMware VMDK driver.
"""
import mox
from cinder import exception
from cinder.image import glance
from cinder import test
from cinder import units
from cinder.volume import configuration
from cinder.volume.drivers.vmware import api
from cinder.volume.drivers.vmware import error_util
from cinder.volume.drivers.vmware import vim
from cinder.volume.drivers.vmware import vim_util
from cinder.volume.drivers.vmware import vmdk
from cinder.volume.drivers.vmware import vmware_images
from cinder.volume.drivers.vmware import volumeops
class FakeVim(object):
    """Stub for the Vim service object used by VMwareAPISession.

    Every member returns a mox.MockAnything(), which tolerates any
    attribute access or call the code under test makes on it.
    """

    @property
    def service_content(self):
        return mox.MockAnything()

    @property
    def client(self):
        return mox.MockAnything()

    def Login(self, session_manager, userName, password):
        # Accept any credentials; callers only need a session-like object.
        return mox.MockAnything()
class FakeTaskInfo(object):
    """Minimal stand-in for a vSphere TaskInfo object."""

    def __init__(self, state, result=None):
        class FakeError(object):
            """Stub for TaskInfo.error exposing localizedMessage."""
            def __init__(self):
                self.localizedMessage = None

        self.state = state
        self.result = result
        self.error = FakeError()
class FakeMor(object):
    """Stub managed object reference with a type tag and a value."""

    def __init__(self, type, val):
        self.value = val
        self._type = type
class FakeObject(object):
    """Dict-like stub: fields are read and written via subscription."""

    def __init__(self):
        self._fields = {}

    def __setitem__(self, name, value):
        self._fields[name] = value

    def __getitem__(self, name):
        return self._fields[name]
class FakeManagedObjectReference(object):
    """Stub for a suds array-of-ManagedObjectReference wrapper.

    The original used a mutable default argument (lis=[]): all instances
    created without an argument shared one list object. Use a None
    sentinel instead; behavior for callers passing a list is unchanged.
    """

    def __init__(self, lis=None):
        self.ManagedObjectReference = [] if lis is None else lis
class FakeDatastoreSummary(object):
    """Stub datastore summary carrying capacity/free-space figures."""

    def __init__(self, freeSpace, capacity, datastore=None, name=None):
        self.name = name
        self.datastore = datastore
        self.capacity = capacity
        self.freeSpace = freeSpace
class FakeSnapshotTree(object):
    """Stub node of a VM snapshot tree.

    'tree' becomes rootSnapshotList so one class can play both the
    top-level snapshot info object and an individual tree node.
    """

    def __init__(self, tree=None, name=None,
                 snapshot=None, childSnapshotList=None):
        self.childSnapshotList = childSnapshotList
        self.snapshot = snapshot
        self.name = name
        self.rootSnapshotList = tree
class FakeElem(object):
    """Stub ObjectContent element holding a property set."""

    def __init__(self, prop_set=None):
        self.propSet = prop_set
class FakeProp(object):
    """Stub dynamic property: a (name, val) pair."""

    def __init__(self, name=None, val=None):
        self.val = val
        self.name = name
class FakeRetrieveResult(object):
    """Stub paged retrieval result: objects plus a continuation token."""

    def __init__(self, objects, token):
        self.token = token
        self.objects = objects
class FakeObj(object):
    """Stub wrapper exposing a single .obj attribute."""

    def __init__(self, obj=None):
        self.obj = obj
class VMwareEsxVmdkDriverTestCase(test.TestCase):
"""Test class for VMwareEsxVmdkDriver."""
IP = 'localhost'
USERNAME = 'username'
PASSWORD = 'password'
VOLUME_FOLDER = 'cinder-volumes'
API_RETRY_COUNT = 3
TASK_POLL_INTERVAL = 5.0
IMG_TX_TIMEOUT = 10
MAX_OBJECTS = 100
def setUp(self):
super(VMwareEsxVmdkDriverTestCase, self).setUp()
self._config = mox.MockObject(configuration.Configuration)
self._config.append_config_values(mox.IgnoreArg())
self._config.vmware_host_ip = self.IP
self._config.vmware_host_username = self.USERNAME
self._config.vmware_host_password = self.PASSWORD
self._config.vmware_wsdl_location = None
self._config.vmware_volume_folder = self.VOLUME_FOLDER
self._config.vmware_api_retry_count = self.API_RETRY_COUNT
self._config.vmware_task_poll_interval = self.TASK_POLL_INTERVAL
self._config.vmware_image_transfer_timeout_secs = self.IMG_TX_TIMEOUT
self._config.vmware_max_objects_retrieval = self.MAX_OBJECTS
self._driver = vmdk.VMwareEsxVmdkDriver(configuration=self._config)
api_retry_count = self._config.vmware_api_retry_count,
task_poll_interval = self._config.vmware_task_poll_interval,
self._session = api.VMwareAPISession(self.IP, self.USERNAME,
self.PASSWORD, api_retry_count,
task_poll_interval,
create_session=False)
self._volumeops = volumeops.VMwareVolumeOps(self._session,
self.MAX_OBJECTS)
self._vim = FakeVim()
def test_retry(self):
"""Test Retry."""
class TestClass(object):
def __init__(self):
self.counter1 = 0
@api.Retry(max_retry_count=2, inc_sleep_time=0.001,
exceptions=(Exception))
def fail(self):
self.counter1 += 1
raise exception.CinderException('Fail')
test_obj = TestClass()
self.assertRaises(exception.CinderException, test_obj.fail)
self.assertEqual(test_obj.counter1, 3)
def test_create_session(self):
"""Test create_session."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.ReplayAll()
self._session.create_session()
m.UnsetStubs()
m.VerifyAll()
def test_do_setup(self):
"""Test do_setup."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'session')
self._driver.session = self._session
m.ReplayAll()
self._driver.do_setup(mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_check_for_setup_error(self):
"""Test check_for_setup_error."""
self._driver.check_for_setup_error()
def test_get_volume_stats(self):
"""Test get_volume_stats."""
stats = self._driver.get_volume_stats()
self.assertEqual(stats['vendor_name'], 'VMware')
self.assertEqual(stats['driver_version'], '1.0')
self.assertEqual(stats['storage_protocol'], 'LSI Logic SCSI')
self.assertEqual(stats['reserved_percentage'], 0)
self.assertEqual(stats['total_capacity_gb'], 'unknown')
self.assertEqual(stats['free_capacity_gb'], 'unknown')
def test_create_volume(self):
"""Test create_volume."""
self._driver.create_volume(mox.IgnoreArg())
def test_success_wait_for_task(self):
"""Test successful wait_for_task."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
result = FakeMor('VirtualMachine', 'my_vm')
success_task_info = FakeTaskInfo('success', result=result)
m.StubOutWithMock(vim_util, 'get_object_property')
vim_util.get_object_property(self._session.vim,
mox.IgnoreArg(),
'info').AndReturn(success_task_info)
m.ReplayAll()
ret = self._session.wait_for_task(mox.IgnoreArg())
self.assertEqual(ret.result, result)
m.UnsetStubs()
m.VerifyAll()
def test_failed_wait_for_task(self):
"""Test failed wait_for_task."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
failed_task_info = FakeTaskInfo('failed')
m.StubOutWithMock(vim_util, 'get_object_property')
vim_util.get_object_property(self._session.vim,
mox.IgnoreArg(),
'info').AndReturn(failed_task_info)
m.ReplayAll()
self.assertRaises(error_util.VimFaultException,
self._session.wait_for_task,
mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_continue_retrieval(self):
"""Test continue_retrieval."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
self._session.invoke_api(vim_util, 'continue_retrieval',
self._vim, mox.IgnoreArg())
m.ReplayAll()
self._volumeops.continue_retrieval(mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_cancel_retrieval(self):
"""Test cancel_retrieval."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
self._session.invoke_api(vim_util, 'cancel_retrieval',
self._vim, mox.IgnoreArg())
m.ReplayAll()
self._volumeops.cancel_retrieval(mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_get_backing(self):
"""Test get_backing."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
self._session.invoke_api(vim_util, 'get_objects',
self._vim, 'VirtualMachine',
self.MAX_OBJECTS)
m.ReplayAll()
self._volumeops.get_backing(mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_get_backing_multiple_retrieval(self):
"""Test get_backing with multiple retrieval."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
retrieve_result = FakeRetrieveResult([], 'my_token')
self._session.invoke_api(vim_util, 'get_objects',
self._vim, 'VirtualMachine',
self.MAX_OBJECTS).AndReturn(retrieve_result)
m.StubOutWithMock(self._volumeops, 'cancel_retrieval')
self._volumeops.continue_retrieval(retrieve_result)
m.ReplayAll()
self._volumeops.get_backing(mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_delete_backing(self):
"""Test delete_backing."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
backing = FakeMor('VirtualMachine', 'my_vm')
self._session.invoke_api(self._vim, 'Destroy_Task', backing)
m.StubOutWithMock(self._session, 'wait_for_task')
self._session.wait_for_task(mox.IgnoreArg())
m.ReplayAll()
self._volumeops.delete_backing(backing)
m.UnsetStubs()
m.VerifyAll()
def test_delete_volume_without_backing(self):
"""Test delete_volume without backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
self._volumeops.get_backing('hello_world').AndReturn(None)
m.ReplayAll()
volume = FakeObject()
volume['name'] = 'hello_world'
self._driver.delete_volume(volume)
m.UnsetStubs()
m.VerifyAll()
def test_delete_volume_with_backing(self):
"""Test delete_volume with backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
backing = FakeMor('VirtualMachine', 'my_vm')
task = FakeMor('Task', 'my_task')
m.StubOutWithMock(self._volumeops, 'get_backing')
m.StubOutWithMock(self._volumeops, 'delete_backing')
self._volumeops.get_backing('hello_world').AndReturn(backing)
self._volumeops.delete_backing(backing)
m.ReplayAll()
volume = FakeObject()
volume['name'] = 'hello_world'
self._driver.delete_volume(volume)
m.UnsetStubs()
m.VerifyAll()
def test_create_export(self):
"""Test create_export."""
self._driver.create_export(mox.IgnoreArg(), mox.IgnoreArg())
def test_ensure_export(self):
"""Test ensure_export."""
self._driver.ensure_export(mox.IgnoreArg(), mox.IgnoreArg())
def test_remove_export(self):
"""Test remove_export."""
self._driver.remove_export(mox.IgnoreArg(), mox.IgnoreArg())
def test_terminate_connection(self):
"""Test terminate_connection."""
self._driver.terminate_connection(mox.IgnoreArg(), mox.IgnoreArg(),
force=mox.IgnoreArg())
def test_get_host(self):
"""Test get_host."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
instance = FakeObject()
self._session.invoke_api(vim_util, 'get_object_property',
self._vim, instance, 'runtime.host')
m.ReplayAll()
self._volumeops.get_host(instance)
m.UnsetStubs()
m.VerifyAll()
def test_get_hosts(self):
"""Test get_hosts."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
self._session.invoke_api(vim_util, 'get_objects', self._vim,
'HostSystem', self.MAX_OBJECTS)
m.ReplayAll()
self._volumeops.get_hosts()
m.UnsetStubs()
m.VerifyAll()
def test_is_valid_with_accessible_attr(self):
"""Test _is_valid with accessible attribute."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
datastore = FakeMor('Datastore', 'my_ds')
mntInfo = FakeObject()
mntInfo.accessMode = "readWrite"
mntInfo.accessible = True
host = FakeMor('HostSystem', 'my_host')
host_mount = FakeObject()
host_mount.key = host
host_mount.mountInfo = mntInfo
host_mounts = FakeObject()
host_mounts.DatastoreHostMount = [host_mount]
self._session.invoke_api(vim_util, 'get_object_property',
self._vim, datastore,
'host').AndReturn(host_mounts)
m.ReplayAll()
self.assertTrue(self._volumeops._is_valid(datastore, host))
m.UnsetStubs()
m.VerifyAll()
def test_is_valid_without_accessible_attr(self):
"""Test _is_valid without accessible attribute."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
datastore = FakeMor('Datastore', 'my_ds')
mntInfo = FakeObject()
mntInfo.accessMode = "readWrite"
host = FakeMor('HostSystem', 'my_host')
host_mount = FakeObject()
host_mount.key = host
host_mount.mountInfo = mntInfo
host_mounts = FakeObject()
host_mounts.DatastoreHostMount = [host_mount]
self._session.invoke_api(vim_util, 'get_object_property',
self._vim, datastore,
'host').AndReturn(host_mounts)
m.StubOutWithMock(self._volumeops, 'get_summary')
summary = FakeObject()
summary.accessible = True
self._volumeops.get_summary(datastore).AndReturn(summary)
m.ReplayAll()
self.assertTrue(self._volumeops._is_valid(datastore, host))
m.UnsetStubs()
m.VerifyAll()
def test_get_dss_rp(self):
"""Test get_dss_rp."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
host = FakeObject()
self._session.invoke_api(vim_util, 'get_object_properties',
self._vim, host,
['datastore', 'parent']).AndReturn([])
self._session.invoke_api(vim_util, 'get_object_property',
self._vim, mox.IgnoreArg(), 'resourcePool')
m.ReplayAll()
self.assertRaises(error_util.VimException, self._volumeops.get_dss_rp,
host)
m.UnsetStubs()
m.VerifyAll()
def test_get_dss_rp_without_datastores(self):
"""Test get_dss_rp without datastores."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
host = FakeObject()
props = [FakeElem(prop_set=[FakeProp(name='datastore')])]
self._session.invoke_api(vim_util, 'get_object_properties',
self._vim, host,
['datastore', 'parent']).AndReturn(props)
self._session.invoke_api(vim_util, 'get_object_property',
self._vim, mox.IgnoreArg(), 'resourcePool')
m.ReplayAll()
self.assertRaises(error_util.VimException, self._volumeops.get_dss_rp,
host)
m.UnsetStubs()
m.VerifyAll()
    def test_get_parent(self):
        """Test _get_parent for both the direct and the recursive case."""
        # Direct: the child already has the requested type, so it is
        # returned as-is with no API round trip.
        child = FakeMor('Parent', 'my_parent')
        parent = self._volumeops._get_parent(child, 'Parent')
        self.assertEqual(parent, child)
        # Recursive: the child has a different type, so _get_parent must
        # fetch its 'parent' property through the (mocked) invoke_api call.
        m = self.mox
        m.StubOutWithMock(api.VMwareAPISession, 'vim')
        self._session.vim = self._vim
        m.StubOutWithMock(self._session, 'invoke_api')
        parent = FakeMor('Parent', 'my_parent1')
        child = FakeMor('Child', 'my_child')
        self._session.invoke_api(vim_util, 'get_object_property', self._vim,
                                 child, 'parent').AndReturn(parent)
        m.ReplayAll()
        ret = self._volumeops._get_parent(child, 'Parent')
        self.assertEqual(ret, parent)
        m.UnsetStubs()
        m.VerifyAll()
def test_get_dc(self):
"""Test get_dc."""
m = self.mox
m.StubOutWithMock(self._volumeops, '_get_parent')
self._volumeops._get_parent(mox.IgnoreArg(), 'Datacenter')
m.ReplayAll()
self._volumeops.get_dc(mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_get_vmfolder(self):
"""Test get_vmfolder."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
datacenter = FakeMor('Datacenter', 'my_dc')
self._session.invoke_api(vim_util, 'get_object_property', self._vim,
datacenter, 'vmFolder')
m.ReplayAll()
dc = self._volumeops.get_vmfolder(datacenter)
m.UnsetStubs()
m.VerifyAll()
def test_create_backing(self):
"""Test create_backing."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
folder = FakeMor('Folder', 'my_fol')
resource_pool = FakeMor('ResourcePool', 'my_rs')
host = FakeMor('HostSystem', 'my_host')
task = FakeMor('Task', 'my_task')
self._session.invoke_api(self._vim, 'CreateVM_Task', folder,
config=mox.IgnoreArg(), pool=resource_pool,
host=host).AndReturn(task)
m.StubOutWithMock(self._session, 'wait_for_task')
task_info = FakeTaskInfo('success', mox.IgnoreArg())
self._session.wait_for_task(task).AndReturn(task_info)
name = 'my_vm'
size_kb = 1 * units.MiB
disk_type = 'thick'
ds_name = 'my_ds'
m.StubOutWithMock(self._volumeops, '_get_create_spec')
self._volumeops._get_create_spec(name, size_kb, disk_type, ds_name)
m.ReplayAll()
self._volumeops.create_backing(name, size_kb, disk_type, folder,
resource_pool, host, ds_name)
m.UnsetStubs()
m.VerifyAll()
def test_create_backing_in_inventory_multi_hosts(self):
"""Test _create_backing_in_inventory scanning multiple hosts."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
host1 = FakeObj(obj=FakeMor('HostSystem', 'my_host1'))
host2 = FakeObj(obj=FakeMor('HostSystem', 'my_host2'))
host3 = FakeObj(obj=FakeMor('HostSystem', 'my_host3'))
retrieve_result = FakeRetrieveResult([host1, host3, host2], None)
m.StubOutWithMock(self._volumeops, 'get_hosts')
self._volumeops.get_hosts().AndReturn(retrieve_result)
m.StubOutWithMock(self._driver, '_create_backing')
volume = FakeObject()
volume['name'] = 'vol_name'
backing = FakeMor('VirtualMachine', 'my_back')
mux = self._driver._create_backing(volume, host1.obj)
mux.AndRaise(error_util.VimException('Maintenance mode'))
mux = self._driver._create_backing(volume, host3.obj)
mux.AndRaise(error_util.VimFaultException(
[], 'Bad host connection state'))
mux = self._driver._create_backing(volume, host2.obj)
mux.AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'cancel_retrieval')
self._volumeops.cancel_retrieval(retrieve_result)
m.StubOutWithMock(self._volumeops, 'continue_retrieval')
m.ReplayAll()
result = self._driver._create_backing_in_inventory(volume)
self.assertEqual(result, backing)
m.UnsetStubs()
m.VerifyAll()
def test_get_datastore(self):
"""Test get_datastore."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
backing = FakeMor('VirtualMachine', 'my_back')
datastore = FakeMor('Datastore', 'my_ds')
datastores = FakeManagedObjectReference([datastore])
self._session.invoke_api(vim_util, 'get_object_property', self._vim,
backing, 'datastore').AndReturn(datastores)
m.ReplayAll()
result = self._volumeops.get_datastore(backing)
self.assertEqual(result, datastore)
m.UnsetStubs()
m.VerifyAll()
def test_get_summary(self):
"""Test get_summary."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
datastore = FakeMor('Datastore', 'my_ds')
self._session.invoke_api(vim_util, 'get_object_property', self._vim,
datastore, 'summary')
m.ReplayAll()
self._volumeops.get_summary(datastore)
m.UnsetStubs()
m.VerifyAll()
def test_init_conn_with_instance_and_backing(self):
"""Test initialize_connection with instance and backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
volume['name'] = 'volume_name'
volume['id'] = 'volume_id'
volume['size'] = 1
connector = {'instance': 'my_instance'}
backing = FakeMor('VirtualMachine', 'my_back')
self._volumeops.get_backing(volume['name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'get_host')
host = FakeMor('HostSystem', 'my_host')
self._volumeops.get_host(mox.IgnoreArg()).AndReturn(host)
m.ReplayAll()
conn_info = self._driver.initialize_connection(volume, connector)
self.assertEqual(conn_info['driver_volume_type'], 'vmdk')
self.assertEqual(conn_info['data']['volume'], 'my_back')
self.assertEqual(conn_info['data']['volume_id'], 'volume_id')
m.UnsetStubs()
m.VerifyAll()
def test_get_volume_group_folder(self):
"""Test _get_volume_group_folder."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
datacenter = FakeMor('Datacenter', 'my_dc')
m.StubOutWithMock(self._volumeops, 'get_vmfolder')
self._volumeops.get_vmfolder(datacenter)
m.ReplayAll()
self._driver._get_volume_group_folder(datacenter)
m.UnsetStubs()
m.VerifyAll()
def test_select_datastore_summary(self):
"""Test _select_datastore_summary."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
datastore1 = FakeMor('Datastore', 'my_ds_1')
datastore2 = FakeMor('Datastore', 'my_ds_2')
datastore3 = FakeMor('Datastore', 'my_ds_3')
datastore4 = FakeMor('Datastore', 'my_ds_4')
datastores = [datastore1, datastore2, datastore3, datastore4]
m.StubOutWithMock(self._volumeops, 'get_summary')
summary1 = FakeDatastoreSummary(10, 10)
summary2 = FakeDatastoreSummary(25, 50)
summary3 = FakeDatastoreSummary(50, 50)
summary4 = FakeDatastoreSummary(100, 100)
moxd = self._volumeops.get_summary(datastore1)
moxd.MultipleTimes().AndReturn(summary1)
moxd = self._volumeops.get_summary(datastore2)
moxd.MultipleTimes().AndReturn(summary2)
moxd = self._volumeops.get_summary(datastore3)
moxd.MultipleTimes().AndReturn(summary3)
moxd = self._volumeops.get_summary(datastore4)
moxd.MultipleTimes().AndReturn(summary4)
m.ReplayAll()
summary = self._driver._select_datastore_summary(1, datastores)
self.assertEqual(summary, summary1)
summary = self._driver._select_datastore_summary(10, datastores)
self.assertEqual(summary, summary3)
summary = self._driver._select_datastore_summary(50, datastores)
self.assertEqual(summary, summary4)
self.assertRaises(error_util.VimException,
self._driver._select_datastore_summary,
100, datastores)
m.UnsetStubs()
m.VerifyAll()
def test_get_folder_ds_summary(self):
"""Test _get_folder_ds_summary."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
size = 1
resource_pool = FakeMor('ResourcePool', 'my_rp')
datacenter = FakeMor('Datacenter', 'my_dc')
m.StubOutWithMock(self._volumeops, 'get_dc')
self._volumeops.get_dc(resource_pool).AndReturn(datacenter)
m.StubOutWithMock(self._driver, '_get_volume_group_folder')
folder = FakeMor('Folder', 'my_fol')
self._driver._get_volume_group_folder(datacenter).AndReturn(folder)
m.StubOutWithMock(self._driver, '_select_datastore_summary')
size = 1
datastores = [FakeMor('Datastore', 'my_ds')]
self._driver._select_datastore_summary(size * units.GiB, datastores)
m.ReplayAll()
self._driver._get_folder_ds_summary(size, resource_pool, datastores)
m.UnsetStubs()
m.VerifyAll()
def test_get_disk_type(self):
"""Test _get_disk_type."""
volume = FakeObject()
volume['volume_type_id'] = None
self.assertEqual(vmdk.VMwareEsxVmdkDriver._get_disk_type(volume),
'thin')
def test_init_conn_with_instance_no_backing(self):
"""Test initialize_connection with instance and without backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
volume['name'] = 'volume_name'
volume['id'] = 'volume_id'
volume['size'] = 1
volume['volume_type_id'] = None
connector = {'instance': 'my_instance'}
self._volumeops.get_backing(volume['name'])
m.StubOutWithMock(self._volumeops, 'get_host')
host = FakeMor('HostSystem', 'my_host')
self._volumeops.get_host(mox.IgnoreArg()).AndReturn(host)
m.StubOutWithMock(self._volumeops, 'get_dss_rp')
resource_pool = FakeMor('ResourcePool', 'my_rp')
datastores = [FakeMor('Datastore', 'my_ds')]
self._volumeops.get_dss_rp(host).AndReturn((datastores, resource_pool))
m.StubOutWithMock(self._driver, '_get_folder_ds_summary')
folder = FakeMor('Folder', 'my_fol')
summary = FakeDatastoreSummary(1, 1)
self._driver._get_folder_ds_summary(volume['size'], resource_pool,
datastores).AndReturn((folder,
summary))
backing = FakeMor('VirtualMachine', 'my_back')
m.StubOutWithMock(self._volumeops, 'create_backing')
self._volumeops.create_backing(volume['name'],
volume['size'] * units.MiB,
mox.IgnoreArg(), folder,
resource_pool, host,
mox.IgnoreArg()).AndReturn(backing)
m.ReplayAll()
conn_info = self._driver.initialize_connection(volume, connector)
self.assertEqual(conn_info['driver_volume_type'], 'vmdk')
self.assertEqual(conn_info['data']['volume'], 'my_back')
self.assertEqual(conn_info['data']['volume_id'], 'volume_id')
m.UnsetStubs()
m.VerifyAll()
def test_init_conn_without_instance(self):
"""Test initialize_connection without instance and a backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
backing = FakeMor('VirtualMachine', 'my_back')
volume = FakeObject()
volume['name'] = 'volume_name'
volume['id'] = 'volume_id'
connector = {}
self._volumeops.get_backing(volume['name']).AndReturn(backing)
m.ReplayAll()
conn_info = self._driver.initialize_connection(volume, connector)
self.assertEqual(conn_info['driver_volume_type'], 'vmdk')
self.assertEqual(conn_info['data']['volume'], 'my_back')
self.assertEqual(conn_info['data']['volume_id'], 'volume_id')
m.UnsetStubs()
m.VerifyAll()
def test_create_snapshot_operation(self):
"""Test volumeops.create_snapshot."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
name = 'snapshot_name'
description = 'snapshot_desc'
backing = FakeMor('VirtualMachine', 'my_back')
task = FakeMor('Task', 'my_task')
self._session.invoke_api(self._vim, 'CreateSnapshot_Task', backing,
name=name, description=description,
memory=False, quiesce=False).AndReturn(task)
result = FakeMor('VirtualMachineSnapshot', 'my_snap')
success_task_info = FakeTaskInfo('success', result=result)
m.StubOutWithMock(self._session, 'wait_for_task')
self._session.wait_for_task(task).AndReturn(success_task_info)
m.ReplayAll()
self._volumeops.create_snapshot(backing, name, description)
m.UnsetStubs()
m.VerifyAll()
def test_create_snapshot_without_backing(self):
"""Test vmdk.create_snapshot without backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
snapshot = FakeObject()
snapshot['volume_name'] = 'volume_name'
snapshot['name'] = 'snap_name'
snapshot['volume'] = FakeObject()
snapshot['volume']['status'] = 'available'
self._volumeops.get_backing(snapshot['volume_name'])
m.ReplayAll()
self._driver.create_snapshot(snapshot)
m.UnsetStubs()
m.VerifyAll()
def test_create_snapshot_with_backing(self):
"""Test vmdk.create_snapshot with backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
snapshot = FakeObject()
snapshot['volume_name'] = 'volume_name'
snapshot['name'] = 'snapshot_name'
snapshot['display_description'] = 'snapshot_desc'
snapshot['volume'] = FakeObject()
snapshot['volume']['status'] = 'available'
backing = FakeMor('VirtualMachine', 'my_back')
self._volumeops.get_backing(snapshot['volume_name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'create_snapshot')
self._volumeops.create_snapshot(backing, snapshot['name'],
snapshot['display_description'])
m.ReplayAll()
self._driver.create_snapshot(snapshot)
m.UnsetStubs()
m.VerifyAll()
def test_create_snapshot_when_attached(self):
"""Test vmdk.create_snapshot when volume is attached."""
snapshot = FakeObject()
snapshot['volume'] = FakeObject()
snapshot['volume']['status'] = 'in-use'
self.assertRaises(exception.InvalidVolume,
self._driver.create_snapshot, snapshot)
def test_get_snapshot_from_tree(self):
"""Test _get_snapshot_from_tree."""
volops = volumeops.VMwareVolumeOps
ret = volops._get_snapshot_from_tree(mox.IgnoreArg(), None)
self.assertEqual(ret, None)
name = 'snapshot_name'
snapshot = FakeMor('VirtualMachineSnapshot', 'my_snap')
root = FakeSnapshotTree(name='snapshot_name', snapshot=snapshot)
ret = volops._get_snapshot_from_tree(name, root)
self.assertEqual(ret, snapshot)
snapshot1 = FakeMor('VirtualMachineSnapshot', 'my_snap_1')
root = FakeSnapshotTree(name='snapshot_name_1', snapshot=snapshot1,
childSnapshotList=[root])
ret = volops._get_snapshot_from_tree(name, root)
self.assertEqual(ret, snapshot)
def test_get_snapshot(self):
"""Test get_snapshot."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
name = 'snapshot_name'
backing = FakeMor('VirtualMachine', 'my_back')
root = FakeSnapshotTree()
tree = FakeSnapshotTree(tree=[root])
self._session.invoke_api(vim_util, 'get_object_property',
self._session.vim, backing,
'snapshot').AndReturn(tree)
volops = volumeops.VMwareVolumeOps
m.StubOutWithMock(volops, '_get_snapshot_from_tree')
volops._get_snapshot_from_tree(name, root)
m.ReplayAll()
self._volumeops.get_snapshot(backing, name)
m.UnsetStubs()
m.VerifyAll()
def test_delete_snapshot_not_present(self):
"""Test volumeops.delete_snapshot, when not present."""
m = self.mox
m.StubOutWithMock(self._volumeops, 'get_snapshot')
name = 'snapshot_name'
backing = FakeMor('VirtualMachine', 'my_back')
self._volumeops.get_snapshot(backing, name)
m.ReplayAll()
self._volumeops.delete_snapshot(backing, name)
m.UnsetStubs()
m.VerifyAll()
def test_delete_snapshot_when_present(self):
"""Test volumeops.delete_snapshot, when it is present."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
m.StubOutWithMock(self._volumeops, 'get_snapshot')
name = 'snapshot_name'
backing = FakeMor('VirtualMachine', 'my_back')
snapshot = FakeMor('VirtualMachineSnapshot', 'my_snap')
self._volumeops.get_snapshot(backing, name).AndReturn(snapshot)
task = FakeMor('Task', 'my_task')
self._session.invoke_api(self._session.vim,
'RemoveSnapshot_Task', snapshot,
removeChildren=False).AndReturn(task)
m.StubOutWithMock(self._session, 'wait_for_task')
self._session.wait_for_task(task)
m.ReplayAll()
self._volumeops.delete_snapshot(backing, name)
m.UnsetStubs()
m.VerifyAll()
def test_delete_snapshot_without_backing(self):
"""Test delete_snapshot without backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
snapshot = FakeObject()
snapshot['volume_name'] = 'volume_name'
snapshot['name'] = 'snap_name'
snapshot['volume'] = FakeObject()
snapshot['volume']['status'] = 'available'
self._volumeops.get_backing(snapshot['volume_name'])
m.ReplayAll()
self._driver.delete_snapshot(snapshot)
m.UnsetStubs()
m.VerifyAll()
def test_delete_snapshot_with_backing(self):
"""Test delete_snapshot with backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
snapshot = FakeObject()
snapshot['name'] = 'snapshot_name'
snapshot['volume_name'] = 'volume_name'
snapshot['name'] = 'snap_name'
snapshot['volume'] = FakeObject()
snapshot['volume']['status'] = 'available'
backing = FakeMor('VirtualMachine', 'my_back')
self._volumeops.get_backing(snapshot['volume_name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'delete_snapshot')
self._volumeops.delete_snapshot(backing,
snapshot['name'])
m.ReplayAll()
self._driver.delete_snapshot(snapshot)
m.UnsetStubs()
m.VerifyAll()
def test_delete_snapshot_when_attached(self):
"""Test delete_snapshot when volume is attached."""
snapshot = FakeObject()
snapshot['volume'] = FakeObject()
snapshot['volume']['status'] = 'in-use'
self.assertRaises(exception.InvalidVolume,
self._driver.delete_snapshot, snapshot)
def test_create_cloned_volume_without_backing(self):
"""Test create_cloned_volume without a backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
volume['name'] = 'volume_name'
volume['status'] = 'available'
src_vref = FakeObject()
src_vref['name'] = 'src_volume_name'
self._volumeops.get_backing(src_vref['name'])
m.ReplayAll()
self._driver.create_cloned_volume(volume, src_vref)
m.UnsetStubs()
m.VerifyAll()
def test_get_path_name(self):
"""Test get_path_name."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
backing = FakeMor('VirtualMachine', 'my_back')
class FakePath(object):
def __init__(self, path=None):
self.vmPathName = path
path = FakePath()
self._session.invoke_api(vim_util, 'get_object_property', self._vim,
backing, 'config.files').AndReturn(path)
m.ReplayAll()
self._volumeops.get_path_name(backing)
m.UnsetStubs()
m.VerifyAll()
def test_delete_file(self):
"""Test _delete_file."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
src_path = 'src_path'
task = FakeMor('Task', 'my_task')
self._session.invoke_api(self._vim, 'DeleteDatastoreFile_Task',
mox.IgnoreArg(), name=src_path,
datacenter=mox.IgnoreArg()).AndReturn(task)
m.StubOutWithMock(self._session, 'wait_for_task')
self._session.wait_for_task(task)
m.ReplayAll()
self._volumeops.delete_file(src_path)
m.UnsetStubs()
m.VerifyAll()
def test_clone_backing_by_copying(self):
"""Test _clone_backing_by_copying."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
volume = FakeObject()
src_vmdk_path = "[datastore] src_vm/src_vm.vmdk"
new_vmdk_path = "[datastore] dest_vm/dest_vm.vmdk"
backing = FakeMor('VirtualMachine', 'my_back')
m.StubOutWithMock(self._driver, '_create_backing_in_inventory')
mux = self._driver._create_backing_in_inventory(volume)
mux.AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'get_vmdk_path')
self._volumeops.get_vmdk_path(backing).AndReturn(new_vmdk_path)
m.StubOutWithMock(self._volumeops, 'get_dc')
datacenter = FakeMor('Datacenter', 'my_dc')
self._volumeops.get_dc(backing).AndReturn(datacenter)
m.StubOutWithMock(self._volumeops, 'delete_vmdk_file')
self._volumeops.delete_vmdk_file(new_vmdk_path, datacenter)
m.StubOutWithMock(self._volumeops, 'copy_vmdk_file')
self._volumeops.copy_vmdk_file(datacenter, src_vmdk_path,
new_vmdk_path)
m.ReplayAll()
self._driver._clone_backing_by_copying(volume, src_vmdk_path)
m.UnsetStubs()
m.VerifyAll()
def test_create_cloned_volume_with_backing(self):
"""Test create_cloned_volume with a backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
src_vref = FakeObject()
src_vref['name'] = 'src_snapshot_name'
backing = FakeMor('VirtualMachine', 'my_vm')
self._volumeops.get_backing(src_vref['name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'get_vmdk_path')
src_vmdk_path = "[datastore] src_vm/src_vm.vmdk"
self._volumeops.get_vmdk_path(backing).AndReturn(src_vmdk_path)
m.StubOutWithMock(self._driver, '_clone_backing_by_copying')
self._driver._clone_backing_by_copying(volume, src_vmdk_path)
m.ReplayAll()
self._driver.create_cloned_volume(volume, src_vref)
m.UnsetStubs()
m.VerifyAll()
def test_create_volume_from_snapshot_without_backing(self):
"""Test create_volume_from_snapshot without a backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
volume['name'] = 'volume_name'
snapshot = FakeObject()
snapshot['volume_name'] = 'volume_name'
snapshot['name'] = 'snap_name'
self._volumeops.get_backing(snapshot['volume_name'])
m.ReplayAll()
self._driver.create_volume_from_snapshot(volume, snapshot)
m.UnsetStubs()
m.VerifyAll()
def test_create_volume_from_snap_without_backing_snap(self):
"""Test create_volume_from_snapshot without a backing snapshot."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
backing = FakeMor('VirtualMachine', 'my_vm')
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
volume['name'] = 'volume_name'
snapshot = FakeObject()
snapshot['volume_name'] = 'volume_name'
self._volumeops.get_backing(snapshot['volume_name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'get_snapshot')
snapshot['name'] = 'snapshot_name'
self._volumeops.get_snapshot(backing, snapshot['name'])
m.ReplayAll()
self._driver.create_volume_from_snapshot(volume, snapshot)
m.UnsetStubs()
m.VerifyAll()
def test_create_volume_from_snapshot(self):
"""Test create_volume_from_snapshot."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
backing = FakeMor('VirtualMachine', 'my_vm')
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
snapshot = FakeObject()
snapshot['volume_name'] = 'volume_name'
self._volumeops.get_backing(snapshot['volume_name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'get_snapshot')
snapshot['name'] = 'snapshot_name'
snapshot_mor = FakeMor('VirtualMachineSnapshot', 'my_snap')
self._volumeops.get_snapshot(backing,
snapshot['name']).AndReturn(snapshot_mor)
m.StubOutWithMock(self._volumeops, 'get_vmdk_path')
src_vmdk_path = "[datastore] src_vm/src_vm-001.vmdk"
self._volumeops.get_vmdk_path(snapshot_mor).AndReturn(src_vmdk_path)
m.StubOutWithMock(self._driver, '_clone_backing_by_copying')
self._driver._clone_backing_by_copying(volume, src_vmdk_path)
m.ReplayAll()
self._driver.create_volume_from_snapshot(volume, snapshot)
m.UnsetStubs()
m.VerifyAll()
def test_get_entity_name(self):
"""Test volumeops get_entity_name."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
entity = FakeMor('VirtualMachine', 'virt')
self._session.invoke_api(vim_util, 'get_object_property',
self._vim, entity, 'name')
m.ReplayAll()
self._volumeops.get_entity_name(entity)
m.UnsetStubs()
m.VerifyAll()
def test_get_vmdk_path(self):
"""Test volumeops get_vmdk_path."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
backing = FakeMor('VirtualMachine', 'my_back')
vmdk_path = '[datastore 1] folders/myvols/volume-123.vmdk'
class VirtualDisk:
pass
virtualDisk = VirtualDisk()
class VirtualDiskFlatVer2BackingInfo:
pass
backingInfo = VirtualDiskFlatVer2BackingInfo()
backingInfo.fileName = vmdk_path
virtualDisk.backing = backingInfo
devices = [FakeObject(), virtualDisk, FakeObject()]
moxed = self._session.invoke_api(vim_util, 'get_object_property',
self._vim, backing,
'config.hardware.device')
moxed.AndReturn(devices)
m.ReplayAll()
actual_vmdk_path = self._volumeops.get_vmdk_path(backing)
self.assertEqual(backingInfo.__class__.__name__,
'VirtualDiskFlatVer2BackingInfo')
self.assertEqual(virtualDisk.__class__.__name__, 'VirtualDisk')
self.assertEqual(actual_vmdk_path, vmdk_path)
m.UnsetStubs()
m.VerifyAll()
def test_copy_vmdk_file(self):
"""Test copy_vmdk_file."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
dc_ref = FakeMor('Datacenter', 'dc1')
src_path = 'src_path'
dest_path = 'dest_path'
task = FakeMor('Task', 'my_task')
self._session.invoke_api(self._vim, 'CopyVirtualDisk_Task',
mox.IgnoreArg(), sourceName=src_path,
sourceDatacenter=dc_ref, destName=dest_path,
destDatacenter=dc_ref,
force=True).AndReturn(task)
m.StubOutWithMock(self._session, 'wait_for_task')
self._session.wait_for_task(task)
m.ReplayAll()
self._volumeops.copy_vmdk_file(dc_ref, src_path, dest_path)
m.UnsetStubs()
m.VerifyAll()
def test_delete_vmdk_file(self):
"""Test delete_vmdk_file."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
dc_ref = FakeMor('Datacenter', 'dc1')
vmdk_path = 'vmdk_path'
task = FakeMor('Task', 'my_task')
self._session.invoke_api(self._vim, 'DeleteVirtualDisk_Task',
mox.IgnoreArg(), name=vmdk_path,
datacenter=dc_ref).AndReturn(task)
m.StubOutWithMock(self._session, 'wait_for_task')
self._session.wait_for_task(task)
m.ReplayAll()
self._volumeops.delete_vmdk_file(vmdk_path, dc_ref)
m.UnsetStubs()
m.VerifyAll()
def test_split_datastore_path(self):
    """Exercise volumeops.split_datastore_path on valid and invalid paths."""
    # Nested folder path.
    ds, fol, fname = volumeops.split_datastore_path(
        '[datastore1] myfolder/mysubfolder/myvm.vmx')
    self.assertEqual(ds, 'datastore1')
    self.assertEqual(fol, 'myfolder/mysubfolder/')
    self.assertEqual(fname, 'myvm.vmx')
    # Trailing space inside the datastore brackets is tolerated.
    ds, fol, fname = volumeops.split_datastore_path(
        '[datastore2 ] myfolder/myvm.vmdk')
    self.assertEqual(ds, 'datastore2')
    self.assertEqual(fol, 'myfolder/')
    self.assertEqual(fname, 'myvm.vmdk')
    # A path without a datastore component is rejected.
    self.assertRaises(IndexError, volumeops.split_datastore_path,
                      'myfolder/myvm.vmdk')
def test_copy_image_to_volume_non_vmdk(self):
"""Test copy_image_to_volume for a non-vmdk disk format."""
m = self.mox
image_id = 'image-123456789'
image_meta = FakeObject()
image_meta['disk_format'] = 'novmdk'
image_service = m.CreateMock(glance.GlanceImageService)
image_service.show(mox.IgnoreArg(), image_id).AndReturn(image_meta)
m.ReplayAll()
self.assertRaises(exception.ImageUnacceptable,
self._driver.copy_image_to_volume,
mox.IgnoreArg(), mox.IgnoreArg(),
image_service, image_id)
m.UnsetStubs()
m.VerifyAll()
def test_copy_image_to_volume_vmdk(self):
"""Test copy_image_to_volume with an acceptable vmdk disk format."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'session')
self._driver.session = self._session
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
image_id = 'image-id'
image_meta = FakeObject()
image_meta['disk_format'] = 'vmdk'
image_meta['size'] = 1024 * 1024
image_service = m.CreateMock(glance.GlanceImageService)
image_service.show(mox.IgnoreArg(), image_id).AndReturn(image_meta)
volume = FakeObject()
vol_name = 'volume name'
volume['name'] = vol_name
backing = FakeMor('VirtualMachine', 'my_vm')
m.StubOutWithMock(self._driver, '_create_backing_in_inventory')
self._driver._create_backing_in_inventory(volume).AndReturn(backing)
datastore_name = 'datastore1'
flat_vmdk_path = 'myvolumes/myvm-flat.vmdk'
m.StubOutWithMock(self._driver, '_get_ds_name_flat_vmdk_path')
moxed = self._driver._get_ds_name_flat_vmdk_path(mox.IgnoreArg(),
vol_name)
moxed.AndReturn((datastore_name, flat_vmdk_path))
host = FakeMor('Host', 'my_host')
m.StubOutWithMock(self._volumeops, 'get_host')
self._volumeops.get_host(backing).AndReturn(host)
datacenter = FakeMor('Datacenter', 'my_datacenter')
m.StubOutWithMock(self._volumeops, 'get_dc')
self._volumeops.get_dc(host).AndReturn(datacenter)
datacenter_name = 'my-datacenter'
m.StubOutWithMock(self._volumeops, 'get_entity_name')
self._volumeops.get_entity_name(datacenter).AndReturn(datacenter_name)
flat_path = '[%s] %s' % (datastore_name, flat_vmdk_path)
m.StubOutWithMock(self._volumeops, 'delete_file')
self._volumeops.delete_file(flat_path, datacenter)
client = FakeObject()
client.options = FakeObject()
client.options.transport = FakeObject()
cookies = FakeObject()
client.options.transport.cookiejar = cookies
m.StubOutWithMock(self._vim.__class__, 'client')
self._vim.client = client
m.StubOutWithMock(vmware_images, 'fetch_image')
timeout = self._config.vmware_image_transfer_timeout_secs
vmware_images.fetch_image(mox.IgnoreArg(), timeout, image_service,
image_id, host=self.IP,
data_center_name=datacenter_name,
datastore_name=datastore_name,
cookies=cookies,
file_path=flat_vmdk_path)
m.ReplayAll()
self._driver.copy_image_to_volume(mox.IgnoreArg(), volume,
image_service, image_id)
m.UnsetStubs()
m.VerifyAll()
def test_copy_volume_to_image_non_vmdk(self):
"""Test copy_volume_to_image for a non-vmdk disk format."""
m = self.mox
image_meta = FakeObject()
image_meta['disk_format'] = 'novmdk'
volume = FakeObject()
volume['name'] = 'vol-name'
volume['instance_uuid'] = None
volume['attached_host'] = None
m.ReplayAll()
self.assertRaises(exception.ImageUnacceptable,
self._driver.copy_volume_to_image,
mox.IgnoreArg(), volume,
mox.IgnoreArg(), image_meta)
m.UnsetStubs()
m.VerifyAll()
def test_copy_volume_to_image_when_attached(self):
"""Test copy_volume_to_image when volume is attached."""
m = self.mox
volume = FakeObject()
volume['instance_uuid'] = 'my_uuid'
m.ReplayAll()
self.assertRaises(exception.InvalidVolume,
self._driver.copy_volume_to_image,
mox.IgnoreArg(), volume,
mox.IgnoreArg(), mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_copy_volume_to_image_vmdk(self):
"""Test copy_volume_to_image for a valid vmdk disk format."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'session')
self._driver.session = self._session
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
image_id = 'image-id-1'
image_meta = FakeObject()
image_meta['disk_format'] = 'vmdk'
image_meta['id'] = image_id
image_meta['name'] = image_id
image_service = FakeObject()
vol_name = 'volume-123456789'
project_id = 'project-owner-id-123'
volume = FakeObject()
volume['name'] = vol_name
volume['project_id'] = project_id
volume['instance_uuid'] = None
volume['attached_host'] = None
# volumeops.get_backing
backing = FakeMor("VirtualMachine", "my_vm")
m.StubOutWithMock(self._volumeops, 'get_backing')
self._volumeops.get_backing(vol_name).AndReturn(backing)
# volumeops.get_vmdk_path
datastore_name = 'datastore1'
file_path = 'my_folder/my_nested_folder/my_vm.vmdk'
vmdk_file_path = '[%s] %s' % (datastore_name, file_path)
m.StubOutWithMock(self._volumeops, 'get_vmdk_path')
self._volumeops.get_vmdk_path(backing).AndReturn(vmdk_file_path)
tmp_vmdk = '[datastore1] %s.vmdk' % image_id
# volumeops.get_host
host = FakeMor('Host', 'my_host')
m.StubOutWithMock(self._volumeops, 'get_host')
self._volumeops.get_host(backing).AndReturn(host)
# volumeops.get_dc
datacenter_name = 'my_datacenter'
datacenter = FakeMor('Datacenter', datacenter_name)
m.StubOutWithMock(self._volumeops, 'get_dc')
self._volumeops.get_dc(host).AndReturn(datacenter)
# volumeops.copy_vmdk_file
m.StubOutWithMock(self._volumeops, 'copy_vmdk_file')
self._volumeops.copy_vmdk_file(datacenter, vmdk_file_path, tmp_vmdk)
# host_ip
host_ip = self.IP
# volumeops.get_entity_name
m.StubOutWithMock(self._volumeops, 'get_entity_name')
self._volumeops.get_entity_name(datacenter).AndReturn(datacenter_name)
# cookiejar
client = FakeObject()
client.options = FakeObject()
client.options.transport = FakeObject()
cookies = FakeObject()
client.options.transport.cookiejar = cookies
m.StubOutWithMock(self._vim.__class__, 'client')
self._vim.client = client
# flat_vmdk
flat_vmdk_file = '%s-flat.vmdk' % image_id
# vmware_images.upload_image
timeout = self._config.vmware_image_transfer_timeout_secs
m.StubOutWithMock(vmware_images, 'upload_image')
vmware_images.upload_image(mox.IgnoreArg(), timeout, image_service,
image_id, project_id, host=host_ip,
data_center_name=datacenter_name,
datastore_name=datastore_name,
cookies=cookies,
file_path=flat_vmdk_file,
snapshot_name=image_meta['name'],
image_version=1)
# volumeops.delete_vmdk_file
m.StubOutWithMock(self._volumeops, 'delete_vmdk_file')
self._volumeops.delete_vmdk_file(tmp_vmdk, datacenter)
m.ReplayAll()
self._driver.copy_volume_to_image(mox.IgnoreArg(), volume,
image_service, image_meta)
m.UnsetStubs()
m.VerifyAll()
def test_retrieve_properties_ex_fault_checker(self):
"""Test retrieve_properties_ex_fault_checker is called."""
m = self.mox
class FakeVim(vim.Vim):
def __init__(self):
pass
@property
def client(self):
class FakeRetrv(object):
def RetrievePropertiesEx(self, collector):
pass
def __getattr__(self, name):
if name == 'service':
return FakeRetrv()
return FakeRetrv()
def RetrieveServiceContent(self, type='ServiceInstance'):
return mox.MockAnything()
_vim = FakeVim()
m.ReplayAll()
# retrieve_properties_ex_fault_checker throws authentication error
self.assertRaises(error_util.VimFaultException,
_vim.RetrievePropertiesEx, mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
class VMwareVcVmdkDriverTestCase(VMwareEsxVmdkDriverTestCase):
"""Test class for VMwareVcVmdkDriver."""
def setUp(self):
    """Run the base-class setup, then swap in the vCenter-flavored driver.

    This class re-runs the inherited ESX test cases against
    VMwareVcVmdkDriver.
    """
    super(VMwareVcVmdkDriverTestCase, self).setUp()
    self._driver = vmdk.VMwareVcVmdkDriver(configuration=self._config)
<|fim▁hole|> def test_create_folder_not_present(self):
"""Test create_folder when not present."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
parent_folder = FakeMor('Folder', 'my_par_fol')
child_entities = FakeManagedObjectReference()
self._session.invoke_api(vim_util, 'get_object_property',
self._vim, parent_folder,
'childEntity').AndReturn(child_entities)
self._session.invoke_api(self._vim, 'CreateFolder', parent_folder,
name='child_folder_name')
m.ReplayAll()
dc = self._volumeops.create_folder(parent_folder, 'child_folder_name')
m.UnsetStubs()
m.VerifyAll()
def test_create_folder_already_present(self):
    """Test create_folder when already present.

    If the parent folder already contains a child with the requested
    name, create_folder should return that existing folder rather than
    invoking CreateFolder.
    """
    m = self.mox
    m.StubOutWithMock(api.VMwareAPISession, 'vim')
    self._session.vim = self._vim
    m.StubOutWithMock(self._session, 'invoke_api')
    parent_folder = FakeMor('Folder', 'my_par_fol')
    child_folder = FakeMor('Folder', 'my_child_fol')
    child_entities = FakeManagedObjectReference([child_folder])
    # Listing the parent's children yields the existing child folder.
    self._session.invoke_api(vim_util, 'get_object_property',
                             self._vim, parent_folder,
                             'childEntity').AndReturn(child_entities)
    # The child's name matches the requested folder name.
    self._session.invoke_api(vim_util, 'get_object_property',
                             self._vim, child_folder,
                             'name').AndReturn('child_folder_name')
    m.ReplayAll()
    fol = self._volumeops.create_folder(parent_folder, 'child_folder_name')
    # The existing folder is returned as-is.
    self.assertEqual(fol, child_folder)
    m.UnsetStubs()
    m.VerifyAll()
def test_relocate_backing(self):
    """Test relocate_backing.

    Relocation should build a relocate spec with the
    'moveAllDiskBackingsAndAllowSharing' disk move type, invoke
    RelocateVM_Task on the backing, and wait for the task.
    """
    m = self.mox
    m.StubOutWithMock(api.VMwareAPISession, 'vim')
    self._session.vim = self._vim
    m.StubOutWithMock(self._volumeops, '_get_relocate_spec')
    datastore = FakeMor('Datastore', 'my_ds')
    resource_pool = FakeMor('ResourcePool', 'my_rp')
    host = FakeMor('HostSystem', 'my_host')
    disk_move_type = 'moveAllDiskBackingsAndAllowSharing'
    self._volumeops._get_relocate_spec(datastore, resource_pool, host,
                                       disk_move_type)
    m.StubOutWithMock(self._session, 'invoke_api')
    backing = FakeMor('VirtualMachine', 'my_back')
    task = FakeMor('Task', 'my_task')
    # RelocateVM_Task returns a task object that must be awaited.
    self._session.invoke_api(self._vim, 'RelocateVM_Task',
                             backing, spec=mox.IgnoreArg()).AndReturn(task)
    m.StubOutWithMock(self._session, 'wait_for_task')
    self._session.wait_for_task(task)
    m.ReplayAll()
    self._volumeops.relocate_backing(backing, datastore,
                                     resource_pool, host)
    m.UnsetStubs()
    m.VerifyAll()
def test_move_backing_to_folder(self):
"""Test move_backing_to_folder."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
backing = FakeMor('VirtualMachine', 'my_back')
folder = FakeMor('Folder', 'my_fol')
task = FakeMor('Task', 'my_task')
self._session.invoke_api(self._vim, 'MoveIntoFolder_Task',
folder, list=[backing]).AndReturn(task)
m.StubOutWithMock(self._session, 'wait_for_task')
self._session.wait_for_task(task)
m.ReplayAll()
self._volumeops.move_backing_to_folder(backing, folder)
m.UnsetStubs()
m.VerifyAll()
def test_init_conn_with_instance_and_backing(self):
"""Test initialize_connection with instance and backing."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
volume['name'] = 'volume_name'
volume['id'] = 'volume_id'
volume['size'] = 1
connector = {'instance': 'my_instance'}
backing = FakeMor('VirtualMachine', 'my_back')
self._volumeops.get_backing(volume['name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'get_host')
host = FakeMor('HostSystem', 'my_host')
self._volumeops.get_host(mox.IgnoreArg()).AndReturn(host)
datastore = FakeMor('Datastore', 'my_ds')
resource_pool = FakeMor('ResourcePool', 'my_rp')
m.StubOutWithMock(self._volumeops, 'get_dss_rp')
self._volumeops.get_dss_rp(host).AndReturn(([datastore],
resource_pool))
m.StubOutWithMock(self._volumeops, 'get_datastore')
self._volumeops.get_datastore(backing).AndReturn(datastore)
m.ReplayAll()
conn_info = self._driver.initialize_connection(volume, connector)
self.assertEqual(conn_info['driver_volume_type'], 'vmdk')
self.assertEqual(conn_info['data']['volume'], 'my_back')
self.assertEqual(conn_info['data']['volume_id'], 'volume_id')
m.UnsetStubs()
m.VerifyAll()
def test_get_volume_group_folder(self):
    """Test _get_volume_group_folder.

    The volume group folder is created (or fetched) under the
    datacenter's VM folder, using the configured
    vmware_volume_folder name.
    """
    m = self.mox
    m.StubOutWithMock(self._driver.__class__, 'volumeops')
    self._driver.volumeops = self._volumeops
    datacenter = FakeMor('Datacenter', 'my_dc')
    m.StubOutWithMock(self._volumeops, 'get_vmfolder')
    self._volumeops.get_vmfolder(datacenter)
    m.StubOutWithMock(self._volumeops, 'create_folder')
    # create_folder is called with the datacenter's vmFolder (ignored
    # here) and the configured volume folder name.
    self._volumeops.create_folder(mox.IgnoreArg(),
                                  self._config.vmware_volume_folder)
    m.ReplayAll()
    self._driver._get_volume_group_folder(datacenter)
    m.UnsetStubs()
    m.VerifyAll()
def test_init_conn_with_instance_and_backing_and_relocation(self):
"""Test initialize_connection with backing being relocated."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
volume = FakeObject()
volume['name'] = 'volume_name'
volume['id'] = 'volume_id'
volume['size'] = 1
connector = {'instance': 'my_instance'}
backing = FakeMor('VirtualMachine', 'my_back')
self._volumeops.get_backing(volume['name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'get_host')
host = FakeMor('HostSystem', 'my_host')
self._volumeops.get_host(mox.IgnoreArg()).AndReturn(host)
datastore1 = FakeMor('Datastore', 'my_ds_1')
datastore2 = FakeMor('Datastore', 'my_ds_2')
resource_pool = FakeMor('ResourcePool', 'my_rp')
m.StubOutWithMock(self._volumeops, 'get_dss_rp')
self._volumeops.get_dss_rp(host).AndReturn(([datastore1],
resource_pool))
m.StubOutWithMock(self._volumeops, 'get_datastore')
self._volumeops.get_datastore(backing).AndReturn(datastore2)
m.StubOutWithMock(self._driver, '_get_folder_ds_summary')
folder = FakeMor('Folder', 'my_fol')
summary = FakeDatastoreSummary(1, 1, datastore1)
size = 1
self._driver._get_folder_ds_summary(size, resource_pool,
[datastore1]).AndReturn((folder,
summary))
m.StubOutWithMock(self._volumeops, 'relocate_backing')
self._volumeops.relocate_backing(backing, datastore1,
resource_pool, host)
m.StubOutWithMock(self._volumeops, 'move_backing_to_folder')
self._volumeops.move_backing_to_folder(backing, folder)
m.ReplayAll()
conn_info = self._driver.initialize_connection(volume, connector)
self.assertEqual(conn_info['driver_volume_type'], 'vmdk')
self.assertEqual(conn_info['data']['volume'], 'my_back')
self.assertEqual(conn_info['data']['volume_id'], 'volume_id')
m.UnsetStubs()
m.VerifyAll()
def test_get_folder(self):
"""Test _get_folder."""
m = self.mox
m.StubOutWithMock(self._volumeops, '_get_parent')
self._volumeops._get_parent(mox.IgnoreArg(), 'Folder')
m.ReplayAll()
self._volumeops._get_folder(mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_volumeops_clone_backing(self):
"""Test volumeops.clone_backing."""
m = self.mox
m.StubOutWithMock(self._volumeops, '_get_parent')
backing = FakeMor('VirtualMachine', 'my_back')
folder = FakeMor('Folder', 'my_fol')
self._volumeops._get_folder(backing).AndReturn(folder)
m.StubOutWithMock(self._volumeops, '_get_clone_spec')
name = 'name'
snapshot = FakeMor('VirtualMachineSnapshot', 'my_snap')
datastore = FakeMor('Datastore', 'my_ds')
self._volumeops._get_clone_spec(datastore, mox.IgnoreArg(), snapshot)
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.StubOutWithMock(self._session, 'invoke_api')
task = FakeMor('Task', 'my_task')
self._session.invoke_api(self._vim, 'CloneVM_Task', backing,
folder=folder, name=name,
spec=mox.IgnoreArg()).AndReturn(task)
m.StubOutWithMock(self._session, 'wait_for_task')
clone = FakeMor('VirtualMachine', 'my_clone')
task_info = FakeTaskInfo('success', clone)
self._session.wait_for_task(task).AndReturn(task_info)
m.ReplayAll()
ret = self._volumeops.clone_backing(name, backing, snapshot,
mox.IgnoreArg(), datastore)
self.assertEqual(ret, clone)
m.UnsetStubs()
m.VerifyAll()
def test_clone_backing_linked(self):
"""Test _clone_backing with clone type - linked."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'clone_backing')
volume = FakeObject()
volume['name'] = 'volume_name'
self._volumeops.clone_backing(volume['name'], mox.IgnoreArg(),
mox.IgnoreArg(),
volumeops.LINKED_CLONE_TYPE,
mox.IgnoreArg())
m.ReplayAll()
self._driver._clone_backing(volume, mox.IgnoreArg(), mox.IgnoreArg(),
volumeops.LINKED_CLONE_TYPE)
m.UnsetStubs()
m.VerifyAll()
def test_clone_backing_full(self):
"""Test _clone_backing with clone type - full."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_host')
backing = FakeMor('VirtualMachine', 'my_vm')
host = FakeMor('HostSystem', 'my_host')
self._volumeops.get_host(backing).AndReturn(host)
m.StubOutWithMock(self._volumeops, 'get_dss_rp')
datastore = FakeMor('Datastore', 'my_ds')
datastores = [datastore]
resource_pool = FakeMor('ResourcePool', 'my_rp')
self._volumeops.get_dss_rp(host).AndReturn((datastores,
resource_pool))
m.StubOutWithMock(self._driver, '_select_datastore_summary')
volume = FakeObject()
volume['name'] = 'volume_name'
volume['size'] = 1
summary = FakeDatastoreSummary(1, 1, datastore=datastore)
self._driver._select_datastore_summary(volume['size'] * units.GiB,
datastores).AndReturn(summary)
m.StubOutWithMock(self._volumeops, 'clone_backing')
self._volumeops.clone_backing(volume['name'], backing,
mox.IgnoreArg(),
volumeops.FULL_CLONE_TYPE,
datastore)
m.ReplayAll()
self._driver._clone_backing(volume, backing, mox.IgnoreArg(),
volumeops.FULL_CLONE_TYPE)
m.UnsetStubs()
m.VerifyAll()
def test_create_volume_from_snapshot(self):
"""Test create_volume_from_snapshot."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'volumeops')
self._driver.volumeops = self._volumeops
m.StubOutWithMock(self._volumeops, 'get_backing')
snapshot = FakeObject()
snapshot['volume_name'] = 'volume_name'
snapshot['name'] = 'snapshot_name'
backing = FakeMor('VirtualMachine', 'my_back')
self._volumeops.get_backing(snapshot['volume_name']).AndReturn(backing)
m.StubOutWithMock(self._volumeops, 'get_snapshot')
snap_mor = FakeMor('VirtualMachineSnapshot', 'my_snap')
self._volumeops.get_snapshot(backing,
snapshot['name']).AndReturn(snap_mor)
volume = FakeObject()
volume['volume_type_id'] = None
m.StubOutWithMock(self._driver, '_clone_backing')
self._driver._clone_backing(volume, backing, snap_mor, mox.IgnoreArg())
m.ReplayAll()
self._driver.create_volume_from_snapshot(volume, snapshot)
m.UnsetStubs()
m.VerifyAll()
def test_create_cloned_volume_with_backing(self):
    """Test create_cloned_volume with clone type - full.

    When a backing exists for an available source volume, the driver
    should delegate to _clone_backing with FULL_CLONE_TYPE.
    """
    m = self.mox
    m.StubOutWithMock(self._driver.__class__, 'volumeops')
    self._driver.volumeops = self._volumeops
    m.StubOutWithMock(self._volumeops, 'get_backing')
    backing = FakeMor('VirtualMachine', 'my_back')
    src_vref = FakeObject()
    src_vref['name'] = 'src_vol_name'
    src_vref['status'] = 'available'
    self._volumeops.get_backing(src_vref['name']).AndReturn(backing)
    volume = FakeObject()
    volume['volume_type_id'] = None
    m.StubOutWithMock(self._driver, '_clone_backing')
    self._driver._clone_backing(volume, backing, mox.IgnoreArg(),
                                volumeops.FULL_CLONE_TYPE)
    m.ReplayAll()
    self._driver.create_cloned_volume(volume, src_vref)
    m.UnsetStubs()
    # Verify the recorded mock expectations were all satisfied; this was
    # missing here while present in every sibling test.
    m.VerifyAll()
def test_create_linked_cloned_volume_with_backing(self):
    """Test create_cloned_volume with clone type - linked.

    For a linked clone of an available source volume, the driver must
    first snapshot the backing (named 'snapshot-<volume id>'), then
    delegate to _clone_backing with LINKED_CLONE_TYPE.
    """
    m = self.mox
    m.StubOutWithMock(self._driver.__class__, 'volumeops')
    self._driver.volumeops = self._volumeops
    m.StubOutWithMock(self._volumeops, 'get_backing')
    backing = FakeMor('VirtualMachine', 'my_back')
    src_vref = FakeObject()
    src_vref['name'] = 'src_vol_name'
    src_vref['status'] = 'available'
    self._volumeops.get_backing(src_vref['name']).AndReturn(backing)
    volume = FakeObject()
    volume['id'] = 'volume_id'
    m.StubOutWithMock(vmdk.VMwareVcVmdkDriver, '_get_clone_type')
    moxed = vmdk.VMwareVcVmdkDriver._get_clone_type(volume)
    moxed.AndReturn(volumeops.LINKED_CLONE_TYPE)
    m.StubOutWithMock(self._volumeops, 'create_snapshot')
    name = 'snapshot-%s' % volume['id']
    snapshot = FakeMor('VirtualMachineSnapshot', 'my_snap')
    self._volumeops.create_snapshot(backing, name,
                                    None).AndReturn(snapshot)
    m.StubOutWithMock(self._driver, '_clone_backing')
    self._driver._clone_backing(volume, backing, snapshot,
                                volumeops.LINKED_CLONE_TYPE)
    m.ReplayAll()
    self._driver.create_cloned_volume(volume, src_vref)
    m.UnsetStubs()
    # Verify the recorded mock expectations were all satisfied; this was
    # missing here while present in every sibling test.
    m.VerifyAll()
def test_create_linked_cloned_volume_when_attached(self):
    """Test create_cloned_volume linked clone when volume is attached.

    A linked clone of an in-use source volume is not allowed and must
    raise InvalidVolume.
    """
    m = self.mox
    m.StubOutWithMock(self._driver.__class__, 'volumeops')
    self._driver.volumeops = self._volumeops
    m.StubOutWithMock(self._volumeops, 'get_backing')
    backing = FakeMor('VirtualMachine', 'my_back')
    src_vref = FakeObject()
    src_vref['name'] = 'src_vol_name'
    src_vref['status'] = 'in-use'
    volume = FakeObject()
    self._volumeops.get_backing(src_vref['name']).AndReturn(backing)
    m.StubOutWithMock(vmdk.VMwareVcVmdkDriver, '_get_clone_type')
    moxed = vmdk.VMwareVcVmdkDriver._get_clone_type(volume)
    # Clone type resolves to linked, which is invalid for attached volumes.
    moxed.AndReturn(volumeops.LINKED_CLONE_TYPE)
    m.ReplayAll()
    self.assertRaises(exception.InvalidVolume,
                      self._driver.create_cloned_volume, volume, src_vref)
    m.UnsetStubs()
    m.VerifyAll()
| |
<|file_name|>outgoing.js<|end_file_name|><|fim▁begin|>const _ = require('lodash')
const handleText = (event, next, botfmk) => {
if (event.platform !== 'botfmk' || event.type !== 'text') {
return next()
}
// TODO DEBUG
console.log('REPLIED', event.raw)
const session = event.session
const replied = event.raw
if (_.isArray(replied)) {
replied.forEach(r => session.send(r))
} else {
session.send(replied)<|fim▁hole|>
module.exports = {
'text': handleText
}<|fim▁end|>
|
}
return next()
}
|
<|file_name|>dc.rs<|end_file_name|><|fim▁begin|>/*!
DC (Distance Coding) forward and backward transformation.
Designed to be used on BWT block output for compression.
# Links
http://www.data-compression.info/Algorithms/DC/
# Example
```rust
use compress::bwt::dc;
let bytes = b"abracadabra";
let distances = dc::encode_simple::<usize>(bytes);
let decoded = dc::decode_simple(bytes.len(), &distances[..]);
```
# Credit
This is an original implementation.
Thanks to Edgar Binder for inventing DC!
*/
use std::io;
use std::iter::{self, repeat};
use std::slice as vec;
use super::num::traits::{NumCast, ToPrimitive};
use super::mtf::MTF;
pub type Symbol = u8;
pub type Rank = u8;
pub const TOTAL_SYMBOLS: usize = 0x100;
/// Distance coding context
/// Has all the information potentially needed by the underlying coding model
#[derive(PartialEq, Eq, Debug)]
pub struct Context {
/// current symbol
pub symbol: Symbol,
/// last known MTF rank
pub last_rank: Rank,
/// maximum possible distance
pub distance_limit: usize,
}
impl Context {
    /// Construct a context for symbol `s`, whose most recent MTF rank was
    /// `r`, with at most `dmax` positions remaining in the block.
    pub fn new(s: Symbol, r: Rank, dmax: usize) -> Context {
        Context {
            distance_limit: dmax,
            symbol: s,
            last_rank: r,
        }
    }
}
/// DC body iterator, can be used to encode distances
pub struct EncodeIterator<'a,'b, D: 'b> {
    // (position, (symbol, distance)) stream over the input block
    data: iter::Enumerate<iter::Zip<vec::Iter<'a,Symbol>,vec::Iter<'b, D>>>,
    // per-symbol predicted position of the symbol's next occurrence
    pos: [usize; TOTAL_SYMBOLS],
    // position just past the last distance that was emitted
    last_active: usize,
    // total input length; also serves as the "no distance" filler value
    size: usize,
}
impl<'a, 'b, D: NumCast> EncodeIterator<'a,'b, D> {
    /// create a new encode iterator
    ///
    /// `input` and `dist` must have equal length; `init` holds the
    /// per-symbol initial positions produced by `encode`.
    pub fn new(input: &'a [Symbol], dist: &'b [D], init: [usize; TOTAL_SYMBOLS]) -> EncodeIterator<'a,'b,D> {
        assert_eq!(input.len(), dist.len());
        EncodeIterator {
            data: input.iter().zip(dist.iter()).enumerate(),
            pos: init,
            last_active: 0,
            size: input.len()
        }
    }
    /// get the initial symbol positions, to be called before iteration
    pub fn get_init<'c>(&'c self) -> &'c [usize; TOTAL_SYMBOLS] {
        // last_active advances during iteration, so a non-zero value means
        // iteration already started and `pos` no longer holds the initials
        assert_eq!(self.last_active, 0);
        &self.pos
    }
}
impl<'a, 'b, D> Iterator for EncodeIterator<'a,'b,D>
    where D: Clone + Eq + NumCast + 'b
{
    type Item = (D, Context);
    fn next(&mut self) -> Option<(D,Context)> {
        // `encode` fills positions that carry no distance with the block
        // size; skip those entries here.
        let filler: D = NumCast::from(self.size).unwrap();
        self.data.find(|&(_,(_,d))| *d != filler).map(|(i,(sym,d))| {
            // Recover the symbol's rank from how far its predicted position
            // lags behind the last emitted position.
            let rank = self.last_active - self.pos[*sym as usize];
            assert!(rank < TOTAL_SYMBOLS);
            self.last_active = i+1;
            // Predict the symbol's next occurrence from the distance.
            self.pos[*sym as usize] = i + 1 + d.to_usize().unwrap();
            debug!("Encoding distance {} at pos {} for symbol {}, computed rank {}, predicting next at {}",
                d.to_usize().unwrap(), i, *sym, rank, self.pos[*sym as usize]);
            (d.clone(), Context::new(*sym, rank as Rank, self.size-i))
        })
    }
}
/// Encode a block of bytes 'input'
/// write output distance stream into 'distances'
/// return: unique bytes encountered in the order they appear
/// with the corresponding initial distances
pub fn encode<'a, 'b, D: Clone + Copy + Eq + NumCast>(input: &'a [Symbol], distances: &'b mut [D], mtf: &mut MTF) -> EncodeIterator<'a,'b,D> {
    let n = input.len();
    assert_eq!(distances.len(), n);
    let mut num_unique = 0;
    // Per-symbol bookkeeping: `last` is the most recent position seen,
    // `init` the first occurrence; `n` doubles as the "not seen" marker.
    let mut last = [n; TOTAL_SYMBOLS];
    let mut init = [n; TOTAL_SYMBOLS];
    // Positions carrying no distance keep this filler; EncodeIterator
    // skips them when iterating the result.
    let filler: D = NumCast::from(n).unwrap();
    for (i,&sym) in input.iter().enumerate() {
        distances[i] = filler.clone();
        let base = last[sym as usize];
        last[sym as usize] = i;
        debug!("\tProcessing symbol {} at position {}, last known at {}", sym, i, base);
        if base == n {
            // First occurrence: register the symbol in the MTF list and
            // remember where it appeared.
            let rank = num_unique;
            mtf.symbols[rank] = sym;
            mtf.encode(sym); //==rank
            // initial distances are not ordered to support re-shuffle
            debug!("\t\tUnique => assigning rank {}, encoding {}", rank, i);
            init[sym as usize] = i;
            num_unique += 1;
        }else {
            // Repeat occurrence: store, at the previous occurrence `base`,
            // the rank-adjusted distance to here (rank 0 stores nothing).
            let rank = mtf.encode(sym) as usize;
            if rank > 0 {
                debug!("\t\tRegular at rank {}, encoding {}", rank, i-base-rank-1);
                assert!(i >= base+rank+1);
                distances[base] = NumCast::from(i-base-rank-1).unwrap();
            }
        }
    }
    // Close every open run: each symbol's final occurrence gets a distance
    // pointing past the end of the block.
    for (rank,&sym) in mtf.symbols[..num_unique].iter().enumerate() {
        let base = last[sym as usize];
        debug!("\tSweep symbol {} of rank {}, last known at {}, encoding {}", sym, rank, base, n-base-rank-1);
        assert!(n >= base+rank+1);
        distances[base] = NumCast::from(n-base-rank-1).unwrap();
    }
    // a basic but expensive check, to be improved
    //assert_eq!(input.iter().zip(input.iter().skip(1)).zip(distances.iter()).
    //	position(|((&a,&b),d)| *d==filler && a!=b), None);
    EncodeIterator::new(input, distances, init)
}
/// Encode version with "batteries included" for quick testing
///
/// Returns one vector holding the TOTAL_SYMBOLS initial positions
/// followed by the distance stream.
pub fn encode_simple<D: Clone + Copy + Eq + NumCast>(input: &[Symbol]) -> Vec<D> {
    let len = input.len();
    let zero: D = NumCast::from(0).unwrap();
    let mut scratch: Vec<D> = repeat(zero).take(len).collect();
    let mut body = encode(input, &mut scratch, &mut MTF::new());
    // Emit the per-symbol initial positions first, then the distances.
    let mut out: Vec<D> = Vec::with_capacity(TOTAL_SYMBOLS + len);
    for sym in 0..TOTAL_SYMBOLS {
        out.push(NumCast::from(body.get_init()[sym]).unwrap());
    }
    out.extend(body.by_ref().map(|(d, _)| d));
    out
}
/// Decode a block of distances given the initial symbol positions
pub fn decode<F>(mut next: [usize; TOTAL_SYMBOLS], output: &mut [Symbol], mtf: &mut MTF,
        mut fn_dist: F) -> io::Result<()>
    where F: FnMut(Context) -> io::Result<usize>
{
    let n = output.len();
    let mut i = 0;
    // Build the working alphabet: insertion-sort the symbols that actually
    // occur (initial position < n) by their first-occurrence position.
    for (sym,d) in next.iter().enumerate() {
        if *d < n {
            let mut j = i;
            while j>0 && next[mtf.symbols[j-1] as usize] > *d {
                mtf.symbols[j] = mtf.symbols[j-1];
                j -= 1;
            }
            mtf.symbols[j] = sym as Symbol;
            i += 1;
        }
    }
    if i<=1 {
        // redundant alphabet case: zero or one symbol fills the whole block
        let sym = mtf.symbols[0];
        for out in output.iter_mut() {
            *out = sym;
        }
        return Ok(())
    }
    let alphabet_size = i;
    let mut ranks = [0 as Rank; TOTAL_SYMBOLS];
    for rank in 0..i {
        let sym = mtf.symbols[rank];
        debug!("\tRegistering symbol {} of rank {} at position {}",
            sym, rank, next[sym as usize]);
        ranks[sym as usize] = 0; //could use 'rank' but don't know how to derive it during encoding
    }
    i = 0;
    // Main fill loop: the front symbol of the MTF list owns the region up
    // to the second symbol's next position; after filling it, ask the model
    // (fn_dist) for the distance to the front symbol's next occurrence.
    while i<n {
        let sym = mtf.symbols[0];
        let stop = next[mtf.symbols[1] as usize];
        debug!("\tFilling region [{}-{}) with symbol {}", i, stop, sym);
        while i<stop {
            output[i] = sym;
            i += 1;
        }
        let ctx = Context::new(sym, ranks[sym as usize], n+1-i);
        let future = match fn_dist(ctx) {
            Ok(d) => stop + d,
            Err(e) => return Err(e)
        };
        debug!("\t\tLooking for future position {}", future);
        assert!(future <= n);
        // Re-insert `sym` into the MTF list at the slot matching its new
        // position, shifting nearer symbols towards the front.
        let mut rank = 1;
        while rank < alphabet_size && future+rank > next[mtf.symbols[rank] as usize] {
            mtf.symbols[rank-1] = mtf.symbols[rank];
            rank += 1;
        }
        if rank < alphabet_size {
            debug!("\t\tFound sym {} of rank {} at position {}", mtf.symbols[rank],
                rank, next[mtf.symbols[rank] as usize]);
        }else {
            debug!("\t\tNot found");
        }
        mtf.symbols[rank-1] = sym;
        debug!("\t\tAssigning future pos {} for symbol {}", future+rank-1, sym);
        next[sym as usize] = future+rank-1;
        ranks[sym as usize] = (rank-1) as Rank;
    }
    // Every symbol's run must have terminated just past the block end.
    assert_eq!(next.iter().position(|&d| d<n || d>=n+alphabet_size), None);
    assert_eq!(i, n);
    Ok(())
}
/// Decode version with "batteries included" for quick testing
///
/// `n` is the expected output length; `distances` holds the TOTAL_SYMBOLS
/// initial symbol positions followed by the distance stream (the layout
/// produced by `encode_simple`).
pub fn decode_simple<D: ToPrimitive>(n: usize, distances: &[D]) -> Vec<Symbol> {
    let mut output: Vec<Symbol> = repeat(0 as Symbol).take(n).collect();
    // The first TOTAL_SYMBOLS entries carry the initial positions.
    let mut init = [0; TOTAL_SYMBOLS];
    for i in 0..TOTAL_SYMBOLS {
        init[i] = distances[i].to_usize().unwrap();
    }
    let mut di = TOTAL_SYMBOLS;
    decode(init, &mut output[..], &mut MTF::new(), |_ctx| {
        di += 1;
        if di > distances.len() {
            Err(io::Error::new(io::ErrorKind::Other, "Unexpected end of file"))
        } else {
            Ok(distances[di-1].to_usize().unwrap())
        }
    }).unwrap();
    // `output` is already the decoded byte vector; the original rebuilt it
    // via `into_iter().collect()`, which is a needless pass.
    output
}
#[cfg(test)]
mod test {
use std::iter::repeat;<|fim▁hole|> info!("Roundtrip DC of size {}", bytes.len());
let distances = super::encode_simple::<usize>(bytes);
debug!("Roundtrip DC input: {:?}, distances: {:?}", bytes, distances);
let decoded = super::decode_simple(bytes.len(), &distances[..]);
assert_eq!(&decoded[..], bytes);
}
/// rountrip version that compares the coding contexts on the way
fn roundtrip_ctx(bytes: &[u8]) {
let n = bytes.len();
info!("Roundtrip DC context of size {}", n);
let mut mtf = super::super::mtf::MTF::new();
let mut raw_dist: Vec<u16> = repeat(0).take(n).collect();
let eniter = super::encode(bytes, &mut raw_dist[..], &mut mtf);
let mut init = [0; super::TOTAL_SYMBOLS];
for i in 0..super::TOTAL_SYMBOLS {
init[i] = eniter.get_init()[i];
}
// implicit iterator copies, or we can gather in one pass and then split
let (distances, contexts): (Vec<_>, Vec<_>) = eniter.unzip();
let mut output: Vec<u8> = repeat(0).take(n).collect();
let mut di = 0;
super::decode(init, &mut output[..], &mut mtf, |ctx| {
assert_eq!(contexts[di], ctx);
di += 1;
Ok(distances[di-1] as usize)
}).unwrap();
assert_eq!(di, distances.len());
assert_eq!(&output[..], bytes);
}
#[test]
fn roundtrips() {
roundtrip(b"teeesst_dc");
roundtrip(b"");
roundtrip(include_bytes!("../data/test.txt"));
}
#[test]
fn roundtrips_context() {
roundtrip_ctx(b"teeesst_dc");
roundtrip_ctx(b"../data/test.txt");
}
}<|fim▁end|>
|
fn roundtrip(bytes: &[u8]) {
|
<|file_name|>union-abi.rs<|end_file_name|><|fim▁begin|>// ignore-emscripten vectors passed directly
// compile-flags: -C no-prepopulate-passes
// This test that using union forward the abi of the inner type, as
// discussed in #54668
#![crate_type="lib"]
#![feature(repr_simd)]
// Zero-variant (uninhabited) enum, used below as a union field that should
// not disturb the forwarded SIMD ABI.
#[derive(Copy, Clone)]
pub enum Unhab {}
// 4-lane i64 SIMD vector: the payload whose ABI the unions should forward.
#[repr(simd)]
#[derive(Copy, Clone)]
pub struct i64x4(i64, i64, i64, i64);
#[derive(Copy, Clone)]
pub union UnionI64x4{ a:(), b: i64x4 }
// CHECK: define void @test_UnionI64x4(<4 x i64>* {{.*}} %_1)
#[no_mangle]
pub fn test_UnionI64x4(_: UnionI64x4) { loop {} }
// Zero-sized and uninhabited fields should leave the ABI unchanged too.
pub union UnionI64x4_{ a: i64x4, b: (), c:i64x4, d: Unhab, e: ((),()), f: UnionI64x4 }
// CHECK: define void @test_UnionI64x4_(<4 x i64>* {{.*}} %_1)
#[no_mangle]
pub fn test_UnionI64x4_(_: UnionI64x4_) { loop {} }
pub union UnionI64x4I64{ a: i64x4, b: i64 }
// CHECK: define void @test_UnionI64x4I64(%UnionI64x4I64* {{.*}} %_1)
#[no_mangle]
pub fn test_UnionI64x4I64(_: UnionI64x4I64) { loop {} }<|fim▁hole|>// CHECK: define void @test_UnionI64x4Tuple(%UnionI64x4Tuple* {{.*}} %_1)
#[no_mangle]
pub fn test_UnionI64x4Tuple(_: UnionI64x4Tuple) { loop {} }
pub union UnionF32{a:f32}
// CHECK: define float @test_UnionF32(float %_1)
#[no_mangle]
pub fn test_UnionF32(_: UnionF32) -> UnionF32 { loop {} }
pub union UnionF32F32{a:f32, b:f32}
// CHECK: define float @test_UnionF32F32(float %_1)
#[no_mangle]
pub fn test_UnionF32F32(_: UnionF32F32) -> UnionF32F32 { loop {} }
pub union UnionF32U32{a:f32, b:u32}
// CHECK: define i32 @test_UnionF32U32(i32{{( %0)?}})
#[no_mangle]
pub fn test_UnionF32U32(_: UnionF32U32) -> UnionF32U32 { loop {} }
pub union UnionU128{a:u128}
// CHECK: define i128 @test_UnionU128(i128 %_1)
#[no_mangle]
pub fn test_UnionU128(_: UnionU128) -> UnionU128 { loop {} }
pub union UnionU128x2{a:(u128, u128)}
// CHECK: define void @test_UnionU128x2(i128 %_1.0, i128 %_1.1)
#[no_mangle]
pub fn test_UnionU128x2(_: UnionU128x2) { loop {} }
#[repr(C)]
pub union CUnionU128x2{a:(u128, u128)}
// CHECK: define void @test_CUnionU128x2(%CUnionU128x2* {{.*}} %_1)
#[no_mangle]
pub fn test_CUnionU128x2(_: CUnionU128x2) { loop {} }
pub union UnionBool { b:bool }
// CHECK: define zeroext i1 @test_UnionBool(i8 %b)
#[no_mangle]
pub fn test_UnionBool(b: UnionBool) -> bool { unsafe { b.b } }
// CHECK: %0 = trunc i8 %b to i1<|fim▁end|>
|
pub union UnionI64x4Tuple{ a: i64x4, b: (i64, i64, i64, i64) }
|
<|file_name|>ItemController.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.controller;
import com.navercorp.pinpoint.web.service.oncecloud.ItemService;
import com.navercorp.pinpoint.web.vo.oncecloud.Item;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import java.util.List;
/**
* @author wziyong
*/
@Controller
@RequestMapping("/Item")
public class ItemController {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private ItemService itemService;
/*@RequestMapping(value = "/add", method = RequestMethod.POST)
@ResponseBody
public MyResult add(@RequestParam(value = "name", required = true) String name, @RequestParam(value = "cluster_id", required = true) String cluster_id, @RequestParam(value = "interface", required = true) String interface_addr, @RequestParam(value = "status", required = true) String status, @RequestParam(value = "description", required = false) String desc) {
Host host = new Host();
host.setName(name);
host.setClusterId(Integer.parseInt(cluster_id));
host.setInterfaceAddr(interface_addr);
host.setStatus(Integer.parseInt(status));
host.setDescription(desc);
this.hostService.add(host);
return new MyResult(true, 0, null);
}*/
@RequestMapping(value = "/getList", method = RequestMethod.POST)
@ResponseBody
public List<Item> getItemList(@RequestParam(value = "host_id", required = true) String host_id, @RequestParam(value = "offset", required = false) String offset) {
if (offset != null && offset != "") {
return this.itemService.getList(Integer.parseInt(host_id), Integer.parseInt(offset));
}
else{
return this.itemService.getList(Integer.parseInt(host_id), 0);
}
}
}<|fim▁end|>
|
* Unless required by applicable law or agreed to in writing, software
|
<|file_name|>simplestreams_test.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package tools_test
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"reflect"
"runtime"
"strings"
"testing"
jc "github.com/juju/testing/checkers"
"github.com/juju/utils"
"gopkg.in/amz.v2/aws"
gc "gopkg.in/check.v1"
"github.com/juju/juju/environs/filestorage"
"github.com/juju/juju/environs/jujutest"
"github.com/juju/juju/environs/simplestreams"
sstesting "github.com/juju/juju/environs/simplestreams/testing"
"github.com/juju/juju/environs/storage"
"github.com/juju/juju/environs/tools"
toolstesting "github.com/juju/juju/environs/tools/testing"
coretesting "github.com/juju/juju/testing"
coretools "github.com/juju/juju/tools"
"github.com/juju/juju/version"
)
// Flags enabling the optional live (network-backed) simplestreams tests.
var live = flag.Bool("live", false, "Include live simplestreams tests")
var vendor = flag.String("vendor", "", "The vendor representing the source of the simplestream data")

// liveTestData describes one vendor endpoint the live tests may run against.
type liveTestData struct {
	baseURL        string                  // root URL of the simplestreams data
	requireSigned  bool                    // whether metadata must be signed
	validCloudSpec simplestreams.CloudSpec // a region/endpoint known to be served
}

// liveUrls maps the --vendor flag value to its live endpoint description.
var liveUrls = map[string]liveTestData{
	"ec2": {
		baseURL:       tools.DefaultBaseURL,
		requireSigned: true,
		validCloudSpec: simplestreams.CloudSpec{"us-east-1", aws.Regions["us-east-1"].EC2Endpoint},
	},
	"canonistack": {
		baseURL:       "https://swift.canonistack.canonical.com/v1/AUTH_526ad877f3e3464589dc1145dfeaac60/juju-tools",
		requireSigned: false,
		validCloudSpec: simplestreams.CloudSpec{"lcy01", "https://keystone.canonistack.canonical.com:443/v1.0/"},
	},
}
// setupSimpleStreamsTests registers the local (canned-data) suites and, when
// -live is given, a live suite against the endpoint selected by -vendor.
func setupSimpleStreamsTests(t *testing.T) {
	if *live {
		if *vendor == "" {
			t.Fatal("missing vendor")
		}
		var ok bool
		var testData liveTestData
		if testData, ok = liveUrls[*vendor]; !ok {
			keys := reflect.ValueOf(liveUrls).MapKeys()
			t.Fatalf("Unknown vendor %s. Must be one of %s", *vendor, keys)
		}
		registerLiveSimpleStreamsTests(testData.baseURL,
			tools.NewVersionedToolsConstraint(version.MustParse("1.13.0"), simplestreams.LookupParams{
				CloudSpec: testData.validCloudSpec,
				Series:    []string{version.Current.Series},
				Arches:    []string{"amd64"},
				Stream:    "released",
			}), testData.requireSigned)
	}
	registerSimpleStreamsTests()
}

// registerSimpleStreamsTests wires up the offline suites that read the
// canned "test:" data source.
func registerSimpleStreamsTests() {
	gc.Suite(&simplestreamsSuite{
		LocalLiveSimplestreamsSuite: sstesting.LocalLiveSimplestreamsSuite{
			Source:         simplestreams.NewURLDataSource("test", "test:", utils.VerifySSLHostnames),
			RequireSigned:  false,
			DataType:       tools.ContentDownload,
			StreamsVersion: tools.CurrentStreamsVersion,
			ValidConstraint: tools.NewVersionedToolsConstraint(version.MustParse("1.13.0"), simplestreams.LookupParams{
				CloudSpec: simplestreams.CloudSpec{
					Region:   "us-east-1",
					Endpoint: "https://ec2.us-east-1.amazonaws.com",
				},
				Series: []string{"precise"},
				Arches: []string{"amd64", "arm"},
				Stream: "released",
			}),
		},
	})
	gc.Suite(&signedSuite{})
}

// registerLiveSimpleStreamsTests wires up a suite against a real endpoint.
func registerLiveSimpleStreamsTests(baseURL string, validToolsConstraint simplestreams.LookupConstraint, requireSigned bool) {
	gc.Suite(&sstesting.LocalLiveSimplestreamsSuite{
		Source:          simplestreams.NewURLDataSource("test", baseURL, utils.VerifySSLHostnames),
		RequireSigned:   requireSigned,
		DataType:        tools.ContentDownload,
		StreamsVersion:  tools.CurrentStreamsVersion,
		ValidConstraint: validToolsConstraint,
	})
}
// simplestreamsSuite exercises tools metadata lookup against canned data.
type simplestreamsSuite struct {
	sstesting.LocalLiveSimplestreamsSuite
	sstesting.TestDataSuite
}

func (s *simplestreamsSuite) SetUpSuite(c *gc.C) {
	// Both embedded suites need their own setup.
	s.LocalLiveSimplestreamsSuite.SetUpSuite(c)
	s.TestDataSuite.SetUpSuite(c)
}

func (s *simplestreamsSuite) TearDownSuite(c *gc.C) {
	// Tear down in reverse order of setup.
	s.TestDataSuite.TearDownSuite(c)
	s.LocalLiveSimplestreamsSuite.TearDownSuite(c)
}
// fetchTests drives TestFetch: each case selects tools by series/arches and
// either an exact version or a major/minor pair, and lists the metadata
// entries expected back from the canned data source. An empty stream
// defaults to "released" (applied in TestFetch).
var fetchTests = []struct {
	region  string
	series  string
	version string
	stream  string
	major   int
	minor   int
	arches  []string
	tools   []*tools.ToolsMetadata
}{{
	series:  "precise",
	arches:  []string{"amd64", "arm"},
	version: "1.13.0",
	tools: []*tools.ToolsMetadata{
		{
			Release:  "precise",
			Version:  "1.13.0",
			Arch:     "amd64",
			Size:     2973595,
			Path:     "tools/released/20130806/juju-1.13.0-precise-amd64.tgz",
			FileType: "tar.gz",
			SHA256:   "447aeb6a934a5eaec4f703eda4ef2dde",
		},
	},
}, {
	series:  "raring",
	arches:  []string{"amd64", "arm"},
	version: "1.13.0",
	tools: []*tools.ToolsMetadata{
		{
			Release:  "raring",
			Version:  "1.13.0",
			Arch:     "amd64",
			Size:     2973173,
			Path:     "tools/released/20130806/juju-1.13.0-raring-amd64.tgz",
			FileType: "tar.gz",
			SHA256:   "df07ac5e1fb4232d4e9aa2effa57918a",
		},
	},
}, {
	series:  "raring",
	arches:  []string{"amd64", "arm"},
	version: "1.11.4",
	tools: []*tools.ToolsMetadata{
		{
			Release:  "raring",
			Version:  "1.11.4",
			Arch:     "arm",
			Size:     1950327,
			Path:     "tools/released/20130806/juju-1.11.4-raring-arm.tgz",
			FileType: "tar.gz",
			SHA256:   "6472014e3255e3fe7fbd3550ef3f0a11",
		},
	},
}, {
	// Major-only lookup (no explicit version).
	series: "precise",
	arches: []string{"amd64", "arm"},
	major:  2,
	tools: []*tools.ToolsMetadata{
		{
			Release:  "precise",
			Version:  "2.0.1",
			Arch:     "arm",
			Size:     1951096,
			Path:     "tools/released/20130806/juju-2.0.1-precise-arm.tgz",
			FileType: "tar.gz",
			SHA256:   "f65a92b3b41311bdf398663ee1c5cd0c",
		},
	},
}, {
	// Major.minor lookup matching several patch releases.
	series: "precise",
	arches: []string{"amd64", "arm"},
	major:  1,
	minor:  11,
	tools: []*tools.ToolsMetadata{
		{
			Release:  "precise",
			Version:  "1.11.4",
			Arch:     "arm",
			Size:     1951096,
			Path:     "tools/released/20130806/juju-1.11.4-precise-arm.tgz",
			FileType: "tar.gz",
			SHA256:   "f65a92b3b41311bdf398663ee1c5cd0c",
		},
		{
			Release:  "precise",
			Version:  "1.11.5",
			Arch:     "arm",
			Size:     2031281,
			Path:     "tools/released/20130803/juju-1.11.5-precise-arm.tgz",
			FileType: "tar.gz",
			SHA256:   "df07ac5e1fb4232d4e9aa2effa57918a",
		},
	},
}, {
	// Non-default stream.
	series:  "trusty",
	arches:  []string{"amd64"},
	version: "1.16.0",
	stream:  "testing",
	tools: []*tools.ToolsMetadata{
		{
			Release:  "trusty",
			Version:  "1.16.0",
			Arch:     "amd64",
			Size:     2973512,
			Path:     "tools/testing/20130806/juju-1.16.0-trusty-amd64.tgz",
			FileType: "tar.gz",
			SHA256:   "447aeb6a934a5eaec4f703eda4ef2dac",
		},
	},
}}
// TestFetch runs every fetchTests case against the canned source, with a
// deliberately broken data source in front to prove source fallback works.
func (s *simplestreamsSuite) TestFetch(c *gc.C) {
	for i, t := range fetchTests {
		c.Logf("test %d", i)
		if t.stream == "" {
			t.stream = "released"
		}
		var toolsConstraint *tools.ToolsConstraint
		if t.version == "" {
			toolsConstraint = tools.NewGeneralToolsConstraint(t.major, t.minor, simplestreams.LookupParams{
				CloudSpec: simplestreams.CloudSpec{"us-east-1", "https://ec2.us-east-1.amazonaws.com"},
				Series:    []string{t.series},
				Arches:    t.arches,
				Stream:    t.stream,
			})
		} else {
			toolsConstraint = tools.NewVersionedToolsConstraint(version.MustParse(t.version),
				simplestreams.LookupParams{
					CloudSpec: simplestreams.CloudSpec{"us-east-1", "https://ec2.us-east-1.amazonaws.com"},
					Series:    []string{t.series},
					Arches:    t.arches,
					Stream:    t.stream,
				})
		}
		// Add invalid datasource and check later that resolveInfo is correct.
		invalidSource := simplestreams.NewURLDataSource("invalid", "file://invalid", utils.VerifySSLHostnames)
		// NOTE(review): this result variable shadows the `tools` package for
		// the rest of the loop body.
		tools, resolveInfo, err := tools.Fetch(
			[]simplestreams.DataSource{invalidSource, s.Source}, toolsConstraint, s.RequireSigned)
		if !c.Check(err, jc.ErrorIsNil) {
			continue
		}
		// The expected entries get their FullPath resolved the same way
		// Fetch resolves them, so DeepEquals can compare whole structs.
		for _, tm := range t.tools {
			tm.FullPath, err = s.Source.URL(tm.Path)
			c.Assert(err, jc.ErrorIsNil)
		}
		c.Check(tools, gc.DeepEquals, t.tools)
		c.Check(resolveInfo, gc.DeepEquals, &simplestreams.ResolveInfo{
			Source:    "test",
			Signed:    s.RequireSigned,
			IndexURL:  "test:/streams/v1/index.json",
			MirrorURL: "",
		})
	}
}
// TestFetchNoMatchingStream asks for a stream absent from the index and
// expects a "not found" error rather than an empty result.
func (s *simplestreamsSuite) TestFetchNoMatchingStream(c *gc.C) {
	toolsConstraint := tools.NewGeneralToolsConstraint(2, -1, simplestreams.LookupParams{
		CloudSpec: simplestreams.CloudSpec{"us-east-1", "https://ec2.us-east-1.amazonaws.com"},
		Series:    []string{"precise"},
		Arches:    []string{},
		Stream:    "proposed",
	})
	_, _, err := tools.Fetch(
		[]simplestreams.DataSource{s.Source}, toolsConstraint, s.RequireSigned)
	c.Assert(err, gc.ErrorMatches, `index file missing "content-download" data not found`)
}

// TestFetchWithMirror checks that a mirrored region rewrites the tools path
// to the mirror location and reports the mirror URL in the resolve info.
func (s *simplestreamsSuite) TestFetchWithMirror(c *gc.C) {
	toolsConstraint := tools.NewGeneralToolsConstraint(1, 13, simplestreams.LookupParams{
		CloudSpec: simplestreams.CloudSpec{"us-west-2", "https://ec2.us-west-2.amazonaws.com"},
		Series:    []string{"precise"},
		Arches:    []string{"amd64"},
		Stream:    "released",
	})
	toolsMetadata, resolveInfo, err := tools.Fetch(
		[]simplestreams.DataSource{s.Source}, toolsConstraint, s.RequireSigned)
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(len(toolsMetadata), gc.Equals, 1)
	expectedMetadata := &tools.ToolsMetadata{
		Release:  "precise",
		Version:  "1.13.0",
		Arch:     "amd64",
		Size:     2973595,
		Path:     "mirrored-path/juju-1.13.0-precise-amd64.tgz",
		FullPath: "test:/mirrored-path/juju-1.13.0-precise-amd64.tgz",
		FileType: "tar.gz",
		SHA256:   "447aeb6a934a5eaec4f703eda4ef2dde",
	}
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(toolsMetadata[0], gc.DeepEquals, expectedMetadata)
	c.Assert(resolveInfo, gc.DeepEquals, &simplestreams.ResolveInfo{
		Source:    "test",
		Signed:    s.RequireSigned,
		IndexURL:  "test:/streams/v1/index.json",
		MirrorURL: "test:/",
	})
}
// assertMetadataMatches checks that the generated simplestreams metadata
// mirrors the given tools list entry for entry: paths are derived from the
// stream name and the file type is always tar.gz.
func assertMetadataMatches(c *gc.C, storageDir string, stream string, toolList coretools.List, metadata []*tools.ToolsMetadata) {
	expected := make([]*tools.ToolsMetadata, 0, len(toolList))
	for _, agentTool := range toolList {
		expected = append(expected, &tools.ToolsMetadata{
			Release:  agentTool.Version.Series,
			Version:  agentTool.Version.Number.String(),
			Arch:     agentTool.Version.Arch,
			Size:     agentTool.Size,
			Path:     fmt.Sprintf("%s/juju-%s.tgz", stream, agentTool.Version.String()),
			FileType: "tar.gz",
			SHA256:   agentTool.SHA256,
		})
	}
	c.Assert(metadata, gc.DeepEquals, expected)
}
// TestWriteMetadataNoFetch writes metadata for tools whose size/sha256 are
// already known, so no tools files need to exist in storage.
func (s *simplestreamsSuite) TestWriteMetadataNoFetch(c *gc.C) {
	toolsList := coretools.List{
		{
			Version: version.MustParseBinary("1.2.3-precise-amd64"),
			Size:    123,
			SHA256:  "abcd",
		}, {
			Version: version.MustParseBinary("2.0.1-raring-amd64"),
			Size:    456,
			SHA256:  "xyz",
		},
	}
	dir := c.MkDir()
	writer, err := filestorage.NewFileStorageWriter(dir)
	c.Assert(err, jc.ErrorIsNil)
	err = tools.MergeAndWriteMetadata(writer, "proposed", "proposed", toolsList, tools.DoNotWriteMirrors)
	c.Assert(err, jc.ErrorIsNil)
	metadata := toolstesting.ParseMetadataFromDir(c, dir, "proposed", false)
	assertMetadataMatches(c, dir, "proposed", toolsList, metadata)
}

// assertWriteMetadata is the shared body for the with/without-mirrors tests:
// it writes metadata for real tools files and re-reads it for comparison.
func (s *simplestreamsSuite) assertWriteMetadata(c *gc.C, withMirrors bool) {
	var versionStrings = []string{
		"1.2.3-precise-amd64",
		"2.0.1-raring-amd64",
	}
	dir := c.MkDir()
	toolstesting.MakeTools(c, dir, "proposed", versionStrings)
	toolsList := coretools.List{
		{
			// If sha256/size is already known, do not recalculate
			Version: version.MustParseBinary("1.2.3-precise-amd64"),
			Size:    123,
			SHA256:  "abcd",
		}, {
			Version: version.MustParseBinary("2.0.1-raring-amd64"),
			// The URL is not used for generating metadata.
			URL: "bogus://",
		},
	}
	writer, err := filestorage.NewFileStorageWriter(dir)
	c.Assert(err, jc.ErrorIsNil)
	writeMirrors := tools.DoNotWriteMirrors
	if withMirrors {
		writeMirrors = tools.WriteMirrors
	}
	err = tools.MergeAndWriteMetadata(writer, "proposed", "proposed", toolsList, writeMirrors)
	c.Assert(err, jc.ErrorIsNil)
	metadata := toolstesting.ParseMetadataFromDir(c, dir, "proposed", withMirrors)
	assertMetadataMatches(c, dir, "proposed", toolsList, metadata)
	// No release stream generated so there will not be a legacy index file created.
	_, err = writer.Get("tools/streams/v1/index.json")
	c.Assert(err, gc.NotNil)
}

func (s *simplestreamsSuite) TestWriteMetadata(c *gc.C) {
	s.assertWriteMetadata(c, false)
}

func (s *simplestreamsSuite) TestWriteMetadataWithMirrors(c *gc.C) {
	s.assertWriteMetadata(c, true)
}

// TestWriteMetadataMergeWithExisting writes one set of tools, then a second
// overlapping set, and checks that streams merge without clobbering each
// other.
func (s *simplestreamsSuite) TestWriteMetadataMergeWithExisting(c *gc.C) {
	dir := c.MkDir()
	existingToolsList := coretools.List{
		{
			Version: version.MustParseBinary("1.2.3-precise-amd64"),
			Size:    123,
			SHA256:  "abc",
		}, {
			Version: version.MustParseBinary("2.0.1-raring-amd64"),
			Size:    456,
			SHA256:  "xyz",
		},
	}
	writer, err := filestorage.NewFileStorageWriter(dir)
	c.Assert(err, jc.ErrorIsNil)
	err = tools.MergeAndWriteMetadata(writer, "testing", "testing", existingToolsList, tools.WriteMirrors)
	c.Assert(err, jc.ErrorIsNil)
	newToolsList := coretools.List{
		existingToolsList[0],
		{
			Version: version.MustParseBinary("2.1.0-raring-amd64"),
			Size:    789,
			SHA256:  "def",
		},
	}
	err = tools.MergeAndWriteMetadata(writer, "testing", "testing", newToolsList, tools.WriteMirrors)
	c.Assert(err, jc.ErrorIsNil)
	requiredToolsList := append(existingToolsList, newToolsList[1])
	metadata := toolstesting.ParseMetadataFromDir(c, dir, "testing", true)
	assertMetadataMatches(c, dir, "testing", requiredToolsList, metadata)
	// Writing to a different stream must not disturb the existing one.
	err = tools.MergeAndWriteMetadata(writer, "devel", "devel", newToolsList, tools.WriteMirrors)
	c.Assert(err, jc.ErrorIsNil)
	metadata = toolstesting.ParseMetadataFromDir(c, dir, "testing", true)
	assertMetadataMatches(c, dir, "testing", requiredToolsList, metadata)
	metadata = toolstesting.ParseMetadataFromDir(c, dir, "devel", true)
	assertMetadataMatches(c, dir, "devel", newToolsList, metadata)
}
// productSpecSuite covers index/product id generation for tools constraints.
type productSpecSuite struct{}

var _ = gc.Suite(&productSpecSuite{})

// Without a stream, no index ids can be produced.
func (s *productSpecSuite) TestIndexIdNoStream(c *gc.C) {
	toolsConstraint := tools.NewVersionedToolsConstraint(version.MustParse("1.13.0"), simplestreams.LookupParams{
		Series: []string{"precise"},
		Arches: []string{"amd64"},
	})
	ids := toolsConstraint.IndexIds()
	c.Assert(ids, gc.HasLen, 0)
}

func (s *productSpecSuite) TestIndexId(c *gc.C) {
	toolsConstraint := tools.NewVersionedToolsConstraint(version.MustParse("1.13.0"), simplestreams.LookupParams{
		Series: []string{"precise"},
		Arches: []string{"amd64"},
		Stream: "proposed",
	})
	ids := toolsConstraint.IndexIds()
	c.Assert(ids, gc.DeepEquals, []string{"com.ubuntu.juju:proposed:tools"})
}

// Product ids use the series' version number (precise -> 12.04).
func (s *productSpecSuite) TestProductId(c *gc.C) {
	toolsConstraint := tools.NewVersionedToolsConstraint(version.MustParse("1.13.0"), simplestreams.LookupParams{
		Series: []string{"precise"},
		Arches: []string{"amd64"},
	})
	ids, err := toolsConstraint.ProductIds()
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(ids, gc.DeepEquals, []string{"com.ubuntu.juju:12.04:amd64"})
}

func (s *productSpecSuite) TestIdMultiArch(c *gc.C) {
	toolsConstraint := tools.NewVersionedToolsConstraint(version.MustParse("1.11.3"), simplestreams.LookupParams{
		Series: []string{"precise"},
		Arches: []string{"amd64", "arm"},
	})
	ids, err := toolsConstraint.ProductIds()
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(ids, gc.DeepEquals, []string{
		"com.ubuntu.juju:12.04:amd64",
		"com.ubuntu.juju:12.04:arm"})
}

func (s *productSpecSuite) TestIdMultiSeries(c *gc.C) {
	toolsConstraint := tools.NewVersionedToolsConstraint(version.MustParse("1.11.3"), simplestreams.LookupParams{
		Series: []string{"precise", "raring"},
		Arches: []string{"amd64"},
		Stream: "released",
	})
	ids, err := toolsConstraint.ProductIds()
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(ids, gc.DeepEquals, []string{
		"com.ubuntu.juju:12.04:amd64",
		"com.ubuntu.juju:13.04:amd64"})
}

// Major-only and major.minor constraints still yield one id per series/arch.
func (s *productSpecSuite) TestIdWithMajorVersionOnly(c *gc.C) {
	toolsConstraint := tools.NewGeneralToolsConstraint(1, -1, simplestreams.LookupParams{
		Series: []string{"precise"},
		Arches: []string{"amd64"},
		Stream: "released",
	})
	ids, err := toolsConstraint.ProductIds()
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(ids, gc.DeepEquals, []string{`com.ubuntu.juju:12.04:amd64`})
}

func (s *productSpecSuite) TestIdWithMajorMinorVersion(c *gc.C) {
	toolsConstraint := tools.NewGeneralToolsConstraint(1, 2, simplestreams.LookupParams{
		Series: []string{"precise"},
		Arches: []string{"amd64"},
		Stream: "released",
	})
	ids, err := toolsConstraint.ProductIds()
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(ids, gc.DeepEquals, []string{`com.ubuntu.juju:12.04:amd64`})
}
// TestLargeNumber ensures a tools "size" that only fits in int64 survives
// JSON parsing without truncation (e.g. via a float64 round trip).
func (s *productSpecSuite) TestLargeNumber(c *gc.C) {
	json := `{
		"updated": "Fri, 30 Aug 2013 16:12:58 +0800",
		"format": "products:1.0",
		"products": {
			"com.ubuntu.juju:1.10.0:amd64": {
				"version": "1.10.0",
				"arch": "amd64",
				"versions": {
					"20133008": {
						"items": {
							"1.10.0-precise-amd64": {
								"release": "precise",
								"version": "1.10.0",
								"arch": "amd64",
								"size": 9223372036854775807,
								"path": "releases/juju-1.10.0-precise-amd64.tgz",
								"ftype": "tar.gz",
								"sha256": ""
							}
						}
					}
				}
			}
		}
	}`
	cloudMetadata, err := simplestreams.ParseCloudMetadata([]byte(json), "products:1.0", "", tools.ToolsMetadata{})
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(cloudMetadata.Products, gc.HasLen, 1)
	product := cloudMetadata.Products["com.ubuntu.juju:1.10.0:amd64"]
	c.Assert(product, gc.NotNil)
	c.Assert(product.Items, gc.HasLen, 1)
	version := product.Items["20133008"]
	c.Assert(version, gc.NotNil)
	c.Assert(version.Items, gc.HasLen, 1)
	item := version.Items["1.10.0-precise-amd64"]
	c.Assert(item, gc.NotNil)
	c.Assert(item, gc.FitsTypeOf, &tools.ToolsMetadata{})
	// int64 max: would be mangled if the parser went through float64.
	c.Assert(item.(*tools.ToolsMetadata).Size, gc.Equals, int64(9223372036854775807))
}
type metadataHelperSuite struct {
	coretesting.BaseSuite
}

var _ = gc.Suite(&metadataHelperSuite{})

// TestMetadataFromTools verifies the straight tools-list -> metadata mapping,
// including that FullPath is left unset by this conversion.
func (*metadataHelperSuite) TestMetadataFromTools(c *gc.C) {
	metadata := tools.MetadataFromTools(nil, "proposed")
	c.Assert(metadata, gc.HasLen, 0)
	toolsList := coretools.List{{
		Version: version.MustParseBinary("1.2.3-precise-amd64"),
		Size:    123,
		SHA256:  "abc",
	}, {
		Version: version.MustParseBinary("2.0.1-raring-amd64"),
		URL:     "file:///tmp/proposed/juju-2.0.1-raring-amd64.tgz",
		Size:    456,
		SHA256:  "xyz",
	}}
	metadata = tools.MetadataFromTools(toolsList, "proposed")
	c.Assert(metadata, gc.HasLen, len(toolsList))
	for i, t := range toolsList {
		md := metadata[i]
		c.Assert(md.Release, gc.Equals, t.Version.Series)
		c.Assert(md.Version, gc.Equals, t.Version.Number.String())
		c.Assert(md.Arch, gc.Equals, t.Version.Arch)
		// FullPath is only filled out when reading tools using simplestreams.
		// It's not needed elsewhere and requires a URL() call.
		c.Assert(md.FullPath, gc.Equals, "")
		c.Assert(md.Path, gc.Equals, tools.StorageName(t.Version, "proposed")[len("tools/"):])
		c.Assert(md.FileType, gc.Equals, "tar.gz")
		c.Assert(md.Size, gc.Equals, t.Size)
		c.Assert(md.SHA256, gc.Equals, t.SHA256)
	}
}
// countingStorage wraps a StorageReader and counts Get calls, letting tests
// assert whether the underlying store was actually consulted.
type countingStorage struct {
	storage.StorageReader
	counter int
}

func (c *countingStorage) Get(name string) (io.ReadCloser, error) {
	c.counter++
	return c.StorageReader.Get(name)
}
// TestResolveMetadata checks that size/sha256 are computed from storage only
// when missing from the metadata.
func (*metadataHelperSuite) TestResolveMetadata(c *gc.C) {
	var versionStrings = []string{"1.2.3-precise-amd64"}
	dir := c.MkDir()
	toolstesting.MakeTools(c, dir, "released", versionStrings)
	toolsList := coretools.List{{
		Version: version.MustParseBinary(versionStrings[0]),
		Size:    123,
		SHA256:  "abc",
	}}
	stor, err := filestorage.NewFileStorageReader(dir)
	c.Assert(err, jc.ErrorIsNil)
	// Resolving an empty metadata list is a no-op.
	err = tools.ResolveMetadata(stor, "released", nil)
	c.Assert(err, jc.ErrorIsNil)
	// We already have size/sha256, so ensure that storage isn't consulted.
	countingStorage := &countingStorage{StorageReader: stor}
	metadata := tools.MetadataFromTools(toolsList, "released")
	err = tools.ResolveMetadata(countingStorage, "released", metadata)
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(countingStorage.counter, gc.Equals, 0)
	// Now clear size/sha256, and check that it is called, and
	// the size/sha256 sum are updated.
	metadata[0].Size = 0
	metadata[0].SHA256 = ""
	err = tools.ResolveMetadata(countingStorage, "released", metadata)
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(countingStorage.counter, gc.Equals, 1)
	c.Assert(metadata[0].Size, gc.Not(gc.Equals), 0)
	c.Assert(metadata[0].SHA256, gc.Not(gc.Equals), "")
}

// TestResolveMetadataLegacyPPC64 checks that the legacy "ppc64" arch alias
// resolves against the ppc64el tools tarball (same path, size and sha256).
func (*metadataHelperSuite) TestResolveMetadataLegacyPPC64(c *gc.C) {
	var versionStrings = []string{"1.2.3-precise-amd64", "1.2.3-precise-ppc64el"}
	dir := c.MkDir()
	toolstesting.MakeTools(c, dir, "released", versionStrings)
	toolsList := coretools.List{
		{
			Version: version.MustParseBinary(versionStrings[0]),
		}, {
			Version: version.MustParseBinary(versionStrings[1]),
		}, {
			Version: version.MustParseBinary("1.2.3-precise-ppc64"),
		},
	}
	toolsMetadata := tools.MetadataFromTools(toolsList, dir)
	stor, err := filestorage.NewFileStorageReader(dir)
	c.Assert(err, jc.ErrorIsNil)
	err = tools.ResolveMetadata(stor, "released", toolsMetadata)
	c.Assert(err, jc.ErrorIsNil)
	c.Assert(toolsMetadata, gc.DeepEquals, []*tools.ToolsMetadata{
		{
			Release:  "precise",
			Version:  "1.2.3",
			Arch:     "amd64",
			Size:     19,
			FileType: "tar.gz",
			SHA256:   "dcdd65b962b804a3d63b108d670290ee95a867a97fe9b9f99b2b77b5c7173e59",
			Path:     fmt.Sprintf("%s/juju-1.2.3-precise-amd64.tgz", dir),
		},
		{
			Release:  "precise",
			Version:  "1.2.3",
			Arch:     "ppc64el",
			Size:     21,
			FileType: "tar.gz",
			SHA256:   "a3460ed45eb07a69adfcd541413a495f988c5842d715c6a40353075c3ad47af2",
			Path:     fmt.Sprintf("%s/juju-1.2.3-precise-ppc64el.tgz", dir),
		},
		{
			Release:  "precise",
			Version:  "1.2.3",
			Arch:     "ppc64",
			Size:     21,
			FileType: "tar.gz",
			SHA256:   "a3460ed45eb07a69adfcd541413a495f988c5842d715c6a40353075c3ad47af2",
			Path:     fmt.Sprintf("%s/juju-1.2.3-precise-ppc64el.tgz", dir),
		},
	})
}
func (*metadataHelperSuite) TestMergeMetadata(c *gc.C) {
md1 := &tools.ToolsMetadata{
Release: "precise",
Version: "1.2.3",
Arch: "amd64",
Path: "path1",
}
md2 := &tools.ToolsMetadata{
Release: "precise",
Version: "1.2.3",
Arch: "amd64",
Path: "path2",
}
md3 := &tools.ToolsMetadata{
Release: "raring",
Version: "1.2.3",
Arch: "amd64",
Path: "path3",
}
withSize := func(md *tools.ToolsMetadata, size int64) *tools.ToolsMetadata {
clone := *md
clone.Size = size
return &clone
}
withSHA256 := func(md *tools.ToolsMetadata, sha256 string) *tools.ToolsMetadata {
clone := *md
clone.SHA256 = sha256
return &clone
}
type mdlist []*tools.ToolsMetadata
type test struct {
name string
lhs, rhs, merged []*tools.ToolsMetadata
err string
}
tests := []test{{
name: "non-empty lhs, empty rhs",
lhs: mdlist{md1},
rhs: nil,
merged: mdlist{md1},
}, {
name: "empty lhs, non-empty rhs",
lhs: nil,
rhs: mdlist{md2},
merged: mdlist{md2},
}, {
name: "identical lhs, rhs",
lhs: mdlist{md1},
rhs: mdlist{md1},
merged: mdlist{md1},
}, {
name: "same tools in lhs and rhs, neither have size: prefer lhs",
lhs: mdlist{md1},
rhs: mdlist{md2},
merged: mdlist{md1},
}, {
name: "same tools in lhs and rhs, only lhs has a size: prefer lhs",
lhs: mdlist{withSize(md1, 123)},
rhs: mdlist{md2},
merged: mdlist{withSize(md1, 123)},
}, {
name: "same tools in lhs and rhs, only rhs has a size: prefer rhs",
lhs: mdlist{md1},
rhs: mdlist{withSize(md2, 123)},
merged: mdlist{withSize(md2, 123)},
}, {
name: "same tools in lhs and rhs, both have the same size: prefer lhs",
lhs: mdlist{withSize(md1, 123)},
rhs: mdlist{withSize(md2, 123)},
merged: mdlist{withSize(md1, 123)},
}, {
name: "same tools in lhs and rhs, both have different sizes: error",
lhs: mdlist{withSize(md1, 123)},
rhs: mdlist{withSize(md2, 456)},<|fim▁hole|> err: "metadata mismatch for 1\\.2\\.3-precise-amd64: sizes=\\(123,456\\) sha256=\\(,\\)",
}, {
name: "same tools in lhs and rhs, both have same size but different sha256: error",
lhs: mdlist{withSHA256(withSize(md1, 123), "a")},
rhs: mdlist{withSHA256(withSize(md2, 123), "b")},
err: "metadata mismatch for 1\\.2\\.3-precise-amd64: sizes=\\(123,123\\) sha256=\\(a,b\\)",
}, {
name: "lhs is a proper superset of rhs: union of lhs and rhs",
lhs: mdlist{md1, md3},
rhs: mdlist{md1},
merged: mdlist{md1, md3},
}, {
name: "rhs is a proper superset of lhs: union of lhs and rhs",
lhs: mdlist{md1},
rhs: mdlist{md1, md3},
merged: mdlist{md1, md3},
}}
for i, test := range tests {
c.Logf("test %d: %s", i, test.name)
merged, err := tools.MergeMetadata(test.lhs, test.rhs)
if test.err == "" {
c.Assert(err, jc.ErrorIsNil)
c.Assert(merged, gc.DeepEquals, test.merged)
} else {
c.Assert(err, gc.ErrorMatches, test.err)
c.Assert(merged, gc.IsNil)
}
}
}
func (*metadataHelperSuite) TestReadWriteMetadataSingleStream(c *gc.C) {
metadata := map[string][]*tools.ToolsMetadata{
"released": {{
Release: "precise",
Version: "1.2.3",
Arch: "amd64",
Path: "path1",
}, {
Release: "raring",
Version: "1.2.3",
Arch: "amd64",
Path: "path2",
}},
}
stor, err := filestorage.NewFileStorageWriter(c.MkDir())
c.Assert(err, jc.ErrorIsNil)
out, err := tools.ReadAllMetadata(stor)
c.Assert(err, jc.ErrorIsNil) // non-existence is not an error
c.Assert(out, gc.HasLen, 0)
err = tools.WriteMetadata(stor, metadata, []string{"released"}, tools.DoNotWriteMirrors)
c.Assert(err, jc.ErrorIsNil)
// Read back what was just written.
out, err = tools.ReadAllMetadata(stor)
for _, outMetadata := range out {
for _, md := range outMetadata {
// FullPath is set by ReadAllMetadata.
c.Assert(md.FullPath, gc.Not(gc.Equals), "")
md.FullPath = ""
}
}
c.Assert(out, jc.DeepEquals, metadata)
}
func (*metadataHelperSuite) writeMetadataMultipleStream(c *gc.C) (storage.StorageReader, map[string][]*tools.ToolsMetadata) {
metadata := map[string][]*tools.ToolsMetadata{
"released": {{
Release: "precise",
Version: "1.2.3",
Arch: "amd64",
Path: "path1",
}},
"proposed": {{
Release: "raring",
Version: "1.2.3",
Arch: "amd64",
Path: "path2",
}},
}
stor, err := filestorage.NewFileStorageWriter(c.MkDir())
c.Assert(err, jc.ErrorIsNil)
out, err := tools.ReadAllMetadata(stor)
c.Assert(out, gc.HasLen, 0)
c.Assert(err, jc.ErrorIsNil) // non-existence is not an error
err = tools.WriteMetadata(stor, metadata, []string{"released", "proposed"}, tools.DoNotWriteMirrors)
c.Assert(err, jc.ErrorIsNil)
return stor, metadata
}
func (s *metadataHelperSuite) TestReadWriteMetadataMultipleStream(c *gc.C) {
stor, metadata := s.writeMetadataMultipleStream(c)
// Read back what was just written.
out, err := tools.ReadAllMetadata(stor)
c.Assert(err, jc.ErrorIsNil)
for _, outMetadata := range out {
for _, md := range outMetadata {
// FullPath is set by ReadAllMetadata.
c.Assert(md.FullPath, gc.Not(gc.Equals), "")
md.FullPath = ""
}
}
c.Assert(out, jc.DeepEquals, metadata)
}
func (s *metadataHelperSuite) TestWriteMetadataLegacyIndex(c *gc.C) {
stor, _ := s.writeMetadataMultipleStream(c)
// Read back the legacy index
rdr, err := stor.Get("tools/streams/v1/index.json")
c.Assert(err, jc.ErrorIsNil)
data, err := ioutil.ReadAll(rdr)
c.Assert(err, jc.ErrorIsNil)
var indices simplestreams.Indices
err = json.Unmarshal(data, &indices)
c.Assert(err, jc.ErrorIsNil)
c.Assert(indices.Indexes, gc.HasLen, 1)
indices.Updated = ""
c.Assert(indices.Indexes["com.ubuntu.juju:released:tools"], gc.NotNil)
indices.Indexes["com.ubuntu.juju:released:tools"].Updated = ""
expected := simplestreams.Indices{
Format: "index:1.0",
Indexes: map[string]*simplestreams.IndexMetadata{
"com.ubuntu.juju:released:tools": {
Format: "products:1.0",
DataType: "content-download",
ProductsFilePath: "streams/v1/com.ubuntu.juju-released-tools.json",
ProductIds: []string{"com.ubuntu.juju:12.04:amd64"},
},
},
}
c.Assert(indices, jc.DeepEquals, expected)
}
func (s *metadataHelperSuite) TestReadWriteMetadataUnchanged(c *gc.C) {
metadata := map[string][]*tools.ToolsMetadata{
"released": {{
Release: "precise",
Version: "1.2.3",
Arch: "amd64",
Path: "path1",
}, {
Release: "raring",
Version: "1.2.3",
Arch: "amd64",
Path: "path2",
}},
}
stor, err := filestorage.NewFileStorageWriter(c.MkDir())
c.Assert(err, jc.ErrorIsNil)
err = tools.WriteMetadata(stor, metadata, []string{"released"}, tools.DoNotWriteMirrors)
c.Assert(err, jc.ErrorIsNil)
s.PatchValue(tools.WriteMetadataFiles, func(stor storage.Storage, metadataInfo []tools.MetadataFile) error {
// The product data is the same, we only write the indices.
c.Assert(metadataInfo, gc.HasLen, 2)
c.Assert(metadataInfo[0].Path, gc.Equals, "streams/v1/index2.json")
c.Assert(metadataInfo[1].Path, gc.Equals, "streams/v1/index.json")
return nil
})
err = tools.WriteMetadata(stor, metadata, []string{"released"}, tools.DoNotWriteMirrors)
c.Assert(err, jc.ErrorIsNil)
}
func (*metadataHelperSuite) TestReadMetadataPrefersNewIndex(c *gc.C) {
if runtime.GOOS == "windows" {
c.Skip("Skipped for now because of introduced regression")
}
metadataDir := c.MkDir()
// Generate metadata and rename index to index.json
metadata := map[string][]*tools.ToolsMetadata{
"proposed": {{
Release: "precise",
Version: "1.2.3",
Arch: "amd64",
Path: "path1",
}},
"released": {{
Release: "trusty",
Version: "1.2.3",
Arch: "amd64",
Path: "path1",
}},
}
stor, err := filestorage.NewFileStorageWriter(metadataDir)
c.Assert(err, jc.ErrorIsNil)
err = tools.WriteMetadata(stor, metadata, []string{"proposed", "released"}, tools.DoNotWriteMirrors)
c.Assert(err, jc.ErrorIsNil)
err = os.Rename(
filepath.Join(metadataDir, "tools", "streams", "v1", "index2.json"),
filepath.Join(metadataDir, "tools", "streams", "v1", "index.json"),
)
c.Assert(err, jc.ErrorIsNil)
// Generate different metadata with index2.json
metadata = map[string][]*tools.ToolsMetadata{
"released": {{
Release: "precise",
Version: "1.2.3",
Arch: "amd64",
Path: "path1",
}},
}
err = tools.WriteMetadata(stor, metadata, []string{"released"}, tools.DoNotWriteMirrors)
c.Assert(err, jc.ErrorIsNil)
// Read back all metadata, expecting to find metadata in index2.json.
out, err := tools.ReadAllMetadata(stor)
for _, outMetadata := range out {
for _, md := range outMetadata {
// FullPath is set by ReadAllMetadata.
c.Assert(md.FullPath, gc.Not(gc.Equals), "")
md.FullPath = ""
}
}
c.Assert(out, jc.DeepEquals, metadata)
}
type signedSuite struct {
origKey string
}
var testRoundTripper *jujutest.ProxyRoundTripper
func init() {
testRoundTripper = &jujutest.ProxyRoundTripper{}
testRoundTripper.RegisterForScheme("signedtest")
}
func (s *signedSuite) SetUpSuite(c *gc.C) {
var imageData = map[string]string{
"/unsigned/streams/v1/index.json": unsignedIndex,
"/unsigned/streams/v1/tools_metadata.json": unsignedProduct,
}
// Set up some signed data from the unsigned data.
// Overwrite the product path to use the sjson suffix.
rawUnsignedIndex := strings.Replace(
unsignedIndex, "streams/v1/tools_metadata.json", "streams/v1/tools_metadata.sjson", -1)
r := bytes.NewReader([]byte(rawUnsignedIndex))
signedData, err := simplestreams.Encode(
r, sstesting.SignedMetadataPrivateKey, sstesting.PrivateKeyPassphrase)
c.Assert(err, jc.ErrorIsNil)
imageData["/signed/streams/v1/index.sjson"] = string(signedData)
// Replace the tools path in the unsigned data with a different one so we can test that the right
// tools path is used.
rawUnsignedProduct := strings.Replace(
unsignedProduct, "juju-1.13.0", "juju-1.13.1", -1)
r = bytes.NewReader([]byte(rawUnsignedProduct))
signedData, err = simplestreams.Encode(
r, sstesting.SignedMetadataPrivateKey, sstesting.PrivateKeyPassphrase)
c.Assert(err, jc.ErrorIsNil)
imageData["/signed/streams/v1/tools_metadata.sjson"] = string(signedData)
testRoundTripper.Sub = jujutest.NewCannedRoundTripper(
imageData, map[string]int{"signedtest://unauth": http.StatusUnauthorized})
s.origKey = tools.SetSigningPublicKey(sstesting.SignedMetadataPublicKey)
}
func (s *signedSuite) TearDownSuite(c *gc.C) {
testRoundTripper.Sub = nil
tools.SetSigningPublicKey(s.origKey)
}
func (s *signedSuite) TestSignedToolsMetadata(c *gc.C) {
signedSource := simplestreams.NewURLDataSource("test", "signedtest://host/signed", utils.VerifySSLHostnames)
toolsConstraint := tools.NewVersionedToolsConstraint(version.MustParse("1.13.0"), simplestreams.LookupParams{
CloudSpec: simplestreams.CloudSpec{"us-east-1", "https://ec2.us-east-1.amazonaws.com"},
Series: []string{"precise"},
Arches: []string{"amd64"},
Stream: "released",
})
toolsMetadata, resolveInfo, err := tools.Fetch(
[]simplestreams.DataSource{signedSource}, toolsConstraint, true)
c.Assert(err, jc.ErrorIsNil)
c.Assert(len(toolsMetadata), gc.Equals, 1)
c.Assert(toolsMetadata[0].Path, gc.Equals, "tools/releases/20130806/juju-1.13.1-precise-amd64.tgz")
c.Assert(resolveInfo, gc.DeepEquals, &simplestreams.ResolveInfo{
Source: "test",
Signed: true,
IndexURL: "signedtest://host/signed/streams/v1/index.sjson",
MirrorURL: "",
})
}
var unsignedIndex = `
{
"index": {
"com.ubuntu.juju:released:tools": {
"updated": "Mon, 05 Aug 2013 11:07:04 +0000",
"datatype": "content-download",
"format": "products:1.0",
"products": [
"com.ubuntu.juju:12.04:amd64"
],
"path": "streams/v1/tools_metadata.json"
}
},
"updated": "Wed, 01 May 2013 13:31:26 +0000",
"format": "index:1.0"
}
`
var unsignedProduct = `
{
"updated": "Wed, 01 May 2013 13:31:26 +0000",
"content_id": "com.ubuntu.cloud:released:aws",
"datatype": "content-download",
"products": {
"com.ubuntu.juju:12.04:amd64": {
"arch": "amd64",
"release": "precise",
"versions": {
"20130806": {
"items": {
"1130preciseamd64": {
"version": "1.13.0",
"size": 2973595,
"path": "tools/releases/20130806/juju-1.13.0-precise-amd64.tgz",
"ftype": "tar.gz",
"sha256": "447aeb6a934a5eaec4f703eda4ef2dde"
}
}
}
}
}
},
"format": "products:1.0"
}
`<|fim▁end|>
| |
<|file_name|>test_count_minimal_sketch_counter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking transformation algorithms (the models package).
"""
import logging
import unittest
import os
import sys
import random
import itertools
from gensim.models.phrases import Phrases
if sys.version_info[0] >= 3:
unicode = str
module_path = os.path.dirname(__file__) # needed because sample data files are located in the same folder
datapath = lambda fname: os.path.join(module_path, 'test_data', fname)
WORDS = ['PHRASE%i' % i for i in range(10)] # selected words for phrases
class TestPhrasesModel(unittest.TestCase):
@staticmethod
def get_word():
"""Generate random word from letters A-Z."""
word_len = random.randint(1, 12)
return ''.join(chr(random.randint(65, 80)) for i in range(word_len))
@staticmethod
def get_sentence(size=10000):
"""Generator for random sentences.
10% probability to return sentence containing only preselected words"""
for i in range(size):
if random.random() > 0.9:
yield [WORDS[random.randint(0, len(WORDS) -1)] for i in range(random.randint(2, 10))] + ["."]
else:
yield [TestPhrasesModel.get_word() for i in range(random.randint(2, 10))] + ["."]
def testUpdate(self):
"""Test adding one token.
"""
special_token = 'non_present_token'
phrases = Phrases(TestPhrasesModel.get_sentence(), min_count=1)
present = special_token in phrases.vocab
freq = phrases.vocab[special_token]
phrases.add_vocab([[special_token]])
freq_after_change = phrases.vocab[special_token]
present_after_change = special_token in phrases.vocab
self.assertEqual(present, False, msg="Non-present token is marked as present.")
self.assertEqual(present_after_change, True, msg="Present token is marked as non-present.")
self.assertEqual(freq, 0, msg="Predicted non-zero freq for non-present token.")
self.assertEqual(freq_after_change, 1, msg="Predicted non 1 freq for token inserted once.")
def testFreqCount(self):
"""Test adding one token.
"""
special_token = 'non_present_token'
phrases = Phrases(None, min_count=1)
current = iter([])
for i in range(100):<|fim▁hole|> phrases.add_vocab(current)
freq = phrases.vocab[special_token]
self.assertTrue(freq >= 100)
current = iter([])
for i in range(100):
current = itertools.chain(current, iter([[special_token]]), TestPhrasesModel.get_sentence(i))
phrases.add_vocab(current)
freq = phrases.vocab[special_token]
self.assertTrue(freq >= 200)
#endclass TestPhrasesModel
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()<|fim▁end|>
|
current = itertools.chain(current, iter([[special_token]]), TestPhrasesModel.get_sentence(i))
|
<|file_name|>panel.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
// The panel module currently supports only Firefox.
// See: https://bugzilla.mozilla.org/show_bug.cgi?id=jetpack-panel-apps
module.metadata = {
"stability": "stable",
"engines": {
"Firefox": "*"
}
};
const { Ci } = require("chrome");
const { Class } = require("sdk/core/heritage");
const { merge } = require("sdk/util/object");
const { WorkerHost } = require("sdk/content/utils");
const { Worker } = require("sdk/deprecated/sync-worker");
const { Disposable } = require("sdk/core/disposable");
const { WeakReference } = require('sdk/core/reference');
const { contract: loaderContract } = require("sdk/content/loader");
const { contract } = require("sdk/util/contract");
const { on, off, emit, setListeners } = require("sdk/event/core");
const { EventTarget } = require("sdk/event/target");
const domPanel = require("./panel/utils");
const { events } = require("./panel/events");
const systemEvents = require("sdk/system/events");
const { filter, pipe, stripListeners } = require("sdk/event/utils");
const { getNodeView, getActiveView } = require("sdk/view/core");
const { isNil, isObject, isNumber } = require("sdk/lang/type");
const { getAttachEventType } = require("sdk/content/utils");
const { number, boolean, object } = require('sdk/deprecated/api-utils');
const { Style } = require("sdk/stylesheet/style");
const { attach, detach } = require("sdk/content/mod");
let isRect = ({top, right, bottom, left}) => [top, right, bottom, left].
some(value => isNumber(value) && !isNaN(value));
let isSDKObj = obj => obj instanceof Class;
let rectContract = contract({
top: number,
right: number,
bottom: number,
left: number
});
let position = {
is: object,
map: v => (isNil(v) || isSDKObj(v) || !isObject(v)) ? v : rectContract(v),
ok: v => isNil(v) || isSDKObj(v) || (isObject(v) && isRect(v)),
msg: 'The option "position" must be a SDK object registered as anchor; ' +
'or an object with one or more of the following keys set to numeric ' +
'values: top, right, bottom, left.'
}
let displayContract = contract({
width: number,
height: number,
focus: boolean,
autohide: boolean,
position: position,
opacity: number
});
let panelContract = contract(merge({
// contentStyle* / contentScript* are sharing the same validation constraints,
// so they can be mostly reused, except for the messages.
contentStyle: merge(Object.create(loaderContract.rules.contentScript), {
msg: 'The `contentStyle` option must be a string or an array of strings.'
}),
contentStyleFile: merge(Object.create(loaderContract.rules.contentScriptFile), {
msg: 'The `contentStyleFile` option must be a local URL or an array of URLs'
})
}, displayContract.rules, loaderContract.rules));
function isDisposed(panel) {
return !views.has(panel)
}
let panels = new WeakMap();
let models = new WeakMap();
let views = new WeakMap();
let workers = new WeakMap();
let styles = new WeakMap();
const viewFor = (panel) => views.get(panel);
const modelFor = (panel) => models.get(panel);
const panelFor = (view) => panels.get(view);
const workerFor = (panel) => workers.get(panel);
const styleFor = (panel) => styles.get(panel);
// Utility function takes `panel` instance and makes sure it will be
// automatically hidden as soon as other panel is shown.
let setupAutoHide = new function() {
let refs = new WeakMap();
return function setupAutoHide(panel) {
// Create system event listener that reacts to any panel showing and
// hides given `panel` if it's not the one being shown.
function listener({subject}) {
// It could be that listener is not GC-ed in the same cycle as
// panel in such case we remove listener manually.
let view = viewFor(panel);
if (!view) systemEvents.off("popupshowing", listener);
else if (subject !== view) panel.hide();
}
// system event listener is intentionally weak this way we'll allow GC
// to claim panel if it's no longer referenced by an add-on code. This also
// helps minimizing cleanup required on unload.
systemEvents.on("popupshowing", listener);
// To make sure listener is not claimed by GC earlier than necessary we
// associate it with `panel` it's associated with. This way it won't be
// GC-ed earlier than `panel` itself.
refs.set(panel, listener);
}
}
const Panel = Class({
implements: [
// Generate accessors for the validated properties that update model on
// set and return values from model on get.
panelContract.properties(modelFor),
EventTarget,
Disposable,
WeakReference
],
extends: WorkerHost(workerFor),
setup: function setup(options) {
let model = merge({
defaultWidth: 320,
defaultHeight: 220,
focus: true,
position: Object.freeze({}),
}, panelContract(options));
models.set(this, model);
if (model.contentStyle || model.contentStyleFile) {
styles.set(this, Style({
uri: model.contentStyleFile,
source: model.contentStyle
}));
}
// Setup view
let view = domPanel.make();
panels.set(view, this);
views.set(this, view);
// Load panel content.
domPanel.setURL(view, model.contentURL);
setupAutoHide(this);
// Setup listeners.
setListeners(this, options);
let worker = new Worker(stripListeners(options));
workers.set(this, worker);
// pipe events from worker to a panel.
pipe(worker, this);
},
dispose: function dispose() {
this.hide();
off(this);
workerFor(this).destroy();
detach(styleFor(this));
domPanel.dispose(viewFor(this));
// Release circular reference between view and panel instance. This
// way view will be GC-ed. And panel as well once all the other refs
// will be removed from it.
views.delete(this);
},
/* Public API: Panel.width */
get width() {
return modelFor(this).width;
},
set width(value) {
return this.resize(value, this.height);
},
/* Public API: Panel.height */
get height() {
return modelFor(this).height;
},
set height(value) {
return this.resize(this.width, value);
},
/* Public API: Panel.focus */
get focus() {
return modelFor(this).focus;
},
/* Public API: Panel.position */
get position() {
return modelFor(this).position;
},
get contentURL() {
modelFor(this).contentURL;
},
set contentURL(value) {
let model = modelFor(this);
model.contentURL = panelContract({ contentURL: value }).contentURL;
domPanel.setURL(viewFor(this), model.contentURL);
// Detach worker so that messages send will be queued until it's
// reatached once panel content is ready.
workerFor(this).detach();
},
/* Public API: Panel.isShowing */
get isShowing() {
return !isDisposed(this) && domPanel.isOpen(viewFor(this));
},
/* Public API: Panel.show */
show: function show(options={}, anchor) {
if (options instanceof Ci.nsIDOMElement) {
[anchor, options] = [options, null];
}<|fim▁hole|> "Passing a DOM node to Panel.show() method is an unsupported " +
"feature that will be soon replaced. " +
"See: https://bugzilla.mozilla.org/show_bug.cgi?id=878877"
);
}
let model = modelFor(this);
let view = viewFor(this);
let anchorView = getNodeView(anchor || options.position || model.position);
options = merge({
position: model.position,
width: model.width,
height: model.height,
defaultWidth: model.defaultWidth,
defaultHeight: model.defaultHeight,
focus: model.focus,
autohide: model.autohide,
opacity: model.opacity
}, displayContract(options));
if (!isDisposed(this))
domPanel.show(view, options, anchorView);
return this;
},
/* Public API: Panel.hide */
hide: function hide() {
// Quit immediately if panel is disposed or there is no state change.
domPanel.close(viewFor(this));
return this;
},
/* Public API: Panel.resize */
resize: function resize(width, height) {
let model = modelFor(this);
let view = viewFor(this);
let change = panelContract({
width: width || model.width || model.defaultWidth,
height: height || model.height || model.defaultHeight
});
model.width = change.width;
model.height = change.height;
domPanel.resize(view, model.width, model.height);
return this;
},
fadeOut: function fadeOut() {
let view = viewFor(this);
domPanel.fadeOut(view);
}
});
exports.Panel = Panel;
// Note must be defined only after value to `Panel` is assigned.
getActiveView.define(Panel, viewFor);
// Filter panel events to only panels that are create by this module.
let panelEvents = filter(events, ({target}) => panelFor(target));
// Panel events emitted after panel has being shown.
let shows = filter(panelEvents, ({type}) => type === "popupshown");
// Panel events emitted after panel became hidden.
let hides = filter(panelEvents, ({type}) => type === "popuphidden");
// Panel events emitted after content inside panel is ready. For different
// panels ready may mean different state based on `contentScriptWhen` attribute.
// Weather given event represents readyness is detected by `getAttachEventType`
// helper function.
let ready = filter(panelEvents, ({type, target}) =>
getAttachEventType(modelFor(panelFor(target))) === type);
// Panel event emitted when the contents of the panel has been loaded.
let readyToShow = filter(panelEvents, ({type}) => type === "DOMContentLoaded");
// Styles should be always added as soon as possible, and doesn't makes them
// depends on `contentScriptWhen`
let start = filter(panelEvents, ({type}) => type === "document-element-inserted");
// Forward panel show / hide events to panel's own event listeners.
on(shows, "data", ({target}) => {
let panel = panelFor(target);
if (modelFor(panel).ready)
emit(panel, "show");
});
on(hides, "data", ({target}) => {
let panel = panelFor(target);
if (modelFor(panel).ready)
emit(panel, "hide");
});
on(ready, "data", ({target}) => {
let panel = panelFor(target);
let window = domPanel.getContentDocument(target).defaultView;
workerFor(panel).attach(window);
});
on(readyToShow, "data", ({target}) => {
let panel = panelFor(target);
if (!modelFor(panel).ready) {
modelFor(panel).ready = true;
if (viewFor(panel).state == "open")
emit(panel, "show");
}
});
on(start, "data", ({target}) => {
let panel = panelFor(target);
let window = domPanel.getContentDocument(target).defaultView;
attach(styleFor(panel), window);
});<|fim▁end|>
|
if (anchor instanceof Ci.nsIDOMElement) {
console.warn(
|
<|file_name|>api.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# RERO ILS
# Copyright (C) 2019 RERO
# Copyright (C) 2020 UCLouvain
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""API for manipulating organisation."""
from functools import partial
from elasticsearch.exceptions import NotFoundError
from .models import OrganisationIdentifier, OrganisationMetadata
from ..api import IlsRecord, IlsRecordsIndexer, IlsRecordsSearch
from ..fetchers import id_fetcher
from ..item_types.api import ItemTypesSearch
from ..libraries.api import LibrariesSearch, Library
from ..minters import id_minter
from ..providers import Provider
from ..utils import sorted_pids
from ..vendors.api import Vendor, VendorsSearch
# provider
OrganisationProvider = type(
'OrganisationProvider',
(Provider,),
dict(identifier=OrganisationIdentifier, pid_type='org')
)
# minter
organisation_id_minter = partial(id_minter, provider=OrganisationProvider)
# fetcher
organisation_id_fetcher = partial(id_fetcher, provider=OrganisationProvider)
class OrganisationsSearch(IlsRecordsSearch):
"""Organisation search."""
class Meta:
"""Meta class."""
index = 'organisations'
doc_types = None
fields = ('*', )
facets = {}
default_filter = None
def get_record_by_viewcode(self, viewcode, fields=None):
"""Search by viewcode."""
query = self.filter('term', code=viewcode).extra(size=1)
if fields:
query = query.source(includes=fields)
response = query.execute()
if response.hits.total.value != 1:
raise NotFoundError(
f'Organisation viewcode {viewcode}: Result not found.')
return response.hits.hits[0]._source
<|fim▁hole|>class Organisation(IlsRecord):
"""Organisation class."""
minter = organisation_id_minter
fetcher = organisation_id_fetcher
provider = OrganisationProvider
model_cls = OrganisationMetadata
@classmethod
def get_all(cls):
"""Get all organisations."""
return sorted([
Organisation.get_record_by_id(_id)
for _id in Organisation.get_all_ids()
], key=lambda org: org.get('name'))
@classmethod
def all_code(cls):
"""Get all code."""
return [org.get('code') for org in cls.get_all()]
@classmethod
def get_record_by_viewcode(cls, viewcode):
"""Get record by view code."""
result = OrganisationsSearch().filter(
'term',
code=viewcode
).execute()
if result['hits']['total']['value'] != 1:
raise Exception(
'Organisation (get_record_by_viewcode): Result not found.')
return result['hits']['hits'][0]['_source']
@classmethod
def get_record_by_online_harvested_source(cls, source):
"""Get record by online harvested source.
:param source: the record source
:return: Organisation record or None.
"""
results = OrganisationsSearch().filter(
'term', online_harvested_source=source).scan()
try:
return Organisation.get_record_by_pid(next(results).pid)
except StopIteration:
return None
@property
def organisation_pid(self):
"""Get organisation pid ."""
return self.pid
def online_circulation_category(self):
"""Get the default circulation category for online resources."""
results = ItemTypesSearch().filter(
'term', organisation__pid=self.pid).filter(
'term', type='online').source(['pid']).scan()
try:
return next(results).pid
except StopIteration:
return None
def get_online_locations(self):
"""Get list of online locations."""
return [library.online_location
for library in self.get_libraries() if library.online_location]
def get_libraries_pids(self):
"""Get all libraries pids related to the organisation."""
results = LibrariesSearch().source(['pid'])\
.filter('term', organisation__pid=self.pid)\
.scan()
for result in results:
yield result.pid
def get_libraries(self):
"""Get all libraries related to the organisation."""
pids = self.get_libraries_pids()
for pid in pids:
yield Library.get_record_by_pid(pid)
def get_vendor_pids(self):
"""Get all vendor pids related to the organisation."""
results = VendorsSearch().source(['pid'])\
.filter('term', organisation__pid=self.pid)\
.scan()
for result in results:
yield result.pid
def get_vendors(self):
"""Get all vendors related to the organisation."""
pids = self.get_vendor_pids()
for pid in pids:
yield Vendor.get_record_by_pid(pid)
def get_links_to_me(self, get_pids=False):
"""Record links.
:param get_pids: if True list of linked pids
if False count of linked records
"""
from ..acq_receipts.api import AcqReceiptsSearch
library_query = LibrariesSearch()\
.filter('term', organisation__pid=self.pid)
receipt_query = AcqReceiptsSearch() \
.filter('term', organisation__pid=self.pid)
links = {}
if get_pids:
libraries = sorted_pids(library_query)
receipts = sorted_pids(receipt_query)
else:
libraries = library_query.count()
receipts = receipt_query.count()
if libraries:
links['libraries'] = libraries
if receipts:
links['acq_receipts'] = receipts
return links
def reasons_not_to_delete(self):
"""Get reasons not to delete record."""
cannot_delete = {}
links = self.get_links_to_me()
if links:
cannot_delete['links'] = links
return cannot_delete
def is_test_organisation(self):
"""Check if this is a test organisation."""
if self.get('code') == 'cypress':
return True
return False
class OrganisationsIndexer(IlsRecordsIndexer):
"""Holdings indexing class."""
record_cls = Organisation
def bulk_index(self, record_id_iterator):
"""Bulk index records.
:param record_id_iterator: Iterator yielding record UUIDs.
"""
super().bulk_index(record_id_iterator, doc_type='org')<|fim▁end|>
| |
<|file_name|>capture_source_list_dshow.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 The WebM project authors. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the LICENSE file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.<|fim▁hole|>
#include "glog/logging.h"
#include "encoder/win/media_source_dshow.h"
#include "encoder/win/string_util_win.h"
class AutoComInit {
public:
AutoComInit() { CoInitialize(NULL); }
~AutoComInit() { CoUninitialize(); }
};
namespace webmlive {
std::string GetAudioSourceList() {
AutoComInit com_init;
CaptureSourceLoader loader;
const int status = loader.Init(CLSID_AudioInputDeviceCategory);
if (status) {
LOG(ERROR) << "no video source!";
return "";
}
std::ostringstream aud_list;
for (int i = 0; i < loader.GetNumSources(); ++i) {
const std::string dev_name =
WStringToString(loader.GetSourceName(i).c_str());
LOG(INFO) << "adev" << i << ": " << dev_name;
aud_list << i << ": " << dev_name << "\n";
}
return aud_list.str();
}
std::string GetVideoSourceList() {
AutoComInit com_init;
CaptureSourceLoader loader;
const int status = loader.Init(CLSID_VideoInputDeviceCategory);
if (status) {
LOG(ERROR) << "no video source!";
return "";
}
std::ostringstream vid_list;
for (int i = 0; i < loader.GetNumSources(); ++i) {
const std::string dev_name =
WStringToString(loader.GetSourceName(i).c_str());
LOG(INFO) << "vdev" << i << ": " << dev_name;
vid_list << i << ": " << dev_name << "\n";
}
return vid_list.str();
}
} // namespace webmlive<|fim▁end|>
|
#include "encoder/capture_source_list.h"
#include <sstream>
|
<|file_name|>facebox_test.go<|end_file_name|><|fim▁begin|>package facebox_test
import (
"io"
"net/http"
"net/http/httptest"
"testing"
"github.com/machinebox/sdk-go/facebox"
"github.com/matryer/is"
)
func TestInfo(t *testing.T) {
is := is.New(t)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
is.Equal(r.Method, "GET")
is.Equal(r.URL.Path, "/info")
is.Equal(r.Header.Get("Accept"), "application/json; charset=utf-8")
io.WriteString(w, `{
"name": "facebox",
"version": 1,
"build": "abcdefg",
"status": "ready"
}`)
}))
defer srv.Close()
fb := facebox.New(srv.URL)
info, err := fb.Info()
is.NoErr(err)
is.Equal(info.Name, "facebox")
is.Equal(info.Version, 1)
is.Equal(info.Build, "abcdefg")
is.Equal(info.Status, "ready")<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>telnet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import telnetlib
import time
import sys
import socket
TEL_PORT = 23
TEL_TO = 3
def write_cmd(cmd, conn):
cmd = cmd.rstrip()
conn.write(cmd + '\n')
time.sleep(1)
return conn.read_very_eager()
def telnet_conn(ip, port, timeout):
try:
conn = telnetlib.Telnet(ip, port, timeout)
except socket.timeout:
sys.exit("connection timed out")
return conn
def login(user, passwd, conn):
output = conn.read_until("sername:", TEL_TO)
conn.write(user + '\n')
output += conn.read_until("assword:", TEL_TO)
conn.write(passwd + '\n')
return output
def main():
ip = '50.76.53.27'
user = 'pyclass'
passwd = '88newclass'
conn = telnet_conn(ip, TEL_PORT, TEL_TO)
login(user, passwd, conn)
hostname = write_cmd('show run | i hostname', conn)<|fim▁hole|> write_cmd('terminal length 0', conn)
out = write_cmd('show ver ', conn)
print out.rstrip('\n' + hostname + '#')
conn.close()
if __name__ == "__main__":
main()<|fim▁end|>
|
hostname.lstrip('hostname ')
|
<|file_name|>test_pool3d_op.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from __future__ import division
import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest
def adaptive_start_index(index, input_size, output_size):
return int(np.floor(index * input_size / output_size))
def adaptive_end_index(index, input_size, output_size):
return int(np.ceil((index + 1) * input_size / output_size))
def max_pool3D_forward_naive(x,
ksize,
strides,
paddings,
global_pool=0,
ceil_mode=False,
exclusive=True,
adaptive=False):
N, C, D, H, W = x.shape
if global_pool == 1:
ksize = [D, H, W]
if adaptive:
D_out, H_out, W_out = ksize
else:
D_out = (D - ksize[0] + 2 * paddings[0] + strides[0] - 1
) // strides[0] + 1 if ceil_mode else (
H - ksize[0] + 2 * paddings[0]) // strides[0] + 1
H_out = (H - ksize[1] + 2 * paddings[1] + strides[1] - 1
) // strides[1] + 1 if ceil_mode else (
W - ksize[1] + 2 * paddings[1]) // strides[1] + 1
W_out = (W - ksize[2] + 2 * paddings[2] + strides[2] - 1
) // strides[2] + 1 if ceil_mode else (
W - ksize[2] + 2 * paddings[2]) // strides[2] + 1
out = np.zeros((N, C, D_out, H_out, W_out))
for k in range(D_out):
if adaptive:
d_start = adaptive_start_index(k, D, ksize[0])
d_end = adaptive_end_index(k, D, ksize[0])
else:
d_start = np.max((k * strides[0] - paddings[0], 0))
d_end = np.min((k * strides[0] + ksize[0] - paddings[0], D))
for i in range(H_out):
if adaptive:
h_start = adaptive_start_index(i, H, ksize[1])
h_end = adaptive_end_index(i, H, ksize[1])
else:
h_start = np.max((i * strides[1] - paddings[1], 0))
h_end = np.min((i * strides[1] + ksize[1] - paddings[1], H))
for j in range(W_out):
if adaptive:
w_start = adaptive_start_index(j, W, ksize[2])
w_end = adaptive_end_index(j, W, ksize[2])
else:
w_start = np.max((j * strides[2] - paddings[2], 0))
w_end = np.min((j * strides[2] + ksize[2] - paddings[2], W))
x_masked = x[:, :, d_start:d_end, h_start:h_end, w_start:w_end]
out[:, :, k, i, j] = np.max(x_masked, axis=(2, 3, 4))
return out
def avg_pool3D_forward_naive(x,
ksize,
strides,
paddings,
global_pool=0,
ceil_mode=False,
exclusive=True,
adaptive=False):
N, C, D, H, W = x.shape
if global_pool == 1:
ksize = [D, H, W]
if adaptive:
D_out, H_out, W_out = ksize
else:
D_out = (D - ksize[0] + 2 * paddings[0] + strides[0] - 1
) // strides[0] + 1 if ceil_mode else (
H - ksize[0] + 2 * paddings[0]) // strides[0] + 1
H_out = (H - ksize[1] + 2 * paddings[1] + strides[1] - 1
) // strides[1] + 1 if ceil_mode else (
W - ksize[1] + 2 * paddings[1]) // strides[1] + 1
W_out = (W - ksize[2] + 2 * paddings[2] + strides[2] - 1
) // strides[2] + 1 if ceil_mode else (
W - ksize[2] + 2 * paddings[2]) // strides[2] + 1
out = np.zeros((N, C, D_out, H_out, W_out))
for k in range(D_out):
if adaptive:
d_start = adaptive_start_index(k, D, ksize[0])
d_end = adaptive_end_index(k, D, ksize[0])
else:
d_start = np.max((k * strides[0] - paddings[0], 0))
d_end = np.min((k * strides[0] + ksize[0] - paddings[0], D))
for i in range(H_out):
if adaptive:
h_start = adaptive_start_index(i, H, ksize[1])
h_end = adaptive_end_index(i, H, ksize[1])
else:
h_start = np.max((i * strides[1] - paddings[1], 0))
h_end = np.min((i * strides[1] + ksize[1] - paddings[1], H))
for j in range(W_out):
if adaptive:
w_start = adaptive_start_index(j, W, ksize[2])
w_end = adaptive_end_index(j, W, ksize[2])
else:
w_start = np.max((j * strides[2] - paddings[2], 0))
w_end = np.min((j * strides[2] + ksize[2] - paddings[2], W))
x_masked = x[:, :, d_start:d_end, h_start:h_end, w_start:w_end]
field_size = (d_end - d_start) * (h_end - h_start) * (w_end - w_start) \
if (exclusive or adaptive) else ksize[0] * ksize[1] * ksize[2]
out[:, :, k, i, j] = np.sum(x_masked, axis=(2, 3,
4)) / field_size
return out
class TestPool3d_Op(OpTest):
def setUp(self):
self.op_type = "pool3d"
self.use_cudnn = False
self.dtype = np.float32
self.init_test_case()
self.init_global_pool()
self.init_kernel_type()
self.init_pool_type()
self.init_ceil_mode()
self.init_exclusive()
self.init_adaptive()
if self.global_pool:
self.paddings = [0 for _ in range(len(self.paddings))]
input = np.random.random(self.shape).astype(self.dtype)
output = self.pool3D_forward_naive(
input, self.ksize, self.strides, self.paddings, self.global_pool,
self.ceil_mode, self.exclusive, self.adaptive).astype(self.dtype)
self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(input)}
self.attrs = {
'strides': self.strides,
'paddings': self.paddings,
'ksize': self.ksize,
'pooling_type': self.pool_type,
'global_pooling': self.global_pool,
'use_cudnn': self.use_cudnn,
'ceil_mode': self.ceil_mode,
'data_format':
'AnyLayout', # TODO(dzhwinter) : should be fix latter
'exclusive': self.exclusive,
'adaptive': self.adaptive
}
self.outputs = {'Out': output}
def testcudnn(self):
return core.is_compiled_with_cuda() and self.use_cudnn
def test_check_output(self):
if self.testcudnn():
place = core.CUDAPlace(0)
self.check_output_with_place(place, atol=1e-5)
else:
self.check_output()
def test_check_grad(self):
if self.dtype == np.float16:
return
if self.testcudnn() and self.pool_type != "max":
place = core.CUDAPlace(0)
self.check_grad_with_place(
place, set(['X']), 'Out', max_relative_error=0.07)
elif self.pool_type != "max":
self.check_grad(set(['X']), 'Out', max_relative_error=0.07)
def init_test_case(self):
self.shape = [2, 3, 5, 5, 5]
self.ksize = [3, 3, 3]
self.strides = [1, 1, 1]
self.paddings = [0, 0, 0]
def init_kernel_type(self):
pass
def init_pool_type(self):
self.pool_type = "avg"
self.pool3D_forward_naive = avg_pool3D_forward_naive
def init_global_pool(self):
self.global_pool = True
def init_ceil_mode(self):
self.ceil_mode = False
def init_exclusive(self):
self.exclusive = True
def init_adaptive(self):
self.adaptive = False
class TestCase1(TestPool3d_Op):
def init_test_case(self):
self.shape = [2, 3, 7, 7, 7]
self.ksize = [3, 3, 3]
self.strides = [1, 1, 1]
self.paddings = [0, 0, 0]
def init_pool_type(self):
self.pool_type = "avg"
self.pool3D_forward_naive = avg_pool3D_forward_naive
def init_global_pool(self):
self.global_pool = False
class TestCase2(TestPool3d_Op):
def init_test_case(self):
self.shape = [2, 3, 7, 7, 7]
self.ksize = [3, 3, 3]
self.strides = [1, 1, 1]
self.paddings = [1, 1, 1]
def init_pool_type(self):
self.pool_type = "avg"
self.pool3D_forward_naive = avg_pool3D_forward_naive
def init_global_pool(self):
self.global_pool = False
class TestCase3(TestPool3d_Op):
def init_pool_type(self):
self.pool_type = "max"
self.pool3D_forward_naive = max_pool3D_forward_naive
class TestCase4(TestCase1):
def init_pool_type(self):
self.pool_type = "max"
self.pool3D_forward_naive = max_pool3D_forward_naive
class TestCase5(TestCase2):
def init_pool_type(self):
self.pool_type = "max"
self.pool3D_forward_naive = max_pool3D_forward_naive
#--------------------test pool3d--------------------
class TestCUDNNCase1(TestPool3d_Op):
def init_kernel_type(self):
self.use_cudnn = True
class TestFP16CUDNNCase1(TestPool3d_Op):
def init_kernel_type(self):
self.use_cudnn = True
self.dtype = np.float16
def test_check_output(self):
if core.is_compiled_with_cuda():
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_output_with_place(place, atol=1e-3)
class TestCUDNNCase2(TestCase1):
def init_kernel_type(self):
self.use_cudnn = True
class TestFP16CUDNNCase2(TestCase1):
def init_kernel_type(self):
self.use_cudnn = True
self.dtype = np.float16
def test_check_output(self):
if core.is_compiled_with_cuda():
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_output_with_place(place, atol=1e-3)
class TestCUDNNCase3(TestCase2):
def init_kernel_type(self):
self.use_cudnn = True
class TestFP16CUDNNCase3(TestCase2):
def init_kernel_type(self):
self.use_cudnn = True
self.dtype = np.float16
def test_check_output(self):
if core.is_compiled_with_cuda():
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_output_with_place(place, atol=1e-3)
class TestCUDNNCase4(TestCase3):
def init_kernel_type(self):
self.use_cudnn = True
class TestFP16CUDNNCase4(TestCase3):
def init_kernel_type(self):
self.use_cudnn = True
self.dtype = np.float16
def test_check_output(self):
if core.is_compiled_with_cuda():
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_output_with_place(place, atol=1e-3)
class TestCUDNNCase5(TestCase4):
def init_kernel_type(self):
self.use_cudnn = True
class TestFP16CUDNNCase5(TestCase4):
def init_kernel_type(self):
self.use_cudnn = True
self.dtype = np.float16
def test_check_output(self):
if core.is_compiled_with_cuda():
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_output_with_place(place, atol=1e-3)
class TestCUDNNCase6(TestCase5):
def init_kernel_type(self):
self.use_cudnn = True
class TestFP16CUDNNCase6(TestCase5):
def init_kernel_type(self):
self.use_cudnn = True
self.dtype = np.float16
def test_check_output(self):
if core.is_compiled_with_cuda():
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_output_with_place(place, atol=1e-3)
<|fim▁hole|>class TestCeilModeCase1(TestCUDNNCase1):
def init_ceil_mode(self):
self.ceil_mode = True
class TestCeilModeCase2(TestCUDNNCase2):
def init_ceil_mode(self):
self.ceil_mode = True
class TestCeilModeCase3(TestCase1):
def init_ceil_mode(self):
self.ceil_mode = True
class TestCeilModeCase4(TestCase2):
def init_ceil_mode(self):
self.ceil_mode = True
class TestAvgInclude(TestCase2):
def init_exclusive(self):
self.exclusive = False
class TestCUDNNAvgInclude(TestCUDNNCase3):
def init_exclusive(self):
self.exclusive = False
class TestAvgPoolAdaptive(TestCase1):
def init_adaptive(self):
self.adaptive = True
if __name__ == '__main__':
unittest.main()<|fim▁end|>
| |
<|file_name|>LinkKeySerializer.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|> * limitations under the License.
*/
package org.onosproject.store.serializers;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.LinkKey;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
/**
* Kryo Serializer for {@link LinkKey}.
*/
public class LinkKeySerializer extends Serializer<LinkKey> {
/**
* Creates {@link LinkKey} serializer instance.
*/
public LinkKeySerializer() {
// non-null, immutable
super(false, true);
}
@Override
public void write(Kryo kryo, Output output, LinkKey object) {
kryo.writeClassAndObject(output, object.src());
kryo.writeClassAndObject(output, object.dst());
}
@Override
public LinkKey read(Kryo kryo, Input input, Class<LinkKey> type) {
ConnectPoint src = (ConnectPoint) kryo.readClassAndObject(input);
ConnectPoint dst = (ConnectPoint) kryo.readClassAndObject(input);
return LinkKey.linkKey(src, dst);
}
}<|fim▁end|>
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
|
<|file_name|>controllers.py<|end_file_name|><|fim▁begin|>from application import CONFIG, app
from .models import *
from flask import current_app, session
from flask.ext.login import login_user, logout_user, current_user
from flask.ext.principal import Principal, Identity, AnonymousIdentity, identity_changed, identity_loaded, RoleNeed
import bcrypt
import re
import sendgrid
import time
from itsdangerous import URLSafeTimedSerializer
AuthenticationError = Exception("AuthenticationError", "Invalid credentials.")
UserExistsError = Exception("UserExistsError", "Email already exists in database.")
UserDoesNotExistError = Exception("UserDoesNotExistError", "Account with given email does not exist.")
login_manager = LoginManager()
login_manager.init_app(app)
principals = Principal(app)
sg = sendgrid.SendGridClient(CONFIG["SENDGRID_API_KEY"])
ts = URLSafeTimedSerializer(CONFIG["SECRET_KEY"])
@login_manager.user_loader
def load_user(user_id):
user_entries = StaffUserEntry.objects(id = user_id)
if user_entries.count() != 1:
return None
currUser = user_entries[0]
user = User(currUser.id, currUser.email, currUser.firstname, currUser.lastname, currUser.roles)
return user
@identity_loaded.connect_via(app)
def on_identity_loaded(sender, identity):
identity.user = current_user
if hasattr(current_user, 'roles'):
for role in current_user.roles:
identity.provides.add(RoleNeed(role))
def get_user(email):
entries = StaffUserEntry.objects(email = email)
if entries.count() == 1:
return entries[0]<|fim▁hole|> currUser = get_user(email)
if currUser is None:
return None
hashed = currUser.hashed
if bcrypt.hashpw(password.encode("utf-8"), hashed.encode("utf-8")) == hashed.encode("utf-8"):
return load_user(currUser.id)
else:
return None
def login(email):
user = load_user(get_user(email).id)
if user != None:
login_user(user)
identity_changed.send(current_app._get_current_object(), identity = Identity(user.uid))
else:
raise UserDoesNotExistError
def logout():
logout_user()
for key in ('identity.name', 'identity.auth_type'):
session.pop(key, None)
identity_changed.send(current_app._get_current_object(), identity = AnonymousIdentity())
def tokenize_email(email):
return ts.dumps(email, salt = CONFIG["EMAIL_TOKENIZER_SALT"])
def detokenize_email(token):
return ts.loads(token, salt = CONFIG["EMAIL_TOKENIZER_SALT"], max_age = 86400)
def send_recovery_email(email):
user = get_user(email)
if user is None:
raise UserDoesNotExistError
token = tokenize_email(email)
message = sendgrid.Mail()
message.add_to(email)
message.set_from("[email protected]")
message.set_subject("hackBCA III - Account Recovery")
message.set_html("<p></p>")
message.add_filter("templates", "enable", "1")
message.add_filter("templates", "template_id", CONFIG["SENDGRID_ACCOUNT_RECOVERY_TEMPLATE"])
message.add_substitution("prefix", "staff")
message.add_substitution("token", token)
status, msg = sg.send(message)
def change_name(email, firstname, lastname):
account = get_user(email)
if account is None:
raise UserDoesNotExistError
account.firstname = firstname
account.lastname = lastname
account.save()
login(email) #To update navbar
def change_password(email, password):
account = get_user(email)
if account is None:
raise UserDoesNotExistError
hashed = str(bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()))[2:-1]
account.hashed = hashed
account.save()
def get_user_attr(email, attr):
user = get_user(email)
if user is None:
raise UserDoesNotExistError
return getattr(user, attr)
def set_user_attr(email, attr, value):
user = get_user(email)
if user is None:
raise UserDoesNotExistError
setattr(user, attr, value)
user.save()<|fim▁end|>
|
return None
def verify_user(email, password):
|
<|file_name|>inferFromUsage.ts<|end_file_name|><|fim▁begin|>/* @internal */
namespace ts.codefix {
const fixId = "inferFromUsage";
const errorCodes = [
// Variable declarations
Diagnostics.Variable_0_implicitly_has_type_1_in_some_locations_where_its_type_cannot_be_determined.code,
// Variable uses
Diagnostics.Variable_0_implicitly_has_an_1_type.code,
// Parameter declarations
Diagnostics.Parameter_0_implicitly_has_an_1_type.code,
Diagnostics.Rest_parameter_0_implicitly_has_an_any_type.code,
// Get Accessor declarations
Diagnostics.Property_0_implicitly_has_type_any_because_its_get_accessor_lacks_a_return_type_annotation.code,
Diagnostics._0_which_lacks_return_type_annotation_implicitly_has_an_1_return_type.code,
// Set Accessor declarations
Diagnostics.Property_0_implicitly_has_type_any_because_its_set_accessor_lacks_a_parameter_type_annotation.code,
// Property declarations
Diagnostics.Member_0_implicitly_has_an_1_type.code,
//// Suggestions
// Variable declarations
Diagnostics.Variable_0_implicitly_has_type_1_in_some_locations_but_a_better_type_may_be_inferred_from_usage.code,
// Variable uses
Diagnostics.Variable_0_implicitly_has_an_1_type_but_a_better_type_may_be_inferred_from_usage.code,
// Parameter declarations
Diagnostics.Parameter_0_implicitly_has_an_1_type_but_a_better_type_may_be_inferred_from_usage.code,
Diagnostics.Rest_parameter_0_implicitly_has_an_any_type_but_a_better_type_may_be_inferred_from_usage.code,
// Get Accessor declarations
Diagnostics.Property_0_implicitly_has_type_any_but_a_better_type_for_its_get_accessor_may_be_inferred_from_usage.code,
Diagnostics._0_implicitly_has_an_1_return_type_but_a_better_type_may_be_inferred_from_usage.code,
// Set Accessor declarations
Diagnostics.Property_0_implicitly_has_type_any_but_a_better_type_for_its_set_accessor_may_be_inferred_from_usage.code,
// Property declarations
Diagnostics.Member_0_implicitly_has_an_1_type_but_a_better_type_may_be_inferred_from_usage.code,
// Function expressions and declarations
Diagnostics.this_implicitly_has_type_any_because_it_does_not_have_a_type_annotation.code,
];
registerCodeFix({
errorCodes,
getCodeActions(context) {
const { sourceFile, program, span: { start }, errorCode, cancellationToken, host, preferences } = context;
const token = getTokenAtPosition(sourceFile, start);
let declaration: Declaration | undefined;
const changes = textChanges.ChangeTracker.with(context, changes => { declaration = doChange(changes, sourceFile, token, errorCode, program, cancellationToken, /*markSeen*/ returnTrue, host, preferences); });
const name = declaration && getNameOfDeclaration(declaration);
return !name || changes.length === 0 ? undefined
: [createCodeFixAction(fixId, changes, [getDiagnostic(errorCode, token), name.getText(sourceFile)], fixId, Diagnostics.Infer_all_types_from_usage)];
},
fixIds: [fixId],
getAllCodeActions(context) {
const { sourceFile, program, cancellationToken, host, preferences } = context;
const markSeen = nodeSeenTracker();
return codeFixAll(context, errorCodes, (changes, err) => {
doChange(changes, sourceFile, getTokenAtPosition(err.file, err.start), err.code, program, cancellationToken, markSeen, host, preferences);
});
},
});
function getDiagnostic(errorCode: number, token: Node): DiagnosticMessage {
switch (errorCode) {
case Diagnostics.Parameter_0_implicitly_has_an_1_type.code:
case Diagnostics.Parameter_0_implicitly_has_an_1_type_but_a_better_type_may_be_inferred_from_usage.code:
return isSetAccessorDeclaration(getContainingFunction(token)!) ? Diagnostics.Infer_type_of_0_from_usage : Diagnostics.Infer_parameter_types_from_usage; // TODO: GH#18217
case Diagnostics.Rest_parameter_0_implicitly_has_an_any_type.code:
case Diagnostics.Rest_parameter_0_implicitly_has_an_any_type_but_a_better_type_may_be_inferred_from_usage.code:
return Diagnostics.Infer_parameter_types_from_usage;
case Diagnostics.this_implicitly_has_type_any_because_it_does_not_have_a_type_annotation.code:
return Diagnostics.Infer_this_type_of_0_from_usage;
default:
return Diagnostics.Infer_type_of_0_from_usage;
}
}
/** Map suggestion code to error code */
function mapSuggestionDiagnostic(errorCode: number) {
switch (errorCode) {
case Diagnostics.Variable_0_implicitly_has_type_1_in_some_locations_but_a_better_type_may_be_inferred_from_usage.code:
return Diagnostics.Variable_0_implicitly_has_type_1_in_some_locations_where_its_type_cannot_be_determined.code;
case Diagnostics.Variable_0_implicitly_has_an_1_type_but_a_better_type_may_be_inferred_from_usage.code:
return Diagnostics.Variable_0_implicitly_has_an_1_type.code;
case Diagnostics.Parameter_0_implicitly_has_an_1_type_but_a_better_type_may_be_inferred_from_usage.code:
return Diagnostics.Parameter_0_implicitly_has_an_1_type.code;
case Diagnostics.Rest_parameter_0_implicitly_has_an_any_type_but_a_better_type_may_be_inferred_from_usage.code:
return Diagnostics.Rest_parameter_0_implicitly_has_an_any_type.code;
case Diagnostics.Property_0_implicitly_has_type_any_but_a_better_type_for_its_get_accessor_may_be_inferred_from_usage.code:
return Diagnostics.Property_0_implicitly_has_type_any_because_its_get_accessor_lacks_a_return_type_annotation.code;
case Diagnostics._0_implicitly_has_an_1_return_type_but_a_better_type_may_be_inferred_from_usage.code:
return Diagnostics._0_which_lacks_return_type_annotation_implicitly_has_an_1_return_type.code;
case Diagnostics.Property_0_implicitly_has_type_any_but_a_better_type_for_its_set_accessor_may_be_inferred_from_usage.code:
return Diagnostics.Property_0_implicitly_has_type_any_because_its_set_accessor_lacks_a_parameter_type_annotation.code;
case Diagnostics.Member_0_implicitly_has_an_1_type_but_a_better_type_may_be_inferred_from_usage.code:
return Diagnostics.Member_0_implicitly_has_an_1_type.code;
}
return errorCode;
}
function doChange(changes: textChanges.ChangeTracker, sourceFile: SourceFile, token: Node, errorCode: number, program: Program, cancellationToken: CancellationToken, markSeen: NodeSeenTracker, host: LanguageServiceHost, preferences: UserPreferences): Declaration | undefined {
if (!isParameterPropertyModifier(token.kind) && token.kind !== SyntaxKind.Identifier && token.kind !== SyntaxKind.DotDotDotToken && token.kind !== SyntaxKind.ThisKeyword) {
return undefined;
}
const { parent } = token;
const importAdder = createImportAdder(sourceFile, program, preferences, host);
errorCode = mapSuggestionDiagnostic(errorCode);
switch (errorCode) {
// Variable and Property declarations
case Diagnostics.Member_0_implicitly_has_an_1_type.code:
case Diagnostics.Variable_0_implicitly_has_type_1_in_some_locations_where_its_type_cannot_be_determined.code:
if ((isVariableDeclaration(parent) && markSeen(parent)) || isPropertyDeclaration(parent) || isPropertySignature(parent)) { // handle bad location
annotateVariableDeclaration(changes, importAdder, sourceFile, parent, program, host, cancellationToken);
importAdder.writeFixes(changes);
return parent;
}
if (isPropertyAccessExpression(parent)) {
const type = inferTypeForVariableFromUsage(parent.name, program, cancellationToken);
const typeNode = getTypeNodeIfAccessible(type, parent, program, host);
if (typeNode) {
// Note that the codefix will never fire with an existing `@type` tag, so there is no need to merge tags
const typeTag = factory.createJSDocTypeTag(/*tagName*/ undefined, factory.createJSDocTypeExpression(typeNode), /*comment*/ undefined);
addJSDocTags(changes, sourceFile, cast(parent.parent.parent, isExpressionStatement), [typeTag]);
}
importAdder.writeFixes(changes);
return parent;
}
return undefined;
case Diagnostics.Variable_0_implicitly_has_an_1_type.code: {
const symbol = program.getTypeChecker().getSymbolAtLocation(token);
if (symbol && symbol.valueDeclaration && isVariableDeclaration(symbol.valueDeclaration) && markSeen(symbol.valueDeclaration)) {
annotateVariableDeclaration(changes, importAdder, sourceFile, symbol.valueDeclaration, program, host, cancellationToken);
importAdder.writeFixes(changes);
return symbol.valueDeclaration;
}
return undefined;
}
}
const containingFunction = getContainingFunction(token);
if (containingFunction === undefined) {
return undefined;
}
let declaration: Declaration | undefined;
switch (errorCode) {
// Parameter declarations
case Diagnostics.Parameter_0_implicitly_has_an_1_type.code:
if (isSetAccessorDeclaration(containingFunction)) {
annotateSetAccessor(changes, importAdder, sourceFile, containingFunction, program, host, cancellationToken);
declaration = containingFunction;
break;
}
// falls through
case Diagnostics.Rest_parameter_0_implicitly_has_an_any_type.code:
if (markSeen(containingFunction)) {
const param = cast(parent, isParameter);
annotateParameters(changes, importAdder, sourceFile, param, containingFunction, program, host, cancellationToken);
declaration = param;
}
break;
// Get Accessor declarations
case Diagnostics.Property_0_implicitly_has_type_any_because_its_get_accessor_lacks_a_return_type_annotation.code:
case Diagnostics._0_which_lacks_return_type_annotation_implicitly_has_an_1_return_type.code:
if (isGetAccessorDeclaration(containingFunction) && isIdentifier(containingFunction.name)) {
annotate(changes, importAdder, sourceFile, containingFunction, inferTypeForVariableFromUsage(containingFunction.name, program, cancellationToken), program, host);
declaration = containingFunction;
}
break;
// Set Accessor declarations
case Diagnostics.Property_0_implicitly_has_type_any_because_its_set_accessor_lacks_a_parameter_type_annotation.code:
if (isSetAccessorDeclaration(containingFunction)) {
annotateSetAccessor(changes, importAdder, sourceFile, containingFunction, program, host, cancellationToken);
declaration = containingFunction;
}
break;
// Function 'this'
case Diagnostics.this_implicitly_has_type_any_because_it_does_not_have_a_type_annotation.code:
if (textChanges.isThisTypeAnnotatable(containingFunction) && markSeen(containingFunction)) {
annotateThis(changes, sourceFile, containingFunction, program, host, cancellationToken);
declaration = containingFunction;
}
break;
default:
return Debug.fail(String(errorCode));
}
importAdder.writeFixes(changes);
return declaration;
}
function annotateVariableDeclaration(
changes: textChanges.ChangeTracker,
importAdder: ImportAdder,
sourceFile: SourceFile,
declaration: VariableDeclaration | PropertyDeclaration | PropertySignature,
program: Program,
host: LanguageServiceHost,
cancellationToken: CancellationToken,
): void {
if (isIdentifier(declaration.name)) {
annotate(changes, importAdder, sourceFile, declaration, inferTypeForVariableFromUsage(declaration.name, program, cancellationToken), program, host);
}
}
function annotateParameters(
changes: textChanges.ChangeTracker,
importAdder: ImportAdder,
sourceFile: SourceFile,
parameterDeclaration: ParameterDeclaration,
containingFunction: SignatureDeclaration,
program: Program,
host: LanguageServiceHost,
cancellationToken: CancellationToken,
): void {
if (!isIdentifier(parameterDeclaration.name)) {
return;
}
const parameterInferences = inferTypeForParametersFromUsage(containingFunction, sourceFile, program, cancellationToken);
Debug.assert(containingFunction.parameters.length === parameterInferences.length, "Parameter count and inference count should match");
if (isInJSFile(containingFunction)) {
annotateJSDocParameters(changes, sourceFile, parameterInferences, program, host);
}
else {
const needParens = isArrowFunction(containingFunction) && !findChildOfKind(containingFunction, SyntaxKind.OpenParenToken, sourceFile);
if (needParens) changes.insertNodeBefore(sourceFile, first(containingFunction.parameters), factory.createToken(SyntaxKind.OpenParenToken));
for (const { declaration, type } of parameterInferences) {
if (declaration && !declaration.type && !declaration.initializer) {
annotate(changes, importAdder, sourceFile, declaration, type, program, host);
}
}
if (needParens) changes.insertNodeAfter(sourceFile, last(containingFunction.parameters), factory.createToken(SyntaxKind.CloseParenToken));
}
}
function annotateThis(changes: textChanges.ChangeTracker, sourceFile: SourceFile, containingFunction: textChanges.ThisTypeAnnotatable, program: Program, host: LanguageServiceHost, cancellationToken: CancellationToken) {
const references = getFunctionReferences(containingFunction, sourceFile, program, cancellationToken);
if (!references || !references.length) {
return;
}
const thisInference = inferTypeFromReferences(program, references, cancellationToken).thisParameter();
const typeNode = getTypeNodeIfAccessible(thisInference, containingFunction, program, host);
if (!typeNode) {
return;
}
if (isInJSFile(containingFunction)) {
annotateJSDocThis(changes, sourceFile, containingFunction, typeNode);
}
else {
changes.tryInsertThisTypeAnnotation(sourceFile, containingFunction, typeNode);
}
}
function annotateJSDocThis(changes: textChanges.ChangeTracker, sourceFile: SourceFile, containingFunction: SignatureDeclaration, typeNode: TypeNode) {
addJSDocTags(changes, sourceFile, containingFunction, [
factory.createJSDocThisTag(/*tagName*/ undefined, factory.createJSDocTypeExpression(typeNode)),
]);
}
function annotateSetAccessor(
changes: textChanges.ChangeTracker,
importAdder: ImportAdder,
sourceFile: SourceFile,
setAccessorDeclaration: SetAccessorDeclaration,
program: Program,
host: LanguageServiceHost,
cancellationToken: CancellationToken,
): void {
const param = firstOrUndefined(setAccessorDeclaration.parameters);
if (param && isIdentifier(setAccessorDeclaration.name) && isIdentifier(param.name)) {
let type = inferTypeForVariableFromUsage(setAccessorDeclaration.name, program, cancellationToken);
if (type === program.getTypeChecker().getAnyType()) {
type = inferTypeForVariableFromUsage(param.name, program, cancellationToken);
}
if (isInJSFile(setAccessorDeclaration)) {
annotateJSDocParameters(changes, sourceFile, [{ declaration: param, type }], program, host);
}
else {
annotate(changes, importAdder, sourceFile, param, type, program, host);
}
}
}
/**
 * Writes an inferred `type` onto `declaration`: in JS files as a JSDoc tag
 * (an `@return` tag for get accessors, `@type` otherwise), and in TS files as
 * a type annotation, preferring an auto-import over an inline `import(...)` type.
 * Does nothing when the type cannot be printed accessibly at this location.
 */
function annotate(changes: textChanges.ChangeTracker, importAdder: ImportAdder, sourceFile: SourceFile, declaration: textChanges.TypeAnnotatable, type: Type, program: Program, host: LanguageServiceHost): void {
    const typeNode = getTypeNodeIfAccessible(type, declaration, program, host);
    if (typeNode) {
        if (isInJSFile(sourceFile) && declaration.kind !== SyntaxKind.PropertySignature) {
            // JSDoc for a variable attaches to its enclosing variable statement, not the declaration node.
            const parent = isVariableDeclaration(declaration) ? tryCast(declaration.parent.parent, isVariableStatement) : declaration;
            if (!parent) {
                return;
            }
            const typeExpression = factory.createJSDocTypeExpression(typeNode);
            const typeTag = isGetAccessorDeclaration(declaration) ? factory.createJSDocReturnTag(/*tagName*/ undefined, typeExpression, /*comment*/ undefined) : factory.createJSDocTypeTag(/*tagName*/ undefined, typeExpression, /*comment*/ undefined);
            addJSDocTags(changes, sourceFile, parent, [typeTag]);
        }
        else if (!tryReplaceImportTypeNodeWithAutoImport(typeNode, declaration, sourceFile, changes, importAdder, getEmitScriptTarget(program.getCompilerOptions()))) {
            changes.tryInsertTypeAnnotation(sourceFile, declaration, typeNode);
        }
    }
}
/**
 * If `typeNode` can be rewritten from an `import("...")` type to a bare type
 * reference, inserts the rewritten annotation and records the corresponding
 * type-only auto-imports. Returns true when the annotation was inserted here.
 */
function tryReplaceImportTypeNodeWithAutoImport(
    typeNode: TypeNode,
    declaration: textChanges.TypeAnnotatable,
    sourceFile: SourceFile,
    changes: textChanges.ChangeTracker,
    importAdder: ImportAdder,
    scriptTarget: ScriptTarget
): boolean {
    const importableReference = tryGetAutoImportableReferenceFromTypeNode(typeNode, scriptTarget);
    if (importableReference && changes.tryInsertTypeAnnotation(sourceFile, declaration, importableReference.typeNode)) {
        forEach(importableReference.symbols, s => importAdder.addImportFromExportedSymbol(s, /*usageIsTypeOnly*/ true));
        return true;
    }
    return false;
}
/**
 * Annotates parameters of a JS signature with inferred types via JSDoc.
 * For arrow functions / function expressions each parameter gets an inline
 * JSDoc type comment (wrapping a single arrow parameter in parentheses when
 * necessary); for other signatures a single JSDoc block with `@param` tags
 * is attached.
 *
 * Fix: the original cloned `param.name` twice — `setEmitFlags` was applied to
 * the first clone but the second, un-flagged clone was returned, so the
 * NoComments/NoNestedComments flags were silently discarded. We now return
 * the flagged clone.
 */
function annotateJSDocParameters(changes: textChanges.ChangeTracker, sourceFile: SourceFile, parameterInferences: readonly ParameterInference[], program: Program, host: LanguageServiceHost): void {
    const signature = parameterInferences.length && parameterInferences[0].declaration.parent;
    if (!signature) {
        return;
    }
    const inferences = mapDefined(parameterInferences, inference => {
        const param = inference.declaration;
        // only infer parameters that have (1) no type and (2) an accessible inferred type
        if (param.initializer || getJSDocType(param) || !isIdentifier(param.name)) {
            return;
        }
        const typeNode = inference.type && getTypeNodeIfAccessible(inference.type, param, program, host);
        if (typeNode) {
            // Clone the name and strip comments so they are not duplicated into the JSDoc output.
            const name = factory.cloneNode(param.name);
            setEmitFlags(name, EmitFlags.NoComments | EmitFlags.NoNestedComments);
            return { name, param, isOptional: !!inference.isOptional, typeNode };
        }
    });
    if (!inferences.length) {
        return;
    }
    if (isArrowFunction(signature) || isFunctionExpression(signature)) {
        // A single arrow-function parameter may lack parentheses; they are required once we add a comment.
        const needParens = isArrowFunction(signature) && !findChildOfKind(signature, SyntaxKind.OpenParenToken, sourceFile);
        if (needParens) {
            changes.insertNodeBefore(sourceFile, first(signature.parameters), factory.createToken(SyntaxKind.OpenParenToken));
        }
        forEach(inferences, ({ typeNode, param }) => {
            const typeTag = factory.createJSDocTypeTag(/*tagName*/ undefined, factory.createJSDocTypeExpression(typeNode));
            const jsDoc = factory.createJSDocComment(/*comment*/ undefined, [typeTag]);
            changes.insertNodeAt(sourceFile, param.getStart(sourceFile), jsDoc, { suffix: " " });
        });
        if (needParens) {
            changes.insertNodeAfter(sourceFile, last(signature.parameters), factory.createToken(SyntaxKind.CloseParenToken));
        }
    }
    else {
        const paramTags = map(inferences, ({ name, typeNode, isOptional }) =>
            factory.createJSDocParameterTag(/*tagName*/ undefined, name, /*isBracketed*/ !!isOptional, factory.createJSDocTypeExpression(typeNode), /* isNameFirst */ false, /*comment*/ undefined));
        addJSDocTags(changes, sourceFile, signature, paramTags);
    }
}
/**
 * Merges `newTags` into the existing JSDoc of `parent` and emits one combined
 * JSDoc comment. A new tag that matches an existing one (same parameter name,
 * or both `@return`) replaces it in place instead of being appended.
 */
export function addJSDocTags(changes: textChanges.ChangeTracker, sourceFile: SourceFile, parent: HasJSDoc, newTags: readonly JSDocTag[]): void {
    const comments = flatMap(parent.jsDoc, j => typeof j.comment === "string" ? factory.createJSDocText(j.comment) : j.comment) as JSDocComment[];
    const oldTags = flatMapToMutable(parent.jsDoc, j => j.tags);
    // Try to merge each new tag into an existing tag; keep only the ones that did not merge.
    const unmergedNewTags = newTags.filter(newTag => !oldTags || !oldTags.some((tag, i) => {
        const merged = tryMergeJsdocTags(tag, newTag);
        if (merged) oldTags[i] = merged;
        return !!merged;
    }));
    const tag = factory.createJSDocComment(factory.createNodeArray(intersperse(comments, factory.createJSDocText("\n"))), factory.createNodeArray([...(oldTags || emptyArray), ...unmergedNewTags]))
    const jsDocNode = parent.kind === SyntaxKind.ArrowFunction ? getJsDocNodeForArrowFunction(parent) : parent;
    // Carry the JSDoc (and its cache) over to the node that will actually hold the comment.
    jsDocNode.jsDoc = parent.jsDoc;
    jsDocNode.jsDocCache = parent.jsDocCache;
    changes.insertJsdocCommentBefore(sourceFile, jsDocNode, tag);
}
/** Returns the declaration/statement that should carry JSDoc for an arrow function. */
function getJsDocNodeForArrowFunction(signature: ArrowFunction): HasJSDoc {
    const parent = signature.parent;
    // A property-declaration initializer documents the property itself; otherwise
    // (e.g. a variable initializer) the comment goes on the grandparent statement.
    return (parent.kind === SyntaxKind.PropertyDeclaration ? parent : parent.parent) as HasJSDoc;
}
/**
 * Merges two JSDoc tags of the same kind: for `@param` tags with the same
 * name, the new type wins but the old comment is kept; likewise for
 * `@return`. Returns undefined when the tags cannot be merged.
 */
function tryMergeJsdocTags(oldTag: JSDocTag, newTag: JSDocTag): JSDocTag | undefined {
    if (oldTag.kind !== newTag.kind) {
        return undefined;
    }
    switch (oldTag.kind) {
        case SyntaxKind.JSDocParameterTag: {
            const oldParam = oldTag as JSDocParameterTag;
            const newParam = newTag as JSDocParameterTag;
            return isIdentifier(oldParam.name) && isIdentifier(newParam.name) && oldParam.name.escapedText === newParam.name.escapedText
                ? factory.createJSDocParameterTag(/*tagName*/ undefined, newParam.name, /*isBracketed*/ false, newParam.typeExpression, newParam.isNameFirst, oldParam.comment)
                : undefined;
        }
        case SyntaxKind.JSDocReturnTag:
            return factory.createJSDocReturnTag(/*tagName*/ undefined, (newTag as JSDocReturnTag).typeExpression, oldTag.comment);
    }
}
/** Finds all identifier references to `token` across the program, skipping plain-span entries. */
function getReferences(token: PropertyName | Token<SyntaxKind.ConstructorKeyword>, program: Program, cancellationToken: CancellationToken): readonly Identifier[] {
    // Position shouldn't matter since token is not a SourceFile.
    return mapDefined(FindAllReferences.getReferenceEntriesForNode(-1, token, program, program.getSourceFiles(), cancellationToken), entry =>
        entry.kind !== FindAllReferences.EntryKind.Span ? tryCast(entry.node, isIdentifier) : undefined);
}
/** Infers a single type for a variable-like identifier from all of its references. */
function inferTypeForVariableFromUsage(token: Identifier | PrivateIdentifier, program: Program, cancellationToken: CancellationToken): Type {
    const references = getReferences(token, program, cancellationToken);
    return inferTypeFromReferences(program, references, cancellationToken).single();
}
/**
 * Infers types for every parameter of `func` from call sites that reference
 * the function; falls back to per-parameter usage inference when the function
 * itself has no findable references.
 */
function inferTypeForParametersFromUsage(func: SignatureDeclaration, sourceFile: SourceFile, program: Program, cancellationToken: CancellationToken) {
    const references = getFunctionReferences(func, sourceFile, program, cancellationToken);
    return references && inferTypeFromReferences(program, references, cancellationToken).parameters(func) ||
        func.parameters.map<ParameterInference>(p => ({
            declaration: p,
            type: isIdentifier(p.name) ? inferTypeForVariableFromUsage(p.name, program, cancellationToken) : program.getTypeChecker().getAnyType()
        }));
}
/**
 * Picks the token to search references for, depending on the kind of
 * function-like declaration: the `constructor` keyword for constructors, the
 * variable/property name for initializers, or the declaration's own name.
 * Returns undefined for anonymous functions with no usable name.
 */
function getFunctionReferences(containingFunction: SignatureDeclaration, sourceFile: SourceFile, program: Program, cancellationToken: CancellationToken): readonly Identifier[] | undefined {
    let searchToken;
    switch (containingFunction.kind) {
        case SyntaxKind.Constructor:
            searchToken = findChildOfKind<Token<SyntaxKind.ConstructorKeyword>>(containingFunction, SyntaxKind.ConstructorKeyword, sourceFile);
            break;
        case SyntaxKind.ArrowFunction:
        case SyntaxKind.FunctionExpression:
            const parent = containingFunction.parent;
            searchToken = (isVariableDeclaration(parent) || isPropertyDeclaration(parent)) && isIdentifier(parent.name) ?
                parent.name :
                containingFunction.name;
            break;
        case SyntaxKind.FunctionDeclaration:
        case SyntaxKind.MethodDeclaration:
        case SyntaxKind.MethodSignature:
            searchToken = containingFunction.name;
            break;
    }
    if (!searchToken) {
        return undefined;
    }
    return getReferences(searchToken, program, cancellationToken);
}
/** One inferred parameter: its declaration, inferred type, and optionality. */
interface ParameterInference {
    readonly declaration: ParameterDeclaration;
    readonly type: Type;
    readonly isOptional?: boolean;
}
/**
 * Usage-based type inference. Walks every reference, accumulates evidence
 * into a `Usage` record, and exposes three entry points: `single` (one
 * identifier), `parameters` (a signature's parameters from its call sites)
 * and `thisParameter`.
 */
function inferTypeFromReferences(program: Program, references: readonly Identifier[], cancellationToken: CancellationToken) {
    const checker = program.getTypeChecker();
    // Builtin generics that property-based inference may instantiate (see inferNamedTypesFromProperties).
    const builtinConstructors: { [s: string]: (t: Type) => Type } = {
        string: () => checker.getStringType(),
        number: () => checker.getNumberType(),
        Array: t => checker.createArrayType(t),
        Promise: t => checker.createPromiseType(t),
    };
    // Candidate builtin types, in the same order/keys as builtinConstructors.
    const builtins = [
        checker.getStringType(),
        checker.getNumberType(),
        checker.createArrayType(checker.getAnyType()),
        checker.createPromiseType(checker.getAnyType()),
    ];
    return {
        single,
        parameters,
        thisParameter,
    };
    /** Evidence gathered from one call or construct site. */
    interface CallUsage {
        argumentTypes: Type[];
        return_: Usage;
    }
    /** Accumulated evidence about how a value is used across its references. */
    interface Usage {
        isNumber: boolean | undefined;
        isString: boolean | undefined;
        /** Used ambiguously, eg x + ___ or object[___]; results in string | number if no other evidence exists */
        isNumberOrString: boolean | undefined;
        candidateTypes: Type[] | undefined;
        properties: UnderscoreEscapedMap<Usage> | undefined;
        calls: CallUsage[] | undefined;
        constructs: CallUsage[] | undefined;
        numberIndex: Usage | undefined;
        stringIndex: Usage | undefined;
        candidateThisTypes: Type[] | undefined;
        inferredTypes: Type[] | undefined;
    }
    /** Returns a Usage record with every evidence field unset. */
    function createEmptyUsage(): Usage {
        return {
            isNumber: undefined,
            isString: undefined,
            isNumberOrString: undefined,
            candidateTypes: undefined,
            properties: undefined,
            calls: undefined,
            constructs: undefined,
            numberIndex: undefined,
            stringIndex: undefined,
            candidateThisTypes: undefined,
            inferredTypes: undefined,
        };
    }
    /**
     * Folds several Usage records into one: booleans are OR'd, lists are
     * concatenated, and per-property usages are combined recursively.
     */
    function combineUsages(usages: Usage[]): Usage {
        // Group every usage seen for each property name, then combine per name.
        const combinedProperties = new Map<__String, Usage[]>();
        for (const u of usages) {
            if (u.properties) {
                u.properties.forEach((p, name) => {
                    if (!combinedProperties.has(name)) {
                        combinedProperties.set(name, []);
                    }
                    combinedProperties.get(name)!.push(p);
                });
            }
        }
        const properties = new Map<__String, Usage>();
        combinedProperties.forEach((ps, name) => {
            properties.set(name, combineUsages(ps));
        });
        return {
            isNumber: usages.some(u => u.isNumber),
            isString: usages.some(u => u.isString),
            isNumberOrString: usages.some(u => u.isNumberOrString),
            candidateTypes: flatMap(usages, u => u.candidateTypes) as Type[],
            properties,
            calls: flatMap(usages, u => u.calls) as CallUsage[],
            constructs: flatMap(usages, u => u.constructs) as CallUsage[],
            numberIndex: forEach(usages, u => u.numberIndex),
            stringIndex: forEach(usages, u => u.stringIndex),
            candidateThisTypes: flatMap(usages, u => u.candidateThisTypes) as Type[],
            inferredTypes: undefined, // clear type cache
        };
    }
    /** Infers one combined type for all of the closed-over references. */
    function single(): Type {
        return combineTypes(inferTypesFromReferencesSingle(references));
    }
    /**
     * Infers a type for each parameter of `declaration` from the argument
     * types at every call/construct site found in the references, plus the
     * parameter's own in-body usage. Rest parameters collect the tail of
     * each argument list and become arrays; parameters missing at some call
     * site become optional in JS files.
     */
    function parameters(declaration: SignatureDeclaration): ParameterInference[] | undefined {
        if (references.length === 0 || !declaration.parameters) {
            return undefined;
        }
        const usage = createEmptyUsage();
        for (const reference of references) {
            cancellationToken.throwIfCancellationRequested();
            calculateUsageOfNode(reference, usage);
        }
        const calls = [...usage.constructs || [], ...usage.calls || []];
        return declaration.parameters.map((parameter, parameterIndex): ParameterInference => {
            const types = [];
            const isRest = isRestParameter(parameter);
            let isOptional = false;
            for (const call of calls) {
                if (call.argumentTypes.length <= parameterIndex) {
                    // No argument supplied for this parameter at this call site.
                    isOptional = isInJSFile(declaration);
                    types.push(checker.getUndefinedType());
                }
                else if (isRest) {
                    // A rest parameter absorbs every remaining argument.
                    for (let i = parameterIndex; i < call.argumentTypes.length; i++) {
                        types.push(checker.getBaseTypeOfLiteralType(call.argumentTypes[i]));
                    }
                }
                else {
                    types.push(checker.getBaseTypeOfLiteralType(call.argumentTypes[parameterIndex]));
                }
            }
            if (isIdentifier(parameter.name)) {
                // Also use how the parameter is used inside the function body.
                const inferred = inferTypesFromReferencesSingle(getReferences(parameter.name, program, cancellationToken));
                types.push(...(isRest ? mapDefined(inferred, checker.getElementTypeOfArrayType) : inferred));
            }
            const type = combineTypes(types);
            return {
                type: isRest ? checker.createArrayType(type) : type,
                isOptional: isOptional && !isRest,
                declaration: parameter
            };
        });
    }
    /** Infers the `this` type from candidate this-types collected over all references. */
    function thisParameter() {
        const usage = createEmptyUsage();
        for (const reference of references) {
            cancellationToken.throwIfCancellationRequested();
            calculateUsageOfNode(reference, usage);
        }
        return combineTypes(usage.candidateThisTypes || emptyArray);
    }
    /** Accumulates usage over `references` and returns the raw list of inferred types. */
    function inferTypesFromReferencesSingle(references: readonly Identifier[]): Type[] {
        const usage: Usage = createEmptyUsage();
        for (const reference of references) {
            cancellationToken.throwIfCancellationRequested();
            calculateUsageOfNode(reference, usage);
        }
        return inferTypes(usage);
    }
    /**
     * Dispatches on the syntactic context of `node` and records the evidence
     * that context provides into `usage` (e.g. `x++` implies number).
     */
    function calculateUsageOfNode(node: Expression, usage: Usage): void {
        // Walk up to the whole property-access/qualified-name chain the node terminates.
        while (isRightSideOfQualifiedNameOrPropertyAccess(node)) {
            node = node.parent as Expression;
        }
        switch (node.parent.kind) {
            case SyntaxKind.ExpressionStatement:
                inferTypeFromExpressionStatement(node, usage);
                break;
            case SyntaxKind.PostfixUnaryExpression:
                // x++ / x-- only apply to numbers.
                usage.isNumber = true;
                break;
            case SyntaxKind.PrefixUnaryExpression:
                inferTypeFromPrefixUnaryExpression(node.parent as PrefixUnaryExpression, usage);
                break;
            case SyntaxKind.BinaryExpression:
                inferTypeFromBinaryExpression(node, node.parent as BinaryExpression, usage);
                break;
            case SyntaxKind.CaseClause:
            case SyntaxKind.DefaultClause:
                inferTypeFromSwitchStatementLabel(node.parent as CaseOrDefaultClause, usage);
                break;
            case SyntaxKind.CallExpression:
            case SyntaxKind.NewExpression:
                // Being called/constructed vs. being passed as an argument.
                if ((node.parent as CallExpression | NewExpression).expression === node) {
                    inferTypeFromCallExpression(node.parent as CallExpression | NewExpression, usage);
                }
                else {
                    inferTypeFromContextualType(node, usage);
                }
                break;
            case SyntaxKind.PropertyAccessExpression:
                inferTypeFromPropertyAccessExpression(node.parent as PropertyAccessExpression, usage);
                break;
            case SyntaxKind.ElementAccessExpression:
                inferTypeFromPropertyElementExpression(node.parent as ElementAccessExpression, node, usage);
                break;
            case SyntaxKind.PropertyAssignment:
            case SyntaxKind.ShorthandPropertyAssignment:
                inferTypeFromPropertyAssignment(node.parent as PropertyAssignment | ShorthandPropertyAssignment, usage);
                break;
            case SyntaxKind.PropertyDeclaration:
                inferTypeFromPropertyDeclaration(node.parent as PropertyDeclaration, usage);
                break;
            case SyntaxKind.VariableDeclaration: {
                const { name, initializer } = node.parent as VariableDeclaration;
                if (node === name) {
                    if (initializer) { // This can happen for `let x = null;` which still has an implicit-any error.
                        addCandidateType(usage, checker.getTypeAtLocation(initializer));
                    }
                    break;
                }
            }
            // falls through
            default:
                return inferTypeFromContextualType(node, usage);
        }
    }
    /** Records the contextual (expected) type of `node` as a candidate, if any. */
    function inferTypeFromContextualType(node: Expression, usage: Usage): void {
        if (isExpressionNode(node)) {
            addCandidateType(usage, checker.getContextualType(node));
        }
    }
    /** A bare statement: a call suggests void; any other expression tells us nothing. */
    function inferTypeFromExpressionStatement(node: Expression, usage: Usage): void {
        addCandidateType(usage, isCallExpression(node) ? checker.getVoidType() : checker.getAnyType());
    }
    /** Arithmetic/bitwise prefix operators imply number; unary plus allows string too. */
    function inferTypeFromPrefixUnaryExpression(node: PrefixUnaryExpression, usage: Usage): void {
        switch (node.operator) {
            case SyntaxKind.PlusPlusToken:
            case SyntaxKind.MinusMinusToken:
            case SyntaxKind.MinusToken:
            case SyntaxKind.TildeToken:
                usage.isNumber = true;
                break;
            case SyntaxKind.PlusToken:
                usage.isNumberOrString = true;
                break;
            // case SyntaxKind.ExclamationToken:
            // no inferences here;
        }
    }
    /**
     * Records evidence from a binary expression containing `node`. Arithmetic,
     * bitwise, shift and relational operators imply number (or the other
     * operand's enum type); `+` disambiguates via the other operand; equality
     * and assignment operators contribute the other operand's type directly.
     */
    function inferTypeFromBinaryExpression(node: Expression, parent: BinaryExpression, usage: Usage): void {
        switch (parent.operatorToken.kind) {
            // ExponentiationOperator
            case SyntaxKind.AsteriskAsteriskToken:
            // MultiplicativeOperator
            // falls through
            case SyntaxKind.AsteriskToken:
            case SyntaxKind.SlashToken:
            case SyntaxKind.PercentToken:
            // ShiftOperator
            // falls through
            case SyntaxKind.LessThanLessThanToken:
            case SyntaxKind.GreaterThanGreaterThanToken:
            case SyntaxKind.GreaterThanGreaterThanGreaterThanToken:
            // BitwiseOperator
            // falls through
            case SyntaxKind.AmpersandToken:
            case SyntaxKind.BarToken:
            case SyntaxKind.CaretToken:
            // CompoundAssignmentOperator
            // falls through
            case SyntaxKind.MinusEqualsToken:
            case SyntaxKind.AsteriskAsteriskEqualsToken:
            case SyntaxKind.AsteriskEqualsToken:
            case SyntaxKind.SlashEqualsToken:
            case SyntaxKind.PercentEqualsToken:
            case SyntaxKind.AmpersandEqualsToken:
            case SyntaxKind.BarEqualsToken:
            case SyntaxKind.CaretEqualsToken:
            case SyntaxKind.LessThanLessThanEqualsToken:
            case SyntaxKind.GreaterThanGreaterThanGreaterThanEqualsToken:
            case SyntaxKind.GreaterThanGreaterThanEqualsToken:
            // AdditiveOperator
            // falls through
            case SyntaxKind.MinusToken:
            // RelationalOperator
            // falls through
            case SyntaxKind.LessThanToken:
            case SyntaxKind.LessThanEqualsToken:
            case SyntaxKind.GreaterThanToken:
            case SyntaxKind.GreaterThanEqualsToken:
                // Enum-like operands are preserved; everything else is plain number evidence.
                const operandType = checker.getTypeAtLocation(parent.left === node ? parent.right : parent.left);
                if (operandType.flags & TypeFlags.EnumLike) {
                    addCandidateType(usage, operandType);
                }
                else {
                    usage.isNumber = true;
                }
                break;
            case SyntaxKind.PlusEqualsToken:
            case SyntaxKind.PlusToken:
                // `+` is overloaded: let the other operand decide between number and string.
                const otherOperandType = checker.getTypeAtLocation(parent.left === node ? parent.right : parent.left);
                if (otherOperandType.flags & TypeFlags.EnumLike) {
                    addCandidateType(usage, otherOperandType);
                }
                else if (otherOperandType.flags & TypeFlags.NumberLike) {
                    usage.isNumber = true;
                }
                else if (otherOperandType.flags & TypeFlags.StringLike) {
                    usage.isString = true;
                }
                else if (otherOperandType.flags & TypeFlags.Any) {
                    // do nothing, maybe we'll learn something elsewhere
                }
                else {
                    usage.isNumberOrString = true;
                }
                break;
            // AssignmentOperators
            case SyntaxKind.EqualsToken:
            case SyntaxKind.EqualsEqualsToken:
            case SyntaxKind.EqualsEqualsEqualsToken:
            case SyntaxKind.ExclamationEqualsEqualsToken:
            case SyntaxKind.ExclamationEqualsToken:
                addCandidateType(usage, checker.getTypeAtLocation(parent.left === node ? parent.right : parent.left));
                break;
            case SyntaxKind.InKeyword:
                // `x in obj` means x is a property key.
                if (node === parent.left) {
                    usage.isString = true;
                }
                break;
            // LogicalOperator Or NullishCoalescing
            case SyntaxKind.BarBarToken:
            case SyntaxKind.QuestionQuestionToken:
                if (node === parent.left &&
                    (node.parent.parent.kind === SyntaxKind.VariableDeclaration || isAssignmentExpression(node.parent.parent, /*excludeCompoundAssignment*/ true))) {
                    // var x = x || {};
                    // TODO: use getFalsyflagsOfType
                    addCandidateType(usage, checker.getTypeAtLocation(parent.right));
                }
                break;
            case SyntaxKind.AmpersandAmpersandToken:
            case SyntaxKind.CommaToken:
            case SyntaxKind.InstanceOfKeyword:
                // nothing to infer here
                break;
        }
    }
    /** A case label's type must match the switched-on expression's type. */
    function inferTypeFromSwitchStatementLabel(parent: CaseOrDefaultClause, usage: Usage): void {
        addCandidateType(usage, checker.getTypeAtLocation(parent.parent.parent.expression));
    }
    /** Records a call or construct site: its argument types plus how the result is used. */
    function inferTypeFromCallExpression(parent: CallExpression | NewExpression, usage: Usage): void {
        const call: CallUsage = {
            argumentTypes: [],
            return_: createEmptyUsage()
        };
        if (parent.arguments) {
            for (const argument of parent.arguments) {
                call.argumentTypes.push(checker.getTypeAtLocation(argument));
            }
        }
        // Recurse on the call expression itself to learn how the return value is used.
        calculateUsageOfNode(parent, call.return_);
        if (parent.kind === SyntaxKind.CallExpression) {
            (usage.calls || (usage.calls = [])).push(call);
        }
        else {
            (usage.constructs || (usage.constructs = [])).push(call);
        }
    }
    /** Records usage of `x.name` under the property's escaped name. */
    function inferTypeFromPropertyAccessExpression(parent: PropertyAccessExpression, usage: Usage): void {
        const name = escapeLeadingUnderscores(parent.name.text);
        if (!usage.properties) {
            usage.properties = new Map();
        }
        const propertyUsage = usage.properties.get(name) || createEmptyUsage();
        calculateUsageOfNode(parent, propertyUsage);
        usage.properties.set(name, propertyUsage);
    }
function inferTypeFromPropertyElementExpression(parent: ElementAccessExpression, node: Expression, usage: Usage): void {
if (node === parent.argumentExpression) {
usage.isNumberOrString = true;
return;
}
else {
const indexType = checker.getTypeAtLocation(parent.argumentExpression);
const indexUsage = createEmptyUsage();
calculateUsageOfNode(parent, indexUsage);
if (indexType.flags & TypeFlags.NumberLike) {
usage.numberIndex = indexUsage;
}
else {
usage.stringIndex = indexUsage;
}
}
}
    /** A reference inside an object literal suggests the literal's type (or its variable's) as a `this` candidate. */
    function inferTypeFromPropertyAssignment(assignment: PropertyAssignment | ShorthandPropertyAssignment, usage: Usage) {
        const nodeWithRealType = isVariableDeclaration(assignment.parent.parent) ?
            assignment.parent.parent :
            assignment.parent;
        addCandidateThisType(usage, checker.getTypeAtLocation(nodeWithRealType));
    }
    /** A reference in a property initializer suggests the containing class as a `this` candidate. */
    function inferTypeFromPropertyDeclaration(declaration: PropertyDeclaration, usage: Usage) {
        addCandidateThisType(usage, checker.getTypeAtLocation(declaration.parent));
    }
    /** A pair of predicates: any inference matching `high` suppresses all inferences matching `low`. */
    interface Priority {
        high: (t: Type) => boolean;
        low: (t: Type) => boolean;
    }
    /** Drops every inference matched by a `low` predicate whose `high` counterpart also matched some inference. */
    function removeLowPriorityInferences(inferences: readonly Type[], priorities: Priority[]): Type[] {
        const toRemove: ((t: Type) => boolean)[] = [];
        for (const i of inferences) {
            for (const { high, low } of priorities) {
                if (high(i)) {
                    Debug.assert(!low(i), "Priority can't have both low and high");
                    toRemove.push(low)
                }
            }
        }
        return inferences.filter(i => toRemove.every(f => !f(i)));
    }
    /** Convenience: infer from a Usage record and combine into one type. */
    function combineFromUsage(usage: Usage) {
        return combineTypes(inferTypes(usage));
    }
    /**
     * Combines raw inferences into one widened union, merging anonymous
     * object types into a single structural type and letting more specific
     * evidence override weaker evidence (see numbered rules below).
     */
    function combineTypes(inferences: readonly Type[]): Type {
        if (!inferences.length) return checker.getAnyType();
        // 1. string or number individually override string | number
        // 2. non-any, non-void overrides any or void
        // 3. non-nullable, non-any, non-void, non-anonymous overrides anonymous types
        const stringNumber = checker.getUnionType([checker.getStringType(), checker.getNumberType()]);
        const priorities: Priority[] = [
            {
                high: t => t === checker.getStringType() || t === checker.getNumberType(),
                low: t => t === stringNumber
            },
            {
                high: t => !(t.flags & (TypeFlags.Any | TypeFlags.Void)),
                low: t => !!(t.flags & (TypeFlags.Any | TypeFlags.Void))
            },
            {
                high: t => !(t.flags & (TypeFlags.Nullable | TypeFlags.Any | TypeFlags.Void)) && !(getObjectFlags(t) & ObjectFlags.Anonymous),
                low: t => !!(getObjectFlags(t) & ObjectFlags.Anonymous)
            }];
        let good = removeLowPriorityInferences(inferences, priorities);
        const anons = good.filter(i => getObjectFlags(i) & ObjectFlags.Anonymous) as AnonymousType[];
        if (anons.length) {
            // Merge all anonymous object inferences into one structural type.
            good = good.filter(i => !(getObjectFlags(i) & ObjectFlags.Anonymous));
            good.push(combineAnonymousTypes(anons));
        }
        return checker.getWidenedType(checker.getUnionType(good.map(checker.getBaseTypeOfLiteralType), UnionReduction.Subtype));
    }
    /**
     * Structurally merges several anonymous object types into one: properties
     * are unioned (and marked optional when absent from some member),
     * call/construct signatures are concatenated, and index signatures are
     * unioned per kind.
     */
    function combineAnonymousTypes(anons: AnonymousType[]) {
        if (anons.length === 1) {
            return anons[0];
        }
        const calls = [];
        const constructs = [];
        const stringIndices = [];
        const numberIndices = [];
        let stringIndexReadonly = false;
        let numberIndexReadonly = false;
        const props = createMultiMap<Type>();
        for (const anon of anons) {
            for (const p of checker.getPropertiesOfType(anon)) {
                props.add(p.name, p.valueDeclaration ? checker.getTypeOfSymbolAtLocation(p, p.valueDeclaration) : checker.getAnyType());
            }
            calls.push(...checker.getSignaturesOfType(anon, SignatureKind.Call));
            constructs.push(...checker.getSignaturesOfType(anon, SignatureKind.Construct));
            const stringIndexInfo = checker.getIndexInfoOfType(anon, IndexKind.String);
            if (stringIndexInfo) {
                stringIndices.push(stringIndexInfo.type);
                stringIndexReadonly = stringIndexReadonly || stringIndexInfo.isReadonly;
            }
            const numberIndexInfo = checker.getIndexInfoOfType(anon, IndexKind.Number);
            if (numberIndexInfo) {
                numberIndices.push(numberIndexInfo.type);
                numberIndexReadonly = numberIndexReadonly || numberIndexInfo.isReadonly;
            }
        }
        const members = mapEntries(props, (name, types) => {
            // A property missing from some source type becomes optional in the merge.
            const isOptional = types.length < anons.length ? SymbolFlags.Optional : 0;
            const s = checker.createSymbol(SymbolFlags.Property | isOptional, name as __String);
            s.type = checker.getUnionType(types);
            return [name, s];
        });
        const indexInfos = [];
        if (stringIndices.length) indexInfos.push(checker.createIndexInfo(checker.getStringType(), checker.getUnionType(stringIndices), stringIndexReadonly));
        if (numberIndices.length) indexInfos.push(checker.createIndexInfo(checker.getNumberType(), checker.getUnionType(numberIndices), numberIndexReadonly));
        return checker.createAnonymousType(
            anons[0].symbol,
            members as UnderscoreEscapedMap<TransientSymbol>,
            calls,
            constructs,
            indexInfos);
    }
    /** Turns accumulated Usage evidence into a list of candidate types. */
    function inferTypes(usage: Usage): Type[] {
        const types = [];
        if (usage.isNumber) {
            types.push(checker.getNumberType());
        }
        if (usage.isString) {
            types.push(checker.getStringType());
        }
        if (usage.isNumberOrString) {
            types.push(checker.getUnionType([checker.getStringType(), checker.getNumberType()]));
        }
        if (usage.numberIndex) {
            // Numeric indexing reads as an array of the element usage's type.
            types.push(checker.createArrayType(combineFromUsage(usage.numberIndex)));
        }
        if (usage.properties?.size || usage.calls?.length || usage.constructs?.length || usage.stringIndex) {
            types.push(inferStructuralType(usage));
        }
        types.push(...(usage.candidateTypes || []).map(t => checker.getBaseTypeOfLiteralType(t)));
        types.push(...inferNamedTypesFromProperties(usage));
        return types;
    }
    /** Builds an anonymous object type from the observed properties, calls, constructs and string index. */
    function inferStructuralType(usage: Usage) {
        const members = new Map<__String, Symbol>();
        if (usage.properties) {
            usage.properties.forEach((u, name) => {
                const symbol = checker.createSymbol(SymbolFlags.Property, name);
                symbol.type = combineFromUsage(u);
                members.set(name, symbol);
            });
        }
        const callSignatures: Signature[] = usage.calls ? [getSignatureFromCalls(usage.calls)] : [];
        const constructSignatures: Signature[] = usage.constructs ? [getSignatureFromCalls(usage.constructs)] : [];
        const indexInfos = usage.stringIndex ? [checker.createIndexInfo(checker.getStringType(), combineFromUsage(usage.stringIndex), /*isReadonly*/ false)] : [];
        return checker.createAnonymousType(/*symbol*/ undefined, members, callSignatures, constructSignatures, indexInfos);
    }
    /**
     * Matches the used properties against the builtin candidates (string,
     * number, Array, Promise) and instantiates the ones that fit — but only
     * when the match is unambiguous enough (fewer than 3 candidates).
     */
    function inferNamedTypesFromProperties(usage: Usage): Type[] {
        if (!usage.properties || !usage.properties.size) return [];
        const types = builtins.filter(t => allPropertiesAreAssignableToUsage(t, usage));
        if (0 < types.length && types.length < 3) {
            return types.map(t => inferInstantiationFromUsage(t, usage));
        }
        return [];
    }
    /** True when every used property exists on `type` and its type is assignable to the observed usage. */
    function allPropertiesAreAssignableToUsage(type: Type, usage: Usage) {
        if (!usage.properties) return false;
        // forEachEntry returns truthy on the first failing property, so negate the whole thing.
        return !forEachEntry(usage.properties, (propUsage, name) => {
            const source = checker.getTypeOfPropertyOfType(type, name as string);
            if (!source) {
                return true;
            }
            if (propUsage.calls) {
                const sigs = checker.getSignaturesOfType(source, SignatureKind.Call);
                return !sigs.length || !checker.isTypeAssignableTo(source, getFunctionFromCalls(propUsage.calls));
            }
            else {
                return !checker.isTypeAssignableTo(source, combineFromUsage(propUsage));
            }
        });
    }
/**
* inference is limited to
* 1. generic types with a single parameter
* 2. inference to/from calls with a single signature
*/
function inferInstantiationFromUsage(type: Type, usage: Usage) {
if (!(getObjectFlags(type) & ObjectFlags.Reference) || !usage.properties) {
return type;
}
const generic = (type as TypeReference).target;
const singleTypeParameter = singleOrUndefined(generic.typeParameters);
if (!singleTypeParameter) return type;
const types: Type[] = [];
usage.properties.forEach((propUsage, name) => {
const genericPropertyType = checker.getTypeOfPropertyOfType(generic, name as string);
Debug.assert(!!genericPropertyType, "generic should have all the properties of its reference.");
types.push(...inferTypeParameters(genericPropertyType, combineFromUsage(propUsage), singleTypeParameter));
});
<|fim▁hole|> }
    /**
     * Recursively collects the types that `usageType` supplies for
     * `typeParameter` by structurally matching it against `genericType`
     * (direct occurrence, union/intersection members, type-reference
     * arguments, and single call signatures).
     */
    function inferTypeParameters(genericType: Type, usageType: Type, typeParameter: Type): readonly Type[] {
        if (genericType === typeParameter) {
            return [usageType];
        }
        else if (genericType.flags & TypeFlags.UnionOrIntersection) {
            return flatMap((genericType as UnionOrIntersectionType).types, t => inferTypeParameters(t, usageType, typeParameter));
        }
        else if (getObjectFlags(genericType) & ObjectFlags.Reference && getObjectFlags(usageType) & ObjectFlags.Reference) {
            // this is wrong because we need a reference to the targetType to, so we can check that it's also a reference
            const genericArgs = checker.getTypeArguments(genericType as TypeReference);
            const usageArgs = checker.getTypeArguments(usageType as TypeReference);
            const types = [];
            if (genericArgs && usageArgs) {
                for (let i = 0; i < genericArgs.length; i++) {
                    if (usageArgs[i]) {
                        types.push(...inferTypeParameters(genericArgs[i], usageArgs[i], typeParameter));
                    }
                }
            }
            return types;
        }
        const genericSigs = checker.getSignaturesOfType(genericType, SignatureKind.Call);
        const usageSigs = checker.getSignaturesOfType(usageType, SignatureKind.Call);
        if (genericSigs.length === 1 && usageSigs.length === 1) {
            return inferFromSignatures(genericSigs[0], usageSigs[0], typeParameter);
        }
        return [];
    }
    /** Matches two call signatures parameter-by-parameter (unwrapping rest arrays) and by return type. */
    function inferFromSignatures(genericSig: Signature, usageSig: Signature, typeParameter: Type) {
        const types = [];
        for (let i = 0; i < genericSig.parameters.length; i++) {
            const genericParam = genericSig.parameters[i];
            const usageParam = usageSig.parameters[i];
            const isRest = genericSig.declaration && isRestParameter(genericSig.declaration.parameters[i]);
            if (!usageParam) {
                break;
            }
            let genericParamType = genericParam.valueDeclaration ? checker.getTypeOfSymbolAtLocation(genericParam, genericParam.valueDeclaration) : checker.getAnyType();
            const elementType = isRest && checker.getElementTypeOfArrayType(genericParamType);
            if (elementType) {
                // Compare against the rest parameter's element type, not the array.
                genericParamType = elementType;
            }
            const targetType = (usageParam as SymbolLinks).type
                || (usageParam.valueDeclaration ? checker.getTypeOfSymbolAtLocation(usageParam, usageParam.valueDeclaration) : checker.getAnyType());
            types.push(...inferTypeParameters(genericParamType, targetType, typeParameter));
        }
        const genericReturn = checker.getReturnTypeOfSignature(genericSig);
        const usageReturn = checker.getReturnTypeOfSignature(usageSig);
        types.push(...inferTypeParameters(genericReturn, usageReturn, typeParameter));
        return types;
    }
    /** Wraps the combined call-site signature in a bare callable type. */
    function getFunctionFromCalls(calls: CallUsage[]) {
        return checker.createAnonymousType(/*symbol*/ undefined, createSymbolTable(), [getSignatureFromCalls(calls)], emptyArray, emptyArray);
    }
    /**
     * Synthesizes one signature covering all observed calls: parameter `argN`
     * gets the union of argument types at position N (optional when some call
     * omitted it), and the return type combines how every result was used.
     */
    function getSignatureFromCalls(calls: CallUsage[]): Signature {
        const parameters: Symbol[] = [];
        const length = Math.max(...calls.map(c => c.argumentTypes.length));
        for (let i = 0; i < length; i++) {
            const symbol = checker.createSymbol(SymbolFlags.FunctionScopedVariable, escapeLeadingUnderscores(`arg${i}`));
            symbol.type = combineTypes(calls.map(call => call.argumentTypes[i] || checker.getUndefinedType()));
            if (calls.some(call => call.argumentTypes[i] === undefined)) {
                symbol.flags |= SymbolFlags.Optional;
            }
            parameters.push(symbol);
        }
        const returnType = combineFromUsage(combineUsages(calls.map(call => call.return_)));
        return checker.createSignature(/*declaration*/ undefined, /*typeParameters*/ undefined, /*thisParameter*/ undefined, parameters, returnType, /*typePredicate*/ undefined, length, SignatureFlags.None);
    }
    /** Records `type` as candidate evidence, ignoring any/never which carry no information. */
    function addCandidateType(usage: Usage, type: Type | undefined) {
        if (type && !(type.flags & TypeFlags.Any) && !(type.flags & TypeFlags.Never)) {
            (usage.candidateTypes || (usage.candidateTypes = [])).push(type);
        }
    }
    /** Records `type` as a candidate `this` type, ignoring any/never. */
    function addCandidateThisType(usage: Usage, type: Type | undefined) {
        if (type && !(type.flags & TypeFlags.Any) && !(type.flags & TypeFlags.Never)) {
            (usage.candidateThisTypes || (usage.candidateThisTypes = [])).push(type);
        }
    }
}
}<|fim▁end|>
|
            // NOTE(review): stray fill-in-the-middle completion text; this statement belongs
            // at the end of inferTypeFromReferences > inferInstantiationFromUsage:
            // return builtinConstructors[type.symbol.escapedName as string](combineTypes(types));
|
// ---- concatenated file boundary (was a garbled FIM marker): main.go ----
package main
import (
"encoding/json"
"flag"
"fmt"
"gopkg.in/mgo.v2/bson"
"os"
"path/filepath"
"strings"
"time"
"github.com/apache/thrift/lib/go/thrift"
"github.com/go-kit/kit/log"
thriftclient "github.com/banerwai/micros/command/workhistory/client/thrift"
"github.com/banerwai/micros/command/workhistory/service"
thriftworkhistory "github.com/banerwai/micros/command/workhistory/thrift/gen-go/workhistory"
banerwaicrypto "github.com/banerwai/gommon/crypto"
"github.com/banerwai/global/bean"
)
func main() {
var (
thriftAddr = flag.String("thrift.addr", "localhost:36080", "Address for Thrift server")
thriftProtocol = flag.String("thrift.protocol", "binary", "binary, compact, json, simplejson")
thriftBufferSize = flag.Int("thrift.buffer.size", 0, "0 for unbuffered")
thriftFramed = flag.Bool("thrift.framed", false, "true to enable framing")
_defaultObjectID = flag.String("default.user.ojbectid", "5707cb10ae6faa1d1071a189", "default user ojbectid")
)
flag.Parse()
if len(os.Args) < 2 {
fmt.Fprintf(os.Stderr, "\n%s [flags] method arg1 arg2\n\n", filepath.Base(os.Args[0]))
flag.Usage()
os.Exit(1)
}
_instances := strings.Split(*thriftAddr, ",")
_instancesRandomIndex := banerwaicrypto.GetRandomItNum(len(_instances))
method := flag.Arg(0)
var logger log.Logger
logger = log.NewLogfmtLogger(os.Stdout)
logger = log.NewContext(logger).With("caller", log.DefaultCaller)
var svc service.WorkHistoryService
var protocolFactory thrift.TProtocolFactory
switch *thriftProtocol {
case "compact":
protocolFactory = thrift.NewTCompactProtocolFactory()
case "simplejson":
protocolFactory = thrift.NewTSimpleJSONProtocolFactory()
case "json":<|fim▁hole|> case "binary", "":
protocolFactory = thrift.NewTBinaryProtocolFactoryDefault()
default:
logger.Log("protocol", *thriftProtocol, "err", "invalid protocol")
os.Exit(1)
}
var transportFactory thrift.TTransportFactory
if *thriftBufferSize > 0 {
transportFactory = thrift.NewTBufferedTransportFactory(*thriftBufferSize)
} else {
transportFactory = thrift.NewTTransportFactory()
}
if *thriftFramed {
transportFactory = thrift.NewTFramedTransportFactory(transportFactory)
}
transportSocket, err := thrift.NewTSocket(_instances[_instancesRandomIndex])
if err != nil {
logger.Log("during", "thrift.NewTSocket", "err", err)
os.Exit(1)
}
trans := transportFactory.GetTransport(transportSocket)
defer trans.Close()
if err := trans.Open(); err != nil {
logger.Log("during", "thrift transport.Open", "err", err)
os.Exit(1)
}
cli := thriftworkhistory.NewWorkHistoryServiceClientFactory(trans, protocolFactory)
svc = thriftclient.New(cli, logger)
begin := time.Now()
switch method {
case "ping":
v := svc.Ping()
logger.Log("method", "Ping", "v", v, "took", time.Since(begin))
case "upsert":
var _obj bean.WorkHistory
_obj.ID = bson.ObjectIdHex(*_defaultObjectID)
_obj.ProfileID = bson.ObjectIdHex(*_defaultObjectID)
var lsWorkHistoryAndFeedbacks []bean.WorkHistoryAndFeedback
var _WorkHistoryAndFeedback1 bean.WorkHistoryAndFeedback
_WorkHistoryAndFeedback1.Title = "ceshi"
_WorkHistoryAndFeedback1.WorkPeriod = "2016.01-2016.04"
_WorkHistoryAndFeedback1.WorkHours = 40
var lsWorkFeedbacks []bean.WorkFeedback
var _WorkFeedback1 bean.WorkFeedback
_WorkFeedback1.WorkRate = 5
_WorkFeedback1.Feedback = "perfect"
lsWorkFeedbacks = append(lsWorkFeedbacks, _WorkFeedback1)
var _WorkFeedback2 bean.WorkFeedback
_WorkFeedback2.WorkRate = 5
_WorkFeedback2.Feedback = "good job"
lsWorkFeedbacks = append(lsWorkFeedbacks, _WorkFeedback2)
_WorkHistoryAndFeedback1.WorkFeedbacks = lsWorkFeedbacks
lsWorkHistoryAndFeedbacks = append(lsWorkHistoryAndFeedbacks, _WorkHistoryAndFeedback1)
_obj.HistoryAndFeedbacks = lsWorkHistoryAndFeedbacks
b, _ := json.Marshal(_obj)
v := svc.UpdateWorkHistory(*_defaultObjectID, string(b))
logger.Log("method", "UpdateWorkHistory", "v", v, "took", time.Since(begin))
default:
logger.Log("err", "invalid method "+method)
os.Exit(1)
}
}<|fim▁end|>
|
protocolFactory = thrift.NewTJSONProtocolFactory()
|
<|file_name|>rx.binding.js<|end_file_name|><|fim▁begin|>/* */
"format cjs";
;
(function(factory) {
var objectTypes = {
'function': true,
'object': true
};
function checkGlobal(value) {
return (value && value.Object === Object) ? value : null;
}
var freeExports = (objectTypes[typeof exports] && exports && !exports.nodeType) ? exports : null;
var freeModule = (objectTypes[typeof module] && module && !module.nodeType) ? module : null;
var freeGlobal = checkGlobal(freeExports && freeModule && typeof global === 'object' && global);
var freeSelf = checkGlobal(objectTypes[typeof self] && self);
var freeWindow = checkGlobal(objectTypes[typeof window] && window);
var moduleExports = (freeModule && freeModule.exports === freeExports) ? freeExports : null;
var thisGlobal = checkGlobal(objectTypes[typeof this] && this);
var root = freeGlobal || ((freeWindow !== (thisGlobal && thisGlobal.window)) && freeWindow) || freeSelf || thisGlobal || Function('return this')();
if (typeof define === 'function' && define.amd) {
define(['./rx'], function(Rx, exports) {
return factory(root, exports, Rx);
});
} else if (typeof module === 'object' && module && module.exports === freeExports) {
module.exports = factory(root, module.exports, require('./rx'));
} else {
root.Rx = factory(root, {}, root.Rx);
}
}.call(this, function(root, exp, Rx, undefined) {
var Observable = Rx.Observable,
observableProto = Observable.prototype,
AnonymousObservable = Rx.AnonymousObservable,
ObservableBase = Rx.ObservableBase,
Subject = Rx.Subject,
AsyncSubject = Rx.AsyncSubject,
Observer = Rx.Observer,
ScheduledObserver = Rx.internals.ScheduledObserver,
disposableCreate = Rx.Disposable.create,
disposableEmpty = Rx.Disposable.empty,
BinaryDisposable = Rx.BinaryDisposable,
currentThreadScheduler = Rx.Scheduler.currentThread,
isFunction = Rx.helpers.isFunction,
inherits = Rx.internals.inherits,
addProperties = Rx.internals.addProperties,
checkDisposed = Rx.Disposable.checkDisposed;
function cloneArray(arr) {
var len = arr.length,
a = new Array(len);
for (var i = 0; i < len; i++) {
a[i] = arr[i];
}
return a;
}
var MulticastObservable = (function(__super__) {
inherits(MulticastObservable, __super__);
function MulticastObservable(source, fn1, fn2) {
this.source = source;
this._fn1 = fn1;
this._fn2 = fn2;
__super__.call(this);
}
MulticastObservable.prototype.subscribeCore = function(o) {
var connectable = this.source.multicast(this._fn1());
return new BinaryDisposable(this._fn2(connectable).subscribe(o), connectable.connect());
};
return MulticastObservable;
}(ObservableBase));
observableProto.multicast = function(subjectOrSubjectSelector, selector) {
return isFunction(subjectOrSubjectSelector) ? new MulticastObservable(this, subjectOrSubjectSelector, selector) : new ConnectableObservable(this, subjectOrSubjectSelector);
};
observableProto.publish = function(selector) {<|fim▁hole|> return selector && isFunction(selector) ? this.multicast(function() {
return new Subject();
}, selector) : this.multicast(new Subject());
};
observableProto.share = function() {
return this.publish().refCount();
};
observableProto.publishLast = function(selector) {
return selector && isFunction(selector) ? this.multicast(function() {
return new AsyncSubject();
}, selector) : this.multicast(new AsyncSubject());
};
observableProto.publishValue = function(initialValueOrSelector, initialValue) {
return arguments.length === 2 ? this.multicast(function() {
return new BehaviorSubject(initialValue);
}, initialValueOrSelector) : this.multicast(new BehaviorSubject(initialValueOrSelector));
};
observableProto.shareValue = function(initialValue) {
return this.publishValue(initialValue).refCount();
};
observableProto.replay = function(selector, bufferSize, windowSize, scheduler) {
return selector && isFunction(selector) ? this.multicast(function() {
return new ReplaySubject(bufferSize, windowSize, scheduler);
}, selector) : this.multicast(new ReplaySubject(bufferSize, windowSize, scheduler));
};
observableProto.shareReplay = function(bufferSize, windowSize, scheduler) {
return this.replay(null, bufferSize, windowSize, scheduler).refCount();
};
var InnerSubscription = function(s, o) {
this._s = s;
this._o = o;
};
InnerSubscription.prototype.dispose = function() {
if (!this._s.isDisposed && this._o !== null) {
var idx = this._s.observers.indexOf(this._o);
this._s.observers.splice(idx, 1);
this._o = null;
}
};
var BehaviorSubject = Rx.BehaviorSubject = (function(__super__) {
inherits(BehaviorSubject, __super__);
function BehaviorSubject(value) {
__super__.call(this);
this.value = value;
this.observers = [];
this.isDisposed = false;
this.isStopped = false;
this.hasError = false;
}
addProperties(BehaviorSubject.prototype, Observer.prototype, {
_subscribe: function(o) {
checkDisposed(this);
if (!this.isStopped) {
this.observers.push(o);
o.onNext(this.value);
return new InnerSubscription(this, o);
}
if (this.hasError) {
o.onError(this.error);
} else {
o.onCompleted();
}
return disposableEmpty;
},
getValue: function() {
checkDisposed(this);
if (this.hasError) {
thrower(this.error);
}
return this.value;
},
hasObservers: function() {
return this.observers.length > 0;
},
onCompleted: function() {
checkDisposed(this);
if (this.isStopped) {
return;
}
this.isStopped = true;
for (var i = 0,
os = cloneArray(this.observers),
len = os.length; i < len; i++) {
os[i].onCompleted();
}
this.observers.length = 0;
},
onError: function(error) {
checkDisposed(this);
if (this.isStopped) {
return;
}
this.isStopped = true;
this.hasError = true;
this.error = error;
for (var i = 0,
os = cloneArray(this.observers),
len = os.length; i < len; i++) {
os[i].onError(error);
}
this.observers.length = 0;
},
onNext: function(value) {
checkDisposed(this);
if (this.isStopped) {
return;
}
this.value = value;
for (var i = 0,
os = cloneArray(this.observers),
len = os.length; i < len; i++) {
os[i].onNext(value);
}
},
dispose: function() {
this.isDisposed = true;
this.observers = null;
this.value = null;
this.error = null;
}
});
return BehaviorSubject;
}(Observable));
var ReplaySubject = Rx.ReplaySubject = (function(__super__) {
var maxSafeInteger = Math.pow(2, 53) - 1;
function createRemovableDisposable(subject, observer) {
return disposableCreate(function() {
observer.dispose();
!subject.isDisposed && subject.observers.splice(subject.observers.indexOf(observer), 1);
});
}
inherits(ReplaySubject, __super__);
function ReplaySubject(bufferSize, windowSize, scheduler) {
this.bufferSize = bufferSize == null ? maxSafeInteger : bufferSize;
this.windowSize = windowSize == null ? maxSafeInteger : windowSize;
this.scheduler = scheduler || currentThreadScheduler;
this.q = [];
this.observers = [];
this.isStopped = false;
this.isDisposed = false;
this.hasError = false;
this.error = null;
__super__.call(this);
}
addProperties(ReplaySubject.prototype, Observer.prototype, {
_subscribe: function(o) {
checkDisposed(this);
var so = new ScheduledObserver(this.scheduler, o),
subscription = createRemovableDisposable(this, so);
this._trim(this.scheduler.now());
this.observers.push(so);
for (var i = 0,
len = this.q.length; i < len; i++) {
so.onNext(this.q[i].value);
}
if (this.hasError) {
so.onError(this.error);
} else if (this.isStopped) {
so.onCompleted();
}
so.ensureActive();
return subscription;
},
hasObservers: function() {
return this.observers.length > 0;
},
_trim: function(now) {
while (this.q.length > this.bufferSize) {
this.q.shift();
}
while (this.q.length > 0 && (now - this.q[0].interval) > this.windowSize) {
this.q.shift();
}
},
onNext: function(value) {
checkDisposed(this);
if (this.isStopped) {
return;
}
var now = this.scheduler.now();
this.q.push({
interval: now,
value: value
});
this._trim(now);
for (var i = 0,
os = cloneArray(this.observers),
len = os.length; i < len; i++) {
var observer = os[i];
observer.onNext(value);
observer.ensureActive();
}
},
onError: function(error) {
checkDisposed(this);
if (this.isStopped) {
return;
}
this.isStopped = true;
this.error = error;
this.hasError = true;
var now = this.scheduler.now();
this._trim(now);
for (var i = 0,
os = cloneArray(this.observers),
len = os.length; i < len; i++) {
var observer = os[i];
observer.onError(error);
observer.ensureActive();
}
this.observers.length = 0;
},
onCompleted: function() {
checkDisposed(this);
if (this.isStopped) {
return;
}
this.isStopped = true;
var now = this.scheduler.now();
this._trim(now);
for (var i = 0,
os = cloneArray(this.observers),
len = os.length; i < len; i++) {
var observer = os[i];
observer.onCompleted();
observer.ensureActive();
}
this.observers.length = 0;
},
dispose: function() {
this.isDisposed = true;
this.observers = null;
}
});
return ReplaySubject;
}(Observable));
var RefCountObservable = (function(__super__) {
inherits(RefCountObservable, __super__);
function RefCountObservable(source) {
this.source = source;
this._count = 0;
this._connectableSubscription = null;
__super__.call(this);
}
RefCountObservable.prototype.subscribeCore = function(o) {
var subscription = this.source.subscribe(o);
++this._count === 1 && (this._connectableSubscription = this.source.connect());
return new RefCountDisposable(this, subscription);
};
function RefCountDisposable(p, s) {
this._p = p;
this._s = s;
this.isDisposed = false;
}
RefCountDisposable.prototype.dispose = function() {
if (!this.isDisposed) {
this.isDisposed = true;
this._s.dispose();
--this._p._count === 0 && this._p._connectableSubscription.dispose();
}
};
return RefCountObservable;
}(ObservableBase));
var ConnectableObservable = Rx.ConnectableObservable = (function(__super__) {
inherits(ConnectableObservable, __super__);
function ConnectableObservable(source, subject) {
this.source = source;
this._connection = null;
this._source = source.asObservable();
this._subject = subject;
__super__.call(this);
}
function ConnectDisposable(parent, subscription) {
this._p = parent;
this._s = subscription;
}
ConnectDisposable.prototype.dispose = function() {
if (this._s) {
this._s.dispose();
this._s = null;
this._p._connection = null;
}
};
ConnectableObservable.prototype.connect = function() {
if (!this._connection) {
var subscription = this._source.subscribe(this._subject);
this._connection = new ConnectDisposable(this, subscription);
}
return this._connection;
};
ConnectableObservable.prototype._subscribe = function(o) {
return this._subject.subscribe(o);
};
ConnectableObservable.prototype.refCount = function() {
return new RefCountObservable(this);
};
return ConnectableObservable;
}(Observable));
observableProto.singleInstance = function() {
var source = this,
hasObservable = false,
observable;
function getObservable() {
if (!hasObservable) {
hasObservable = true;
observable = source['finally'](function() {
hasObservable = false;
}).publish().refCount();
}
return observable;
}
return new AnonymousObservable(function(o) {
return getObservable().subscribe(o);
});
};
return Rx;
}));<|fim▁end|>
| |
<|file_name|>proc_macro_harness.rs<|end_file_name|><|fim▁begin|>use std::mem;
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::visit::{self, Visitor};
use rustc_ast::{self as ast, NodeId};
use rustc_ast_pretty::pprust;
use rustc_expand::base::{parse_macro_name_and_helper_attrs, ExtCtxt, ResolverExpand};
use rustc_expand::expand::{AstFragment, ExpansionConfig};
use rustc_session::Session;
use rustc_span::hygiene::AstPass;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use smallvec::smallvec;
struct ProcMacroDerive {
id: NodeId,
trait_name: Symbol,
function_name: Ident,
span: Span,
attrs: Vec<Symbol>,
}
enum ProcMacroDefType {
Attr,
Bang,
}
struct ProcMacroDef {
id: NodeId,
function_name: Ident,
span: Span,
def_type: ProcMacroDefType,
}
enum ProcMacro {
Derive(ProcMacroDerive),
Def(ProcMacroDef),
}
struct CollectProcMacros<'a> {
sess: &'a Session,
macros: Vec<ProcMacro>,
in_root: bool,
handler: &'a rustc_errors::Handler,
source_map: &'a SourceMap,
is_proc_macro_crate: bool,
is_test_crate: bool,
}
pub fn inject(
sess: &Session,
resolver: &mut dyn ResolverExpand,
mut krate: ast::Crate,
is_proc_macro_crate: bool,
has_proc_macro_decls: bool,
is_test_crate: bool,
num_crate_types: usize,
handler: &rustc_errors::Handler,
) -> ast::Crate {
let ecfg = ExpansionConfig::default("proc_macro".to_string());
let mut cx = ExtCtxt::new(sess, ecfg, resolver, None);
let mut collect = CollectProcMacros {
sess,
macros: Vec::new(),
in_root: true,
handler,
source_map: sess.source_map(),
is_proc_macro_crate,
is_test_crate,
};
if has_proc_macro_decls || is_proc_macro_crate {
visit::walk_crate(&mut collect, &krate);
}
let macros = collect.macros;
if !is_proc_macro_crate {
return krate;
}
if num_crate_types > 1 {
handler.err("cannot mix `proc-macro` crate type with others");
}
if is_test_crate {
return krate;
}
let decls = mk_decls(&mut cx, ¯os);
krate.items.push(decls);
krate
}
impl<'a> CollectProcMacros<'a> {
fn check_not_pub_in_root(&self, vis: &ast::Visibility, sp: Span) {
if self.is_proc_macro_crate && self.in_root && vis.kind.is_pub() {
self.handler.span_err(
sp,
"`proc-macro` crate types currently cannot export any items other \
than functions tagged with `#[proc_macro]`, `#[proc_macro_derive]`, \
or `#[proc_macro_attribute]`",
);
}
}
fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
let (trait_name, proc_attrs) =
match parse_macro_name_and_helper_attrs(self.handler, attr, "derive") {
Some(name_and_attrs) => name_and_attrs,
None => return,
};
if self.in_root && item.vis.kind.is_pub() {
self.macros.push(ProcMacro::Derive(ProcMacroDerive {
id: item.id,
span: item.span,
trait_name,
function_name: item.ident,
attrs: proc_attrs,
}));
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro_derive]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro_derive]` must be `pub`"
};
self.handler.span_err(self.source_map.guess_head_span(item.span), msg);
}
}
fn collect_attr_proc_macro(&mut self, item: &'a ast::Item) {
if self.in_root && item.vis.kind.is_pub() {
self.macros.push(ProcMacro::Def(ProcMacroDef {
id: item.id,
span: item.span,
function_name: item.ident,
def_type: ProcMacroDefType::Attr,
}));
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro_attribute]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro_attribute]` must be `pub`"
};
self.handler.span_err(self.source_map.guess_head_span(item.span), msg);
}
}
fn collect_bang_proc_macro(&mut self, item: &'a ast::Item) {
if self.in_root && item.vis.kind.is_pub() {
self.macros.push(ProcMacro::Def(ProcMacroDef {
id: item.id,
span: item.span,
function_name: item.ident,
def_type: ProcMacroDefType::Bang,
}));
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro]` must be `pub`"
};
self.handler.span_err(self.source_map.guess_head_span(item.span), msg);
}
}
}
impl<'a> Visitor<'a> for CollectProcMacros<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
if let ast::ItemKind::MacroDef(..) = item.kind {
if self.is_proc_macro_crate && self.sess.contains_name(&item.attrs, sym::macro_export) {
let msg =
"cannot export macro_rules! macros from a `proc-macro` crate type currently";
self.handler.span_err(self.source_map.guess_head_span(item.span), msg);
}
}
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.<|fim▁hole|> let is_fn = matches!(item.kind, ast::ItemKind::Fn(..));
let mut found_attr: Option<&'a ast::Attribute> = None;
for attr in &item.attrs {
if self.sess.is_proc_macro_attr(&attr) {
if let Some(prev_attr) = found_attr {
let prev_item = prev_attr.get_normal_item();
let item = attr.get_normal_item();
let path_str = pprust::path_to_string(&item.path);
let msg = if item.path.segments[0].ident.name
== prev_item.path.segments[0].ident.name
{
format!(
"only one `#[{}]` attribute is allowed on any given function",
path_str,
)
} else {
format!(
"`#[{}]` and `#[{}]` attributes cannot both be applied
to the same function",
path_str,
pprust::path_to_string(&prev_item.path),
)
};
self.handler
.struct_span_err(attr.span, &msg)
.span_label(prev_attr.span, "previous attribute here")
.emit();
return;
}
found_attr = Some(attr);
}
}
let attr = match found_attr {
None => {
self.check_not_pub_in_root(&item.vis, self.source_map.guess_head_span(item.span));
let prev_in_root = mem::replace(&mut self.in_root, false);
visit::walk_item(self, item);
self.in_root = prev_in_root;
return;
}
Some(attr) => attr,
};
if !is_fn {
let msg = format!(
"the `#[{}]` attribute may only be used on bare functions",
pprust::path_to_string(&attr.get_normal_item().path),
);
self.handler.span_err(attr.span, &msg);
return;
}
if self.is_test_crate {
return;
}
if !self.is_proc_macro_crate {
let msg = format!(
"the `#[{}]` attribute is only usable with crates of the `proc-macro` crate type",
pprust::path_to_string(&attr.get_normal_item().path),
);
self.handler.span_err(attr.span, &msg);
return;
}
if attr.has_name(sym::proc_macro_derive) {
self.collect_custom_derive(item, attr);
} else if attr.has_name(sym::proc_macro_attribute) {
self.collect_attr_proc_macro(item);
} else if attr.has_name(sym::proc_macro) {
self.collect_bang_proc_macro(item);
};
let prev_in_root = mem::replace(&mut self.in_root, false);
visit::walk_item(self, item);
self.in_root = prev_in_root;
}
}
// Creates a new module which looks like:
//
// const _: () = {
// extern crate proc_macro;
//
// use proc_macro::bridge::client::ProcMacro;
//
// #[rustc_proc_macro_decls]
// #[allow(deprecated)]
// static DECLS: &[ProcMacro] = &[
// ProcMacro::custom_derive($name_trait1, &[], ::$name1);
// ProcMacro::custom_derive($name_trait2, &["attribute_name"], ::$name2);
// // ...
// ];
// }
fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> {
let expn_id = cx.resolver.expansion_for_ast_pass(
DUMMY_SP,
AstPass::ProcMacroHarness,
&[sym::rustc_attrs, sym::proc_macro_internals],
None,
);
let span = DUMMY_SP.with_def_site_ctxt(expn_id.to_expn_id());
let proc_macro = Ident::new(sym::proc_macro, span);
let krate = cx.item(span, proc_macro, Vec::new(), ast::ItemKind::ExternCrate(None));
let bridge = Ident::new(sym::bridge, span);
let client = Ident::new(sym::client, span);
let proc_macro_ty = Ident::new(sym::ProcMacro, span);
let custom_derive = Ident::new(sym::custom_derive, span);
let attr = Ident::new(sym::attr, span);
let bang = Ident::new(sym::bang, span);
// We add NodeIds to 'resolver.proc_macros' in the order
// that we generate expressions. The position of each NodeId
// in the 'proc_macros' Vec corresponds to its position
// in the static array that will be generated
let decls = {
let local_path = |cx: &ExtCtxt<'_>, sp: Span, name| {
cx.expr_path(cx.path(sp.with_ctxt(span.ctxt()), vec![name]))
};
let proc_macro_ty_method_path = |cx: &ExtCtxt<'_>, method| {
cx.expr_path(cx.path(span, vec![proc_macro, bridge, client, proc_macro_ty, method]))
};
macros
.iter()
.map(|m| match m {
ProcMacro::Derive(cd) => {
cx.resolver.declare_proc_macro(cd.id);
cx.expr_call(
span,
proc_macro_ty_method_path(cx, custom_derive),
vec![
cx.expr_str(cd.span, cd.trait_name),
cx.expr_vec_slice(
span,
cd.attrs
.iter()
.map(|&s| cx.expr_str(cd.span, s))
.collect::<Vec<_>>(),
),
local_path(cx, cd.span, cd.function_name),
],
)
}
ProcMacro::Def(ca) => {
cx.resolver.declare_proc_macro(ca.id);
let ident = match ca.def_type {
ProcMacroDefType::Attr => attr,
ProcMacroDefType::Bang => bang,
};
cx.expr_call(
span,
proc_macro_ty_method_path(cx, ident),
vec![
cx.expr_str(ca.span, ca.function_name.name),
local_path(cx, ca.span, ca.function_name),
],
)
}
})
.collect()
};
let decls_static = cx
.item_static(
span,
Ident::new(sym::_DECLS, span),
cx.ty_rptr(
span,
cx.ty(
span,
ast::TyKind::Slice(
cx.ty_path(cx.path(span, vec![proc_macro, bridge, client, proc_macro_ty])),
),
),
None,
ast::Mutability::Not,
),
ast::Mutability::Not,
cx.expr_vec_slice(span, decls),
)
.map(|mut i| {
let attr = cx.meta_word(span, sym::rustc_proc_macro_decls);
i.attrs.push(cx.attribute(attr));
let deprecated_attr = attr::mk_nested_word_item(Ident::new(sym::deprecated, span));
let allow_deprecated_attr =
attr::mk_list_item(Ident::new(sym::allow, span), vec![deprecated_attr]);
i.attrs.push(cx.attribute(allow_deprecated_attr));
i
});
let block = cx.expr_block(
cx.block(span, vec![cx.stmt_item(span, krate), cx.stmt_item(span, decls_static)]),
);
let anon_constant = cx.item_const(
span,
Ident::new(kw::Underscore, span),
cx.ty(span, ast::TyKind::Tup(Vec::new())),
block,
);
// Integrate the new item into existing module structures.
let items = AstFragment::Items(smallvec![anon_constant]);
cx.monotonic_expander().fully_expand_fragment(items).make_items().pop().unwrap()
}<|fim▁end|>
|
//
// If we find one, try to locate a `#[proc_macro_derive]` attribute on it.
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from models import *
import datetime
admin.site.register(PingHost)<|fim▁hole|><|fim▁end|>
|
admin.site.register(PingResult)
|
<|file_name|>bitcoin_nb.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="nb" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Podcoin</source>
<translation>Om Podcoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>Podcoin</b> version</source>
<translation><b>Podcoin</b> versjon</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Dette er eksperimentell programvare.
Distribuert under MIT/X11 programvarelisensen, se medfølgende fil COPYING eller http://www.opensource.org/licenses/mit-license.php.
Dette produktet inneholder programvare utviklet av OpenSSL prosjektet for bruk i OpenSSL Toolkit (http://www.openssl.org/) og kryptografisk programvare skrevet av Eric Young ([email protected]) og UPnP programvare skrevet av Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The Podcoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Adressebok</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Dobbeltklikk for å redigere adresse eller merkelapp</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Lag en ny adresse</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopier den valgte adressen til systemets utklippstavle</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Ny Adresse</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Podcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Dette er dine Podcoin-adresser for mottak av betalinger. Du kan gi forskjellige adresser til alle som skal betale deg for å holde bedre oversikt.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Kopier Adresse</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Vis &QR Kode</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Podcoin address</source>
<translation>Signer en melding for å bevise at du eier en Podcoin-adresse</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Signér &Melding</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Slett den valgte adressen fra listen.</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Eksporter data fra nåværende fane til fil</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Podcoin address</source>
<translation>Verifiser en melding for å være sikker på at den ble signert av en angitt Podcoin-adresse</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Verifiser Melding</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Slett</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Podcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Kopier &Merkelapp</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Rediger</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>Send &Coins</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Eksporter adressebok</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Kommaseparert fil (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Feil ved eksportering</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Kunne ikke skrive til filen %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Merkelapp</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(ingen merkelapp)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Dialog for Adgangsfrase</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Angi adgangsfrase</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Ny adgangsfrase</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Gjenta ny adgangsfrase</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Skriv inn den nye adgangsfrasen for lommeboken.<br/>Vennligst bruk en adgangsfrase med <b>10 eller flere tilfeldige tegn</b>, eller <b>åtte eller flere ord</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Krypter lommebok</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Denne operasjonen krever adgangsfrasen til lommeboken for å låse den opp.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Lås opp lommebok</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Denne operasjonen krever adgangsfrasen til lommeboken for å dekryptere den.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dekrypter lommebok</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Endre adgangsfrase</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Skriv inn gammel og ny adgangsfrase for lommeboken.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Bekreft kryptering av lommebok</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR PODCOINS</b>!</source>
<translation>Advarsel: Hvis du krypterer lommeboken og mister adgangsfrasen, så vil du <b>MISTE ALLE DINE PODCOINS</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Er du sikker på at du vil kryptere lommeboken?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>VIKTIG: Tidligere sikkerhetskopier av din lommebok-fil, bør erstattes med den nylig genererte, krypterte filen, da de blir ugyldiggjort av sikkerhetshensyn så snart du begynner å bruke den nye krypterte lommeboken.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Advarsel: Caps Lock er på!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Lommebok kryptert</translation>
</message>
<message>
<location line="-56"/>
<source>Podcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your podcoins from being stolen by malware infecting your computer.</source>
<translation>Podcoin vil nå lukkes for å fullføre krypteringsprosessen. Husk at kryptering av lommeboken ikke fullt ut kan beskytte dine podcoins fra å bli stjålet om skadevare infiserer datamaskinen.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Kryptering av lommebok feilet</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Kryptering av lommebok feilet på grunn av en intern feil. Din lommebok ble ikke kryptert.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>De angitte adgangsfrasene er ulike.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Opplåsing av lommebok feilet</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Adgangsfrasen angitt for dekryptering av lommeboken var feil.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Dekryptering av lommebok feilet</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Adgangsfrase for lommebok endret.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>Signer &melding...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Synkroniserer med nettverk...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Oversikt</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Vis generell oversikt over lommeboken</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Transaksjoner</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Vis transaksjonshistorikk</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Rediger listen over adresser og deres merkelapper</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Vis listen over adresser for mottak av betalinger</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>&Avslutt</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Avslutt applikasjonen</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Podcoin</source>
<translation>Vis informasjon om Podcoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Om &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Vis informasjon om Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Innstillinger...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Krypter Lommebok...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>Lag &Sikkerhetskopi av Lommebok...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Endre Adgangsfrase...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>Importerer blokker fra disk...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Re-indekserer blokker på disk...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Podcoin address</source>
<translation>Send til en Podcoin-adresse</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Podcoin</source>
<translation>Endre oppsett for Podcoin</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Sikkerhetskopiér lommebok til annet sted</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Endre adgangsfrasen brukt for kryptering av lommebok</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>&Feilsøkingsvindu</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Åpne konsoll for feilsøk og diagnostikk</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Verifiser melding...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Podcoin</source>
<translation>Podcoin</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Lommebok</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>&Send</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>&Motta</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>&Adressebok</translation>
</message>
<message>
<location line="+22"/>
<source>&About Podcoin</source>
<translation>&Om Podcoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Vis / Skjul</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Vis eller skjul hovedvinduet</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Krypter de private nøklene som tilhører lommeboken din</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Podcoin addresses to prove you own them</source>
<translation>Signér en melding for å bevise at du eier denne adressen</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Podcoin addresses</source>
<translation>Bekreft meldinger for å være sikker på at de ble signert av en angitt Podcoin-adresse</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Fil</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Innstillinger</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Hjelp</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Verktøylinje for faner</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnett]</translation>
</message>
<message>
<location line="+47"/>
<source>Podcoin client</source>
<translation>Podcoinklient</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Podcoin network</source>
<translation><numerusform>%n aktiv forbindelse til Podcoin-nettverket</numerusform><numerusform>%n aktive forbindelser til Podcoin-nettverket</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>Behandlet %1 blokker med transaksjonshistorikk.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Transaksjoner etter dette vil ikke være synlige enda.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>Denne transaksjonen overstiger størrelsesbegrensningen. Du kan likevel sende den med et gebyr på %1, som går til nodene som prosesserer transaksjonen din og støtter nettverket. Vil du betale gebyret?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Ajour</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Kommer ajour...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Bekreft transaksjonsgebyr</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Sendt transaksjon</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Innkommende transaksjon</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Dato: %1
Beløp: %2
Type: %3
Adresse: %4
</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>URI håndtering</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Podcoin address or malformed URI parameters.</source>
<translation>URI kunne ikke tolkes! Dette kan forårsakes av en ugyldig Podcoin-adresse eller feil i URI-parametere.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Lommeboken er <b>kryptert</b> og for tiden <b>ulåst</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Lommeboken er <b>kryptert</b> og for tiden <b>låst</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Podcoin can no longer continue safely and will quit.</source>
<translation>En fatal feil har inntruffet. Det er ikke trygt å fortsette og Podcoin må derfor avslutte.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Nettverksvarsel</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Rediger adresse</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Merkelapp</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>Merkelappen koblet til denne adressen i adresseboken</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Adresse</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Adressen til denne oppføringen i adresseboken. Denne kan kun endres for utsendingsadresser.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Ny mottaksadresse</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Ny utsendingsadresse</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Rediger mottaksadresse</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Rediger utsendingsadresse</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Den oppgitte adressen "%1" er allerede i adresseboken.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Podcoin address.</source>
<translation>Den angitte adressed "%1" er ikke en gyldig Podcoin-adresse.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Kunne ikke låse opp lommeboken.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Generering av ny nøkkel feilet.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Podcoin-Qt</source>
<translation>Podcoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versjon</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Bruk:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>kommandolinjevalg</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>valg i brukergrensesnitt</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Sett språk, for eksempel "nb_NO" (standardverdi: fra operativsystem)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Start minimert
</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Vis splashskjerm ved oppstart (standardverdi: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Innstillinger</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Hoved</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Betal transaksjons&gebyr</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Podcoin after logging in to the system.</source>
<translation>Start Podcoin automatisk etter innlogging.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Podcoin on system login</source>
<translation>&Start Podcoin ved systeminnlogging</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>&Nettverk</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Podcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Åpne automatisk Podcoin klientporten på ruteren. Dette virker kun om din ruter støtter UPnP og dette er påslått.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Sett opp port vha. &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Podcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Koble til Podcoin-nettverket gjennom en SOCKS proxy (f.eks. ved tilkobling gjennom Tor).</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&Koble til gjennom SOCKS proxy:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>Proxy &IP:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>IP-adresse for mellomtjener (f.eks. 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Proxyens port (f.eks. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>SOCKS &Versjon:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Proxyens SOCKS versjon (f.eks. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Vindu</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Vis kun ikon i systemkurv etter minimering av vinduet.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimer til systemkurv istedenfor oppgavelinjen</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimerer vinduet istedenfor å avslutte applikasjonen når vinduet lukkes. Når dette er slått på avsluttes applikasjonen kun ved å velge avslutt i menyen.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimer ved lukking</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Visning</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Språk for brukergrensesnitt:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Podcoin.</source>
<translation>Språket for brukergrensesnittet kan settes her. Innstillingen trer i kraft ved omstart av Podcoin.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Enhet for visning av beløper:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Velg standard delt enhet for visning i grensesnittet og for sending av podcoins.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Podcoin addresses in the transaction list or not.</source>
<translation>Om Podcoin-adresser skal vises i transaksjonslisten eller ikke.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Vis adresser i transaksjonslisten</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Avbryt</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Bruk</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>standardverdi</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Advarsel</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Podcoin.</source>
<translation>Denne innstillingen trer i kraft etter omstart av Podcoin.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>Angitt proxyadresse er ugyldig.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Skjema</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Podcoin network after a connection is established, but this process has not completed yet.</source>
<translation>Informasjonen som vises kan være foreldet. Din lommebok synkroniseres automatisk med Podcoin-nettverket etter at tilkobling er opprettet, men denne prosessen er ikke ferdig enda.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Ubekreftet:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Lommebok</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>Umoden:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Minet saldo som ikke har modnet enda</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Siste transaksjoner</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Din nåværende saldo</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Totalt antall ubekreftede transaksjoner som ikke telles med i saldo enda</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>ute av synk</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start podcoin: click-to-pay handler</source>
<translation>Kan ikke starte podcoin: klikk-og-betal håndterer</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>Dialog for QR Kode</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Etterspør Betaling</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Beløp:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Merkelapp:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Melding:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Lagre Som...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>Feil ved koding av URI i QR kode.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>Angitt beløp er ugyldig, vennligst sjekk.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Resulterende URI for lang, prøv å redusere teksten for merkelapp / melding.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Lagre QR Kode</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>PNG bilder (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Klientnavn</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>-</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Klientversjon</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informasjon</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Bruker OpenSSL versjon</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Oppstartstidspunkt</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Nettverk</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Antall tilkoblinger</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>På testnett</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Blokkjeden</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Nåværende antall blokker</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Estimert totalt antall blokker</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Tidspunkt for siste blokk</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Åpne</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>Kommandolinjevalg</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Podcoin-Qt help message to get a list with possible Podcoin command-line options.</source>
<translation>Vis Podcoin-Qt hjelpemelding for å få en liste med mulige kommandolinjevalg.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&Vis</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Konsoll</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Byggedato</translation>
</message>
<message>
<location line="-104"/>
<source>Podcoin - Debug window</source>
<translation>Podcoin - vindu for feilsøk</translation>
</message>
<message>
<location line="+25"/>
<source>Podcoin Core</source>
<translation>Podcoin Kjerne</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Loggfil for feilsøk</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Podcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Åpne Podcoin loggfil for feilsøk fra datamappen. Dette kan ta noen sekunder for store loggfiler.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Tøm konsoll</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Podcoin RPC console.</source>
<translation>Velkommen til Podcoin RPC konsoll.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Bruk opp og ned pil for å navigere historikken, og <b>Ctrl-L</b> for å tømme skjermen.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Skriv <b>help</b> for en oversikt over kommandoer.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Send Podcoins</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Send til flere enn én mottaker</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>&Legg til Mottaker</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Fjern alle transaksjonsfelter</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Fjern &Alt</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Bekreft sending</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>S&end</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> til %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Bekreft sending av podcoins</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Er du sikker på at du vil sende %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> og </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Adresse for mottaker er ugyldig.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
        <translation>Beløpet som skal betales må være over 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Beløpet overstiger saldo.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Totalbeløpet overstiger saldo etter at %1 transaksjonsgebyr er lagt til.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
        <translation>Duplisert adresse funnet. Kan bare sende én gang til hver adresse per operasjon.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
        <translation>Feil: Opprettelse av transaksjon feilet!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Feil: Transaksjonen ble avvist. Dette kan skje om noe av beløpet allerede var brukt, f.eks. hvis du kopierte wallet.dat og noen podcoins ble brukt i kopien men ikke ble markert som brukt her.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Skjema</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>&Beløp:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Betal &Til:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Adressen betalingen skal sendes til (f.eks. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Skriv inn en merkelapp for denne adressen for å legge den til i din adressebok</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Merkelapp:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Velg adresse fra adresseboken</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Lim inn adresse fra utklippstavlen</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Fjern denne mottakeren</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Podcoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Skriv inn en Podcoin adresse (f.eks. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Signaturer - Signer / Verifiser en melding</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Signér Melding</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Du kan signere meldinger med dine adresser for å bevise at du eier dem. Ikke signér vage meldinger da phishing-angrep kan prøve å lure deg til å signere din identitet over til andre. Signér kun fullt detaljerte utsagn som du er enig i.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Adressen for signering av meldingen (f.eks. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Velg en adresse fra adresseboken</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Lim inn adresse fra utklippstavlen</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Skriv inn meldingen du vil signere her</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopier valgt signatur til utklippstavle</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Podcoin address</source>
<translation>Signer meldingen for å bevise at du eier denne Podcoin-adressen</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Tilbakestill alle felter for meldingssignering</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Fjern &Alt</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&Verifiser Melding</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Angi adresse for signering, melding (vær sikker på at du kopierer linjeskift, mellomrom, tab, etc. helt nøyaktig) og signatur under for å verifisere meldingen. Vær forsiktig med at du ikke gir signaturen mer betydning enn det som faktisk står i meldingen, for å unngå å bli lurt av såkalte "man-in-the-middle" angrep.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Adressen meldingen var signert med (f.eks. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Podcoin address</source>
<translation>Verifiser meldingen for å være sikker på at den ble signert av den angitte Podcoin-adressen</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>Tilbakestill alle felter for meldingsverifikasjon</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Podcoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Skriv inn en Podcoin adresse (f.eks. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Klikk "Signer Melding" for å generere signatur</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Podcoin signature</source>
<translation>Angi Podcoin signatur</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>Angitt adresse er ugyldig.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Vennligst sjekk adressen og prøv igjen.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>Angitt adresse refererer ikke til en nøkkel.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Opplåsing av lommebok ble avbrutt.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>Privat nøkkel for den angitte adressen er ikke tilgjengelig.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Signering av melding feilet.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Melding signert.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Signaturen kunne ikke dekodes.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Vennligst sjekk signaturen og prøv igjen.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>Signaturen passer ikke til meldingen.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Verifikasjon av melding feilet.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Melding verifisert.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Podcoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnett]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Åpen til %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/frakoblet</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/ubekreftet</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 bekreftelser</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, kringkast gjennom %n node</numerusform><numerusform>, kringkast gjennom %n noder</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Kilde</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Generert</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Fra</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Til</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>egen adresse</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>merkelapp</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Kredit</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>blir moden om %n blokk</numerusform><numerusform>blir moden om %n blokker</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>ikke akseptert</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Debet</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Transaksjonsgebyr</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Nettobeløp</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Melding</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Kommentar</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>Transaksjons-ID</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Genererte podcoins må modnes 120 blokker før de kan brukes. Da du genererte denne blokken ble den kringkastet til nettverket for å legges til i blokkjeden. Hvis den ikke kommer inn i kjeden får den tilstanden "ikke akseptert" og vil ikke kunne brukes. Dette skjer noen ganger hvis en annen node genererer en blokk noen sekunder fra din.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Informasjon for feilsøk</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transaksjon</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>Inndata</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Beløp</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>sann</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>usann</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, har ikke blitt kringkastet uten problemer enda.</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>ukjent</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Transaksjonsdetaljer</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Her vises en detaljert beskrivelse av transaksjonen</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Beløp</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Åpen til %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Frakoblet (%1 bekreftelser)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Ubekreftet (%1 av %2 bekreftelser)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Bekreftet (%1 bekreftelser)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>Minet saldo blir tilgjengelig når den modner om %n blokk</numerusform><numerusform>Minet saldo blir tilgjengelig når den modner om %n blokker</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Denne blokken har ikke blitt mottatt av noen andre noder og vil sannsynligvis ikke bli akseptert!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Generert men ikke akseptert</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Mottatt med</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Mottatt fra</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Sendt til</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Betaling til deg selv</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Utvunnet</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>-</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transaksjonsstatus. Hold muspekeren over dette feltet for å se antall bekreftelser.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
        <translation>Dato og tid for da transaksjonen ble mottatt.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Type transaksjon.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
        <translation>Mottaksadresse for transaksjonen.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Beløp fjernet eller lagt til saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Alle</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>I dag</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Denne uken</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Denne måneden</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Forrige måned</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Dette året</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Intervall...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Mottatt med</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Sendt til</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Til deg selv</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Utvunnet</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Andre</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Skriv inn adresse eller merkelapp for søk</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Minimumsbeløp</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Kopier adresse</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopier merkelapp</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopiér beløp</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Kopier transaksjons-ID</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Rediger merkelapp</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Vis transaksjonsdetaljer</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Eksporter transaksjonsdata</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Kommaseparert fil (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Bekreftet</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Merkelapp</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Beløp</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Feil ved eksport</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Kunne ikke skrive til filen %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Intervall:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>til</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Send Podcoins</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Eksporter data fra nåværende fane til fil</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>Sikkerhetskopier lommebok</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>Lommebokdata (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Sikkerhetskopiering feilet</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>En feil oppstod under lagringen av lommeboken til den nye plasseringen.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>Sikkerhetskopiering fullført</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>Lommebokdata ble lagret til den nye plasseringen. </translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Podcoin version</source>
<translation>Podcoin versjon</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Bruk:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or podcoind</source>
<translation>Send kommando til -server eller podcoind</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>List opp kommandoer</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Vis hjelpetekst for en kommando</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Innstillinger:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: podcoin.conf)</source>
<translation>Angi konfigurasjonsfil (standardverdi: podcoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: podcoind.pid)</source>
<translation>Angi pid-fil (standardverdi: podcoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Angi mappe for datafiler</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Sett størrelse på mellomlager for database i megabytes (standardverdi: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9333 or testnet: 19333)</source>
<translation>Lytt etter tilkoblinger på <port> (standardverdi: 9333 eller testnet: 19333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Hold maks <n> koblinger åpne til andre noder (standardverdi: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Koble til node for å hente adresser til andre noder, koble så fra igjen</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>Angi din egen offentlige adresse</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Grenseverdi for å koble fra noder med dårlig oppførsel (standardverdi: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Antall sekunder noder med dårlig oppførsel hindres fra å koble til på nytt (standardverdi: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
        <translation>En feil oppstod ved opprettelse av RPC port %u for lytting på IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9552 or testnet: 19552)</source>
<translation>Lytt etter JSON-RPC tilkoblinger på <port> (standardverdi: 9552 or testnet: 19552)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Ta imot kommandolinje- og JSON-RPC-kommandoer</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Kjør i bakgrunnen som daemon og ta imot kommandoer</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Bruk testnettverket</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Ta imot tilkoblinger fra utsiden (standardverdi: 1 hvis uten -proxy eller -connect)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=podcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Podcoin Alert" [email protected]
</source>
<translation>%s, du må angi rpcpassord i konfigurasjonsfilen.
%s
Det anbefales at du bruker det følgende tilfeldige passordet:
rpcbruker=podcoinrpc
rpcpassord=%s
(du behøver ikke å huske passordet)
Brukernavnet og passordet MÅ IKKE være like.
Om filen ikke eksisterer, opprett den nå med eier-kun-les filrettigheter.
Det er også anbefalt at å sette varselsmelding slik du får melding om problemer.
For eksempel: varselmelding=echo %%s | mail -s "Podcoin varsel" [email protected]</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>En feil oppstod under oppsettet av RPC port %u for IPv6, tilbakestilles til IPv4: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Bind til angitt adresse. Bruk [vertsmaskin]:port notasjon for IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Podcoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>Kjør kommando når relevant varsel blir mottatt (%s i cmd er erstattet med melding)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Kjør kommando når en lommeboktransaksjon endres (%s i cmd er erstattet med TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Sett maks størrelse for transaksjoner med høy prioritet / lavt gebyr, i bytes (standardverdi: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Advarsel: -paytxfee er satt veldig høyt! Dette er transaksjonsgebyret du betaler når du sender transaksjoner.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Advarsel: Viste transaksjoner kan være feil! Du, eller andre noder, kan trenge en oppgradering.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Podcoin will not work properly.</source>
<translation>Advarsel: Vennligst undersøk at din datamaskin har riktig dato og klokkeslett! Hvis klokken er stilt feil vil ikke Podcoin fungere riktig.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>Valg for opprettelse av blokker:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Koble kun til angitt(e) node(r)</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>Oppdaget korrupt blokkdatabase</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Oppdag egen IP-adresse (standardverdi: 1 ved lytting og uten -externalip)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Ønsker du å gjenopprette blokkdatabasen nå?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>Feil under oppstart av lommebokdatabasemiljø %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>Feil under åpning av blokkdatabase</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
        <location line="+1"/>
        <source>Error: system error: </source>
        <translation type="unfinished"/>
    </message>
    <message>
        <location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Kunne ikke lytte på noen port. Bruk -listen=0 hvis det er dette du vil.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>Finn andre noder gjennom DNS-oppslag (standardverdi: 1 med mindre -connect er oppgitt)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Gjenopprett blokkjedeindex fra blk000??.dat filer</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>Verifiserer blokker...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>Verifiserer lommebok...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Ugyldig -tor adresse: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Maks mottaksbuffer per forbindelse, <n>*1000 bytes (standardverdi: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Maks sendebuffer per forbindelse, <n>*1000 bytes (standardverdi: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Koble kun til noder i nettverket <nett> (IPv4, IPv6 eller Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Skriv ekstra informasjon for feilsøk. Medfører at alle -debug* valg tas med</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Skriv ekstra informasjon for feilsøk av nettverk</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Sett tidsstempel på debugmeldinger</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Podcoin Wiki for SSL setup instructions)</source>
<translation>SSL valg: (se Podcoin Wiki for instruksjoner for oppsett av SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Velg versjon av socks proxy (4-5, standardverdi 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Send spor/debug informasjon til konsollet istedenfor debug.log filen</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Send spor/debug informasjon til debugger</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Sett maks blokkstørrelse i bytes (standardverdi: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Sett minimum blokkstørrelse i bytes (standardverdi: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Krymp debug.log filen når klienten starter (standardverdi: 1 hvis uten -debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Angi tidsavbrudd for forbindelse i millisekunder (standardverdi: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Bruk UPnP for lytteport (standardverdi: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Bruk UPnP for lytteport (standardverdi: 1 ved lytting)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Bruk en proxy for å nå skjulte tor tjenester (standardverdi: samme som -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Brukernavn for JSON-RPC forbindelser</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Advarsel: Denne versjonen er foreldet, oppgradering kreves!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Passord for JSON-RPC forbindelser</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Tillat JSON-RPC tilkoblinger fra angitt IP-adresse</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Send kommandoer til node på <ip> (standardverdi: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Eksekvér kommando når beste blokk endrer seg (%s i kommandoen erstattes med blokkens hash)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Oppgradér lommebok til nyeste format</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Angi størrelse på nøkkel-lager til <n> (standardverdi: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Se gjennom blokk-kjeden etter manglende lommeboktransaksjoner</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Bruk OpenSSL (https) for JSON-RPC forbindelser</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Servers sertifikat (standardverdi: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Servers private nøkkel (standardverdi: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Akseptable krypteringsmetoder (standardverdi: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Denne hjelpemeldingen</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Kan ikke binde til %s på denne datamaskinen (bind returnerte feil %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Koble til gjennom socks proxy</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Tillat DNS oppslag for -addnode, -seednode og -connect</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Laster adresser...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Feil ved lasting av wallet.dat: Lommeboken er skadet</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Podcoin</source>
<translation>Feil ved lasting av wallet.dat: Lommeboken krever en nyere versjon av Podcoin</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Podcoin to complete</source>
<translation>Lommeboken måtte skrives om: start Podcoin på nytt for å fullføre</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Feil ved lasting av wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Ugyldig -proxy adresse: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Ukjent nettverk angitt i -onlynet '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Ukjent -socks proxy versjon angitt: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Kunne ikke slå opp -bind adresse: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Kunne ikke slå opp -externalip adresse: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Ugyldig beløp for -paytxfee=<beløp>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Ugyldig beløp</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Utilstrekkelige midler</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Laster blokkindeks...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Legg til node for tilkobling og hold forbindelsen åpen</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Podcoin is probably already running.</source>
<translation>Kan ikke binde til %s på denne datamaskinen. Sannsynligvis kjører Podcoin allerede.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Gebyr per KB for transaksjoner du sender</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Laster lommebok...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>Kan ikke nedgradere lommebok</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>Kan ikke skrive standardadresse</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Leser gjennom...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Ferdig med lasting</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>For å bruke %s opsjonen</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Feil</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Du må sette rpcpassword=<passord> i konfigurasjonsfilen:
%s
Hvis filen ikke finnes, opprett den med leserettighet kun for eier av filen.</translation>
</message>
</context>
</TS><|fim▁end|>
|
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
|
<|file_name|>result.rs<|end_file_name|><|fim▁begin|>use std::io::Error as IoError;
use std::sync::mpsc::{SendError, RecvError};
use std::result;
use mio::NotifyError;
use queue::Message;
use {Handler};
/// Unified error type for queue operations, parameterized over the
/// application's `Handler` so the channel payload types line up with the
/// handler's associated `Processor`/`Message`/`Response` types.
#[derive(Debug)]
pub enum Error<H: Handler> {
    /// The queue has stopped servicing requests.
    QueueOutOfService,
    /// Wrapped `std::io::Error`.
    Io(IoError),
    /// Wrapped mio `NotifyError` carrying the undelivered queue message.
    NotifyError(NotifyError<Message<H::Processor, H::Message, H::Response>>),
    /// Wrapped mpsc `SendError` for a response that could not be sent.
    SendError(SendError<H::Response>),
    /// Wrapped mpsc `RecvError`.
    RecvError(RecvError),
}
pub type Result<T, H: Handler> = result::Result<T, Error<H>>;<|fim▁hole|> }
}
impl<H: Handler> From<NotifyError<Message<H::Processor, H::Message, H::Response>>> for Error<H> {
fn from(err: NotifyError<Message<H::Processor, H::Message, H::Response>>) -> Error<H> {
Error::NotifyError(err)
}
}
impl<H: Handler> From<SendError<H::Response>> for Error<H> {
fn from(err: SendError<H::Response>) -> Error<H> {
Error::SendError(err)
}
}
impl<H: Handler> From<RecvError> for Error<H> {
fn from(err: RecvError) -> Error<H> {
Error::RecvError(err)
}
}
#[derive(Debug)]
pub struct ResponseError(pub &'static str);
pub type ResponseResult<T> = result::Result<T, ResponseError>;<|fim▁end|>
|
impl<H: Handler> From<IoError> for Error<H> {
fn from(err: IoError) -> Error<H> {
Error::Io(err)
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># Django settings for tests2 project.
import django
import sys
sys.path.append("../..")
sys.path.append("../../../../..")
from siteconf import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
# Database connection settings for the test project. The MYSQL_* names come
# from the ``siteconf`` module star-imported at the top of this file.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': MYSQL_DB_W,
        'USER': MYSQL_USER_W,
        'PASSWORD': MYSQL_PASSWORD_W,
        'HOST': MYSQL_HOST_W,
        'PORT': MYSQL_PORT_W,
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
# (A stray FIM extraction token had been fused onto this assignment.)
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'xvds$ppv5ha75qg1yx3aax7ugr_2*fmdrc(lrc%x7kdez-63xn'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = ''
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'tests2.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# django.contrib.admin is needed because we call django_logout()
# and it expect some templates to be registered
'django.contrib.admin',
'djangosaml2',
'testprofiles',
)
AUTH_PROFILE_MODULE = 'testprofiles.TestProfile'
if django.VERSION >= (1, 7):
AUTH_USER_MODEL = 'testprofiles.TestUser'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'djangosaml2': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
# ``django`` is already imported at the top of this module; the redundant
# re-import that used to live here has been dropped.
if django.VERSION < (1, 4):
    # Django < 1.4 has no RequireDebugFalse logging filter, so strip the
    # pieces of LOGGING that reference it.
    del LOGGING['filters']['require_debug_false']
    del LOGGING['handlers']['mail_admins']['filters']
# Authenticate exclusively through SAML assertions handled by djangosaml2.
# (A stray FIM extraction token had been fused onto the closing paren.)
AUTHENTICATION_BACKENDS = (
    'djangosaml2.backends.Saml2Backend',
)
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description':'End to end solution for bitcoin data gathering, backtesting, and live trading',
'author': 'ross palmer',<|fim▁hole|> 'url':'http://rosspalmer.github.io/bitQuant/',
'license':'MIT',
'version': '0.2.10',
'install_requires': ['SQLAlchemy','pandas','numpy','scipy','PyMySQL'],
'packages': ['bitquant'],
'scripts': [],
'name':'bitquant'
}
setup(**config)<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import os
from django.core.exceptions import ImproperlyConfigured
from shoop.utils.setup import Setup
from . import base_settings
def configure(setup):
    """Apply base settings, then overlay an optional local_settings module.

    The local settings file is located via the LOCAL_SETTINGS_FILE
    environment variable (with a same-directory fallback), executed, and its
    ``configure`` callable is applied on top of the base configuration.

    Raises:
        ImproperlyConfigured: if a local settings file exists but does not
            define ``configure``.
    """
    base_settings.configure(setup)

    local_settings_file = os.getenv('LOCAL_SETTINGS_FILE')

    # Backward compatibility: find the file in the current directory if the
    # LOCAL_SETTINGS_FILE environment variable is unset.
    if local_settings_file is None:
        cand = os.path.join(os.path.dirname(__file__), 'local_settings.py')
        if os.path.exists(cand):
            local_settings_file = cand

    # Load local settings from file, if one was found.
    if local_settings_file:
        local_settings_ns = {
            '__file__': local_settings_file,
        }
        with open(local_settings_file, 'rb') as fp:
            compiled = compile(fp.read(), local_settings_file, 'exec')
            exec(compiled, local_settings_ns)
        if 'configure' not in local_settings_ns:
            raise ImproperlyConfigured('No configure in local_settings')
        local_configure = local_settings_ns['configure']
        local_configure(setup)

    return setup


globals().update(Setup.configure(configure))
<|file_name|>jquery-validation-unobtrusive.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for Microsoft jQuery Unobtrusive Validation v3.2.3
// Project: http://aspnetwebstack.codeplex.com/
// Definitions by: Matt Brooks <https://github.com/EnableSoftware>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/// <reference path="../jquery.validation/jquery.validation.d.ts" />
declare namespace MicrosoftJQueryUnobtrusiveValidation {
type JQuerySelector = string | Document | Element | JQuery;
    /**
     * A named validation adapter: carries the parameter names it reads and
     * the `adapt` callback that registers the corresponding rule.
     */
    interface Adapter {
        name: string;
        params: string[];
        adapt: Function
    }
    /**
     * Collection of registered adapters plus helpers for defining new ones.
     * Each `add*` variant wires a differently-shaped attribute set to a
     * validation rule; all return the collection for chaining.
     */
    interface Adapters extends Array<Adapter> {
        add(adapterName: string, fn: Function): Adapters;
        add(adapterName: string, params: string[], fn: Function): Adapters;
        addMinMax(adapterName: string, minRuleName: string, maxRuleName: string, minMaxRuleName: string, minAttribute?: string, maxAttribute?: string): Adapters;
        addSingleVal(adapterName: string, ruleName: string): Adapters;
        addSingleVal(adapterName: string, attribute: string, ruleName: string): Adapters;
        addBool(adapterName: string, ruleName?: string): Adapters;
        addMethod(adapterName: string, fn: (value: string, element: Element, params: any) => any): Adapters;
    }
interface Validator {
adapters: Adapters;
parseElement(element: JQuerySelector, skipAttach?: boolean): void;<|fim▁hole|>declare namespace JQueryValidation {
interface ValidatorStatic {
unobtrusive: MicrosoftJQueryUnobtrusiveValidation.Validator;
}
}<|fim▁end|>
|
parse(selector: JQuerySelector): void;
}
}
|
<|file_name|>watcher.py<|end_file_name|><|fim▁begin|>import cPickle
import logging
import numpy
import os
import time
from collections import deque
from copy import deepcopy
from datetime import datetime
from pytz import timezone
from threading import Event, Thread
from coinbase.wallet.client import Client
from jarvis.utils.messaging.client import TwilioMessenger
from jarvis.modules.base import JarvisThreadedModule
def configure_debug_logging():
    """Configure the root logger to emit records at DEBUG level and above."""
    debug_level = logging.DEBUG
    logging.basicConfig(level=debug_level)
def load_coinbase_config():
    """Read Coinbase API credentials from the environment.

    Returns:
        tuple: ``(api_key, api_secret)`` taken from the COINBASE_KEY and
        COINBASE_SECRET environment variables.

    Raises:
        Exception: if either variable is unset or empty; the message now
        names exactly which variables are missing to ease deployment
        debugging (previously the message gave no hint).
    """
    coinbase_key = os.environ.get('COINBASE_KEY')
    coinbase_secret = os.environ.get('COINBASE_SECRET')
    if not all([coinbase_key, coinbase_secret]):
        missing = [name for name, value in
                   [('COINBASE_KEY', coinbase_key),
                    ('COINBASE_SECRET', coinbase_secret)]
                   if not value]
        raise Exception('Coinbase config not configured properly: '
                        'missing %s' % ', '.join(missing))
    return (coinbase_key, coinbase_secret)
def load_from_file(path):
    """Return the unpickled object stored at *path*, or None if the file
    does not exist."""
    if not os.path.exists(path):
        return None
    with open(path, 'r') as handle:
        return cPickle.loads(handle.read())
def store_to_file(path, obj):
    """Pickle *obj* and write the serialized form to *path*, overwriting
    any previous contents."""
    serialized = cPickle.dumps(obj)
    with open(path, 'w') as handle:
        handle.write(serialized)
class CoinbaseClient(object):
    """Thin wrapper around the coinbase ``Client`` that dispatches calls
    by method name (see :meth:`do`)."""

    def __init__(self):
        self.api_key, self.api_secret = load_coinbase_config()
        self.client = Client(self.api_key, self.api_secret)

    def do(self, func, *args, **kwargs):
        """Look up *func* on the wrapped client, call it with the given
        arguments, and return its result."""
        method = getattr(self.client, func)
        return method(*args, **kwargs)
class TickerTimeseries(object):
    """Bounded price history with naive anomaly and large-move detection.

    Two windows are kept over the same samples: ``timeseries`` (used for
    anomaly detection) and ``large_movement_timeseries``, which is reset
    each time a large movement is reported so the same move is not
    reported twice.
    """

    def __init__(self, max_length, recent_cutoff,
                 load_path=None, poll_period=30, name=None):
        # Restore a previously persisted window when one exists on disk.
        self.timeseries = load_from_file(load_path)
        if not self.timeseries:
            self.timeseries = deque(maxlen=max_length)
        self.large_movement_timeseries = deepcopy(self.timeseries)
        self.recent_cutoff = recent_cutoff
        self.max_length = max_length
        self.poll_period = poll_period  # seconds between samples
        self.name = name

    def append(self, val):
        """Record a new price sample in both windows."""
        self.timeseries.append(val)
        self.large_movement_timeseries.append(val)

    @property
    def head(self):
        """Most recent sample."""
        return self.timeseries[-1]

    @property
    def tail(self):
        """Oldest retained sample."""
        return self.timeseries[0]

    @property
    def mean(self):
        return numpy.mean(self.timeseries)

    @property
    def length(self):
        return len(self.timeseries)

    @classmethod
    def anomaly(cls, series, recent_cutoff):
        '''
        Naive anomaly detection. Given a series it computes
        the standard deviation and returns True if any of the values
        in the last :recent_cutoff points are more than
        3 standard deviations above the mean
        :series iterable of timeseries data
        :recent_cutoff only consider anomalies on the most recent points
        '''
        std_dev = numpy.std(series)
        mean = numpy.mean(series)
        # BUG FIX: ``series`` is usually a collections.deque, and deques do
        # not support slice indexing -- materialize a list before slicing.
        for point in list(series)[-recent_cutoff:]:
            abs_diff = abs(point - mean)
            # Also require an absolute move of >= 3 so tiny jitter on a
            # near-constant series is not flagged.
            if abs_diff >= std_dev * 3 and abs_diff >= 3:
                return True
        return False

    def is_anomalous(self):
        """Return True when the recent samples look anomalous."""
        # If we don't have enough data, don't do anything
        if len(self.timeseries) < self.recent_cutoff:
            return False
        return self.anomaly(self.timeseries, self.recent_cutoff)

    @classmethod
    def large_movement(cls, series):
        """Return True when the first-to-last change exceeds 3% of the
        first value.

        NOTE(review): raises ZeroDivisionError if ``series[0]`` is 0 --
        presumably prices are always positive; confirm upstream.
        """
        if float(abs(series[0] - series[-1])) / series[0] > 0.03:
            return True
        return False

    def is_large_movement(self):
        """Return a formatted notification string for a large move, or
        None. Reporting resets the large-movement window so the same move
        is only reported once."""
        if self.large_movement(self.large_movement_timeseries):
            msg = MOVEMENT_NOTIFICATION % \
                (self.name,
                 len(self.large_movement_timeseries) * self.poll_period / 60,
                 self.large_movement_timeseries[0],
                 self.large_movement_timeseries[-1])
            # Restart the window from the current price.
            self.large_movement_timeseries = deque(
                [self.large_movement_timeseries[-1]],
                maxlen=self.max_length)
            return msg
        return None
# printf-style SMS templates used for price alerts.
# ANOMALY_NOTIFICATION placeholders: (window_minutes, mean_price,
# current_price).  NOTE(review): no caller is visible in this chunk --
# confirm the argument order at the call site.
ANOMALY_NOTIFICATION = \
    '''Anomalous bitcoin price activity detected. Mean price over the
past %d minutes is %.2f, current price is %.2f'''
# MOVEMENT_NOTIFICATION placeholders: (currency_name, window_minutes,
# old_price, new_price) -- filled in TickerTimeseries.is_large_movement.
MOVEMENT_NOTIFICATION = \
    '''Large %s movement detected. Price %d minutes ago was %.2f,
current price is %.2f'''
class CoinbaseWatcher(object):
    """Polls Coinbase for BTC/ETH prices and sends SMS alerts on big moves.

    State (price windows and sent messages) is persisted under /tmp so a
    restart resumes where the previous run left off.

    FIX: removed a stray ``<|fim hole|>`` corruption token that was
    embedded in the polling loop's source text.
    """

    POLL_PERIOD = 30             # seconds between price polls
    RECENT_DATA = 60 * 5         # seconds treated as "recent" by the anomaly check
    MAX_LENGTH_MULTIPLE = 12 * 24  # retained history, in RECENT_DATA-sized units
    COOLDOWN_TICKS = 10          # polls to wait between two notifications
    # NOTE(review): "bitccointicker" is misspelled, but renaming the path
    # would orphan already-persisted data, so it is left unchanged.
    BTCTICKERPATH = "/tmp/bitccointicker"
    ETHTICKERPATH = "/tmp/ethticker"
    MSGPATH = "/tmp/bitcoinmsgs"

    def __init__(self, stop):
        """:stop: threading.Event that terminates the polling loop."""
        recent_points = self.RECENT_DATA / self.POLL_PERIOD
        self.twilio_client = TwilioMessenger()
        self.coinbase_client = CoinbaseClient()
        self.btc_timeseries = TickerTimeseries(
            max_length=recent_points * self.MAX_LENGTH_MULTIPLE,
            recent_cutoff=recent_points,
            load_path=self.BTCTICKERPATH,
            poll_period=self.POLL_PERIOD,
            name='Bitcoin')
        self.eth_timeseries = TickerTimeseries(
            max_length=recent_points * self.MAX_LENGTH_MULTIPLE,
            recent_cutoff=recent_points,
            load_path=self.ETHTICKERPATH,
            poll_period=self.POLL_PERIOD,
            name='Ethereum')
        self.cooldown = 0
        self.stop = stop
        self.sent_messages = load_from_file(self.MSGPATH)
        if not self.sent_messages:
            self.sent_messages = deque(maxlen=3)

    @property
    def raw_btc_timeseries(self):
        return self.btc_timeseries.timeseries

    @property
    def raw_eth_timeseries(self):
        return self.eth_timeseries.timeseries

    @property
    def in_cooldown(self):
        # NOTE: reading this property has a side effect -- it decrements
        # the remaining cooldown by one tick.
        self.cooldown = max(0, self.cooldown - 1)
        if self.cooldown <= 0:
            return False
        return True

    def initiate_cooldown(self):
        """Suppress further notifications for COOLDOWN_TICKS polls."""
        self.cooldown = self.COOLDOWN_TICKS

    def start(self):
        """Poll prices until the stop event is set, alerting on big moves.

        Any exception inside one iteration is logged and the loop keeps
        going, so a transient API failure does not kill the watcher.
        """
        while not self.stop.is_set():
            try:
                spot_price = self.coinbase_client.do(
                    'get_spot_price', currency_pair='BTC-USD')
                self.btc_timeseries.append(float(spot_price['amount']))
                # coinbase client doesn't actually support currency_pair
                rates = self.coinbase_client.do('get_exchange_rates')
                self.eth_timeseries.append(1 / float(rates['rates']['ETH']))
                if not self.in_cooldown:
                    msg = self.btc_timeseries.is_large_movement()
                    if msg:
                        self.twilio_client.send_message(msg)
                        self.sent_messages.append((msg, time.time()))
                        self.initiate_cooldown()
                    msg = self.eth_timeseries.is_large_movement()
                    if msg:
                        self.twilio_client.send_message(msg)
                        self.sent_messages.append((msg, time.time()))
                        self.initiate_cooldown()
            except Exception:
                logging.exception("Exception in main loop")
            time.sleep(self.POLL_PERIOD)
            # Persist after every poll so state survives a crash/restart.
            # NOTE(review): original indentation was lost; these writes are
            # assumed to be inside the loop -- confirm against upstream.
            store_to_file(self.MSGPATH, self.sent_messages)
            store_to_file(self.BTCTICKERPATH, self.btc_timeseries.timeseries)
            store_to_file(self.ETHTICKERPATH, self.eth_timeseries.timeseries)
class CoinbaseWatcherModule(JarvisThreadedModule):
    """Jarvis module wrapper exposing the watcher's data to the UI.

    FIX: get_bitcoin_ticker_timeseries and get_eth_ticker_timeseries were
    byte-for-byte duplicates except for the source series; the shared
    shaping logic is now in the private _ticker_points helper.
    """

    def init_module(self, event):
        """Create the watcher and return the (unstarted) polling thread."""
        self.coinbase_watcher = CoinbaseWatcher(event)
        return Thread(target=self.coinbase_watcher.start)

    def get_recent_messages(self):
        """Return (message, local-time string) pairs, newest first."""
        return [
            (msg, self.convert_timestamp(timestamp))
            for (msg, timestamp) in
            reversed(self.coinbase_watcher.sent_messages)
        ]

    @classmethod
    def convert_timestamp(cls, timestamp):
        """Format a UNIX timestamp as a US/Pacific wall-clock string."""
        pacific = timezone("US/Pacific-New")
        utc = timezone("UTC")
        return utc.localize(datetime.utcfromtimestamp(
            timestamp)).astimezone(pacific).strftime('%Y-%m-%d %H:%M:%S')

    def _ticker_points(self, raw_series):
        # Shared shaping for the chart endpoints: one {date, value} dict
        # per sample, oldest first, with dates back-computed from "now"
        # at POLL_PERIOD-second spacing.
        seconds_per_point = self.coinbase_watcher.POLL_PERIOD
        now = time.time()
        return [
            {
                'date': self.convert_timestamp(now - seconds_per_point * i),
                'value': val
            } for i, val in enumerate(reversed(raw_series))
        ][::-1]

    def get_bitcoin_ticker_timeseries(self):
        """BTC price history shaped for charting."""
        return self._ticker_points(self.coinbase_watcher.raw_btc_timeseries)

    def get_eth_ticker_timeseries(self):
        """ETH price history shaped for charting."""
        return self._ticker_points(self.coinbase_watcher.raw_eth_timeseries)
if __name__ == '__main__':
    configure_debug_logging()
    # BUG FIX: CoinbaseWatcher.__init__ requires a stop Event; calling it
    # with no arguments raised TypeError before the loop ever started.
    stop_event = Event()
    watcher = CoinbaseWatcher(stop_event)
    watcher.start()
| |
<|file_name|>PersistDlg.cpp<|end_file_name|><|fim▁begin|>// Copyleft 2005 Chris Korda
// This program is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License as published by the Free
// Software Foundation; either version 2 of the License, or any later version.
/*
chris korda
revision history:
rev date comments
00 22apr05 initial version
01 07jul05 keep dialog within screen
02 23nov07 support Unicode
03 16dec08 in OnShowWindow, move LoadWnd within bShow block
04 24mar09 add special handling for main accelerators
05 21dec12 in OnShowWindow, don't clamp to work area if maximized
06 24jul15 override DoModal to save and restore focus
07 06jul17 remove update menu method
dialog that saves and restores its position
*/
// PersistDlg.cpp : implementation file
//
#include "stdafx.h"
#include "Resource.h"
#include "PersistDlg.h"
#include "Persist.h"
#ifdef _DEBUG
#define new DEBUG_NEW
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#endif
/////////////////////////////////////////////////////////////////////////////
// CPersistDlg dialog
IMPLEMENT_DYNAMIC(CPersistDlg, CDialog);
CPersistDlg::CPersistDlg(UINT nIDTemplate, UINT nIDAccel, LPCTSTR RegKey, CWnd *pParent)
: CDialog(nIDTemplate, pParent), m_RegKey(RegKey)
{
//{{AFX_DATA_INIT(CPersistDlg)
//}}AFX_DATA_INIT
m_WasShown = FALSE;
m_IDAccel = nIDAccel;
m_Accel = nIDAccel != NULL && nIDAccel != IDR_MAINFRAME ?
LoadAccelerators(AfxGetApp()->m_hInstance, MAKEINTRESOURCE(nIDAccel)) : NULL;
}
void CPersistDlg::DoDataExchange(CDataExchange* pDX)<|fim▁hole|>}
BEGIN_MESSAGE_MAP(CPersistDlg, CDialog)
//{{AFX_MSG_MAP(CPersistDlg)
ON_WM_DESTROY()
ON_WM_SHOWWINDOW()
//}}AFX_MSG_MAP
END_MESSAGE_MAP()
/////////////////////////////////////////////////////////////////////////////
// CPersistDlg message handlers
void CPersistDlg::OnDestroy()
{
CDialog::OnDestroy();
if (m_WasShown)
CPersist::SaveWnd(REG_SETTINGS, this, m_RegKey);
}
void CPersistDlg::OnShowWindow(BOOL bShow, UINT nStatus)
{
CDialog::OnShowWindow(bShow, nStatus);
if (bShow) {
if (!m_WasShown && !IsWindowVisible()) {
m_WasShown = TRUE;
int Flags = (GetStyle() & WS_THICKFRAME) ? 0 : CPersist::NO_RESIZE;
CPersist::LoadWnd(REG_SETTINGS, this, m_RegKey, Flags);
}
if (!IsZoomed()) { // unless we're maximized, clamp to work area
// in case LoadWnd's SetWindowPlacement places us off-screen
CRect r, wr;
GetWindowRect(r);
if (SystemParametersInfo(SPI_GETWORKAREA, 0, wr, 0)) {
CRect br = wr;
br.right -= GetSystemMetrics(SM_CXSMICON);
br.bottom -= GetSystemMetrics(SM_CYCAPTION);
CPoint pt = r.TopLeft();
if (!br.PtInRect(pt)) { // if dialog is off-screen
pt.x = CLAMP(pt.x, wr.left, wr.right - r.Width());
pt.y = CLAMP(pt.y, wr.top, wr.bottom - r.Height());
r = CRect(pt, CSize(r.Width(), r.Height()));
MoveWindow(r);
}
}
}
}
}
BOOL CPersistDlg::PreTranslateMessage(MSG* pMsg)
{
if (pMsg->message >= WM_KEYFIRST && pMsg->message <= WM_KEYLAST) {
if (m_Accel != NULL) {
if (TranslateAccelerator(m_hWnd, m_Accel, pMsg))
return(TRUE);
} else { // no local accelerator table
// if non-system key down and main accelerators, give main a try
if (pMsg->message == WM_KEYDOWN && m_IDAccel == IDR_MAINFRAME
&& AfxGetMainWnd()->SendMessage(UWM_HANDLEDLGKEY, (WPARAM)pMsg))
return(TRUE);
}
}
return CDialog::PreTranslateMessage(pMsg);
}
W64INT CPersistDlg::DoModal()
{
HWND hWndFocus = ::GetFocus();
W64INT retc = CDialog::DoModal();
if (IsWindow(hWndFocus) && ::IsWindowVisible(hWndFocus)) // extra cautious
::SetFocus(hWndFocus);
return retc;
}<|fim▁end|>
|
{
CDialog::DoDataExchange(pDX);
//{{AFX_DATA_MAP(CPersistDlg)
//}}AFX_DATA_MAP
|
<|file_name|>iBeanRoastInformation.ts<|end_file_name|><|fim▁begin|>export interface IBeanRoastInformation {
/* On which temperature where the beans removed from the roaster **/
drop_temperature: number;
/* Roasting length **/
roast_length: number;
/* Roasting machine **/
roaster_machine: string;
/* The used green beans weight*/
green_bean_weight:number;
outside_temperature:number;
humidity:number;
/* Inherits the bean unique id, without this id, the roast information won't be displayed **/
bean_uuid: string;
/* Inherits the first crack minute **/
first_crack_minute: number;
/* Inherits the first crack temperature **/
first_crack_temperature: number;
/* Inherits the second crack minute **/
second_crack_minute: number;
/* Inherits the second crack temperatuer **/<|fim▁hole|><|fim▁end|>
|
second_crack_temperature: number;
}
|
<|file_name|>wemo.py<|end_file_name|><|fim▁begin|>"""
Support for WeMo switches.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/switch.wemo/
"""
import logging
from datetime import datetime, timedelta
from homeassistant.components.switch import SwitchDevice
from homeassistant.util import convert
from homeassistant.const import (
STATE_OFF, STATE_ON, STATE_STANDBY, STATE_UNKNOWN)
from homeassistant.loader import get_component
DEPENDENCIES = ['wemo']
_LOGGER = logging.getLogger(__name__)
ATTR_SENSOR_STATE = "sensor_state"
ATTR_SWITCH_MODE = "switch_mode"
ATTR_CURRENT_STATE_DETAIL = 'state_detail'
ATTR_COFFEMAKER_MODE = "coffeemaker_mode"
MAKER_SWITCH_MOMENTARY = "momentary"
MAKER_SWITCH_TOGGLE = "toggle"
WEMO_ON = 1
WEMO_OFF = 0
WEMO_STANDBY = 8
# pylint: disable=unused-argument, too-many-function-args
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Set up discovered WeMo switches.

    Only acts on devices announced by the wemo component's discovery
    (``discovery_info``); without it no entity is added.
    """
    import pywemo.discovery as discovery

    if discovery_info is not None:
        location = discovery_info['ssdp_description']
        mac = discovery_info['mac_address']
        device = discovery.device_from_description(location, mac)

        if device:
            add_devices_callback([WemoSwitch(device)])
class WemoSwitch(SwitchDevice):
"""Representation of a WeMo switch."""
    def __init__(self, device):
        """Initialize the WeMo switch."""
        self.wemo = device
        self.insight_params = None
        self.maker_params = None
        self.coffeemaker_mode = None
        self._state = None
        # look up model name once as it incurs network traffic
        self._model_name = self.wemo.model_name

        # Push-based updates: register with the wemo component's
        # subscription registry so the device notifies us on state change.
        wemo = get_component('wemo')
        wemo.SUBSCRIPTION_REGISTRY.register(self.wemo)
        wemo.SUBSCRIPTION_REGISTRY.on(self.wemo, None, self._update_callback)
def _update_callback(self, _device, _type, _params):
"""Called by the Wemo device callback to update state."""
_LOGGER.info(
'Subscription update for %s',
_device)
updated = self.wemo.subscription_update(_type, _params)<|fim▁hole|>
if not hasattr(self, 'hass'):
return
self.schedule_update_ha_state()
    @property
    def should_poll(self):
        """No polling needed with subscriptions."""
        # Insight models are the exception: their power/energy readings
        # are fetched by polling (see _update).
        if self._model_name == 'Insight':
            return True
        return False

    @property
    def unique_id(self):
        """Return the ID of this WeMo switch."""
        return "{}.{}".format(self.__class__, self.wemo.serialnumber)

    @property
    def name(self):
        """Return the name of the switch if any."""
        return self.wemo.name
    @property
    def device_state_attributes(self):
        """Return the state attributes of the device.

        Only attributes relevant to the detected model (Maker, Insight,
        CoffeeMaker) are included.
        """
        attr = {}
        if self.maker_params:
            # Is the maker sensor on or off.
            if self.maker_params['hassensor']:
                # Note a state of 1 matches the WeMo app 'not triggered'!
                if self.maker_params['sensorstate']:
                    attr[ATTR_SENSOR_STATE] = STATE_OFF
                else:
                    attr[ATTR_SENSOR_STATE] = STATE_ON

            # Is the maker switch configured as toggle(0) or momentary (1).
            if self.maker_params['switchmode']:
                attr[ATTR_SWITCH_MODE] = MAKER_SWITCH_MOMENTARY
            else:
                attr[ATTR_SWITCH_MODE] = MAKER_SWITCH_TOGGLE

        if self.insight_params or (self.coffeemaker_mode is not None):
            attr[ATTR_CURRENT_STATE_DETAIL] = self.detail_state

        if self.insight_params:
            # Cumulative on-times reported by Insight, rendered as uptime.
            attr['on_latest_time'] = \
                WemoSwitch.as_uptime(self.insight_params['onfor'])
            attr['on_today_time'] = \
                WemoSwitch.as_uptime(self.insight_params['ontoday'])
            attr['on_total_time'] = \
                WemoSwitch.as_uptime(self.insight_params['ontotal'])
            # Device reports milliwatts; expose watts.
            attr['power_threshold_w'] = \
                convert(
                    self.insight_params['powerthreshold'], float, 0.0
                ) / 1000.0

        if self.coffeemaker_mode is not None:
            attr[ATTR_COFFEMAKER_MODE] = self.coffeemaker_mode

        return attr
    @staticmethod
    def as_uptime(_seconds):
        """Format seconds into uptime string in the format: 00d 00h 00m 00s."""
        # datetime(1, 1, 1) serves as a zero point, so .day starts at 1
        # and must be corrected by -1 below.  NOTE(review): spans of a
        # month or more roll the month over and reset .day -- confirm
        # inputs stay below that.
        uptime = datetime(1, 1, 1) + timedelta(seconds=_seconds)
        return "{:0>2d}d {:0>2d}h {:0>2d}m {:0>2d}s".format(uptime.day-1,
                                                            uptime.hour,
                                                            uptime.minute,
                                                            uptime.second)
    @property
    def current_power_w(self):
        """Current power usage in W."""
        # Device reports milliwatts; implicitly returns None for models
        # without insight_params.
        if self.insight_params:
            return convert(
                self.insight_params['currentpower'], float, 0.0
            ) / 1000.0

    @property
    def today_energy_kwh(self):
        """Today total energy usage in kWh."""
        # 'todaymw' is in milliwatt-minutes: /1000 -> W-min, /1000 ->
        # kW-min, /60 -> kWh.
        if self.insight_params:
            miliwatts = convert(self.insight_params['todaymw'], float, 0.0)
            return round(miliwatts / (1000.0 * 1000.0 * 60), 2)
    @property
    def detail_state(self):
        """Return the state of the device."""
        if self.coffeemaker_mode is not None:
            # CoffeeMaker exposes a human-readable mode string instead of
            # a plain on/off state.
            return self.wemo.mode_string
        if self.insight_params:
            # Map the Insight's numeric state onto HA state constants.
            standby_state = int(self.insight_params['state'])
            if standby_state == WEMO_ON:
                return STATE_ON
            elif standby_state == WEMO_OFF:
                return STATE_OFF
            elif standby_state == WEMO_STANDBY:
                return STATE_STANDBY
            else:
                return STATE_UNKNOWN
    @property
    def is_on(self):
        """Return true if switch is on. Standby is on."""
        return self._state

    @property
    def available(self):
        """True if switch is available."""
        # Each special model is considered unavailable until its
        # model-specific parameters have been fetched at least once.
        if self._model_name == 'Insight' and self.insight_params is None:
            return False
        if self._model_name == 'Maker' and self.maker_params is None:
            return False
        if self._model_name == 'CoffeeMaker' and self.coffeemaker_mode is None:
            return False
        return True

    @property
    def icon(self):
        """Icon of device based on its type."""
        if self._model_name == 'CoffeeMaker':
            return 'mdi:coffee'
        else:
            return super().icon
def turn_on(self, **kwargs):
"""Turn the switch on."""
self._state = WEMO_ON
self.wemo.on()
self.schedule_update_ha_state()
def turn_off(self):
"""Turn the switch off."""
self._state = WEMO_OFF
self.wemo.off()
self.schedule_update_ha_state()
def update(self):
"""Update WeMo state."""
self._update(force_update=True)
    def _update(self, force_update=True):
        """Fetch state (and model-specific parameters) from the device.

        Never raises: communication failures surface as AttributeError
        inside pywemo, are logged, and leave the previous state in place.
        """
        try:
            self._state = self.wemo.get_state(force_update)
            if self._model_name == 'Insight':
                self.insight_params = self.wemo.insight_params
                # NOTE(review): get_standby_state is referenced without
                # being called -- presumably a property; confirm.
                self.insight_params['standby_state'] = (
                    self.wemo.get_standby_state)
            elif self._model_name == 'Maker':
                self.maker_params = self.wemo.maker_params
            elif self._model_name == 'CoffeeMaker':
                self.coffeemaker_mode = self.wemo.mode
        except AttributeError as err:
            _LOGGER.warning('Could not update status for %s (%s)',
                            self.name, err)
self._update(force_update=(not updated))
|
<|file_name|>score_responsible.py<|end_file_name|><|fim▁begin|>##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.<|fim▁hole|>#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import factory.fuzzy
from base.tests.factories.learning_unit_year import LearningUnitYearFactory
from base.tests.factories.tutor import TutorFactory
from learning_unit.tests.factories.learning_class_year import LearningClassYearFactory
class ScoreResponsibleFactory(factory.DjangoModelFactory):
    """Factory for assessments.ScoreResponsible covering a whole learning
    unit year (no specific class)."""

    class Meta:
        model = 'assessments.ScoreResponsible'

    tutor = factory.SubFactory(TutorFactory)
    learning_unit_year = factory.SubFactory(LearningUnitYearFactory)
    # None means the responsibility applies to the learning unit as a whole.
    learning_class_year = None
class ScoreResponsibleOfClassFactory(ScoreResponsibleFactory):
    """Variant whose responsibility targets one lecturing class of the
    same learning unit year."""

    learning_class_year = factory.SubFactory(
        LearningClassYearFactory,
        learning_component_year__lecturing=True,
        # Keep the class's learning unit year consistent with the one set
        # on this factory itself.
        learning_component_year__learning_unit_year=factory.LazyAttribute(
            lambda component: component.factory_parent.factory_parent.learning_unit_year
        )
    )
|
# The core business involves the administration of students, teachers,
# courses, programs and so on.
|
<|file_name|>observing-proxy.js<|end_file_name|><|fim▁begin|>/*
A proxy for observing object state changes.
var obj={person:'Eddie',age:22};
_o.onUpdate(obj,{
age:function(value){
if(value>this.oldValue)
console.log('Happy birthday, Peter!')
},
person:function(value){
console.log(this.oldValue+' is now '+value);
}
});
_o(obj).person='Peter';
//> Eddie is now Peter
_o(obj).age++;
//> Happy birthday, Peter!
*/
!function(){
'use strict';
var tl=function(i,ln,ms){
return function(ms,f){
i++;
if(f()) ln='info',ms='Test '+i+' passed: '+ms;
else ln='error',ms='Test '+i+' failed: '+ms;
console[ln](ms)}}(0);
var cl=function(){
console.log.apply(console,arguments)};
var observingProxy=function(targetStack,proxyStack,changeStack,handlerStack,timeoutStack){
function getDeepPropertyDescriptors(o)
{
var ns;
if(o){
ns=getDeepPropertyDescriptors(Object.getPrototypeOf(o))||[];
Array.prototype.push.apply(ns,
Object.getOwnPropertyNames(o)
.filter(function(k){
return isNaN(parseInt(k))})
.map(function(k){
return {name:k,descriptor:Object.getOwnPropertyDescriptor(o,k)}}));
}
return ns;
}
function newProxy(t)
{
var p={},ns=getDeepPropertyDescriptors(t);
for(var i=ns.length;i--;){
delete ns[i].descriptor.value;
delete ns[i].descriptor.writable;
ns[i].descriptor.get=propertyGetter.bind({target:t,name:ns[i].name});
ns[i].descriptor.set=propertySetter.bind({target:t,name:ns[i].name});
try{
Object.defineProperty(p,ns[i].name,ns[i].descriptor);
}
catch(e){}
}
return p;
}
function notifyObservers(target)
{
var targetInd=targetIndex(target);
if(changeStack[targetInd].length){
for(var l=0;l<handlerStack[targetInd].length;l++)
handlerStack[targetInd][l].call({},changeStack[targetInd]);
changeStack[targetInd]=[];
}
}
function propertyGetter()
{
var r=this.target[this.name];
if(Array.isArray(this.target)&&['pop','push','shift','splice','unshift'].
indexOf(this.name)>-1)
r=function(){
var res=this.target[this.name].apply(this.target,arguments),
targetInd=targetIndex(this.target);
changeStack[targetInd].push(({
'pop':{object:this.target,type:'splice',index:this.target.length-1,
removed:[res],addedCount:0},
'push':{object:this.target,type:'splice',index:this.target.length-1,
removed:[],addedCount:1},
'shift':{object:this.target,type:'splice',index:0,removed:[res],
addedCount:0},
'splice':{object:this.target,type:'splice',index:arguments[0],
removed:res,addedCount:Array.prototype.slice.call(arguments,2).length},
'unshift':{object:this.target,type:'splice',index:0,removed:[],
addedCount:1}
})[this.name]);
clearTimeout(timeoutStack[targetInd]);
timeoutStack[targetInd]=setTimeout(function(){
notifyObservers(this.target)}.bind(this));
}.bind(this);
return r;
}
function propertySetter(userVal)
{<|fim▁hole|> this.target[this.name]=userVal;
changeStack[targetInd].push(
{name:this.name,object:this.target,type:'update',oldValue:val});
clearTimeout(timeoutStack[targetInd]);
timeoutStack[targetInd]=setTimeout(function(){
notifyObservers(this.target)}.bind(this));
}
}
function targetIndex(t)
{
var i=targetStack.indexOf(t);
if(i===-1&&t){
i=targetStack.push(t)-1;
proxyStack.push(newProxy(t));
changeStack.push([]);
handlerStack.push([]);
timeoutStack.push(0);
}
return i;
}
if(this.test_o)
tl('getDeepPropertyDescriptors',function(){
return getDeepPropertyDescriptors([1,2,3]).reduce(function(hasPush,item){
return hasPush||item.name==='push';
},false)});
if(this.test_o)
tl('Property getter',function(){
var s={p1:1};
return newProxy(s).p1===s.p1});
if(this.test_o)
tl('Property setter',function(){
var s={p1:1};
newProxy(s).p1=2;
return s.p1===2});
if(this.test_o)
tl('Array splice',function(){
var s=[];
newProxy(s).push(1);
return s.length===1});
return {
addChangeHandler:function(target,changeHandler,callOnInit){
var targetInd=targetIndex(target);
handlerStack[targetInd].indexOf(changeHandler)===-1&&
handlerStack[targetInd].push(changeHandler);
if(callOnInit){
var changes=Array.isArray(target)
?target.map(function(_,index){
return {object:target,type:'splice',index:index,removed:[],
addedCount:1}})
:Object.getOwnPropertyNames(target).map(function(key){
return {name:key,object:target,type:'update',oldValue:target[key]}
});
changeHandler.call({},changes);
}
},
getProxy:function(target){
return proxyStack[targetIndex(target)]||target;
},
removeChangeHandler:function(target,changeHandler){
var targetInd=targetIndex(target),rmInd;
if((rmInd=handlerStack[targetInd].indexOf(changeHandler))>-1)
handlerStack[targetInd].splice(rmInd,1);
else if(!changeHandler)
handlerStack[targetInd]=[];
clearTimeout(timeoutStack[targetInd]);
}
}
}.bind(this)([],[],[],[],[]);
function _o(target)
{
return observingProxy.getProxy(target);
}
_o.observe=function(target,changeHandler,callOnInit){
if(!target)
throw 'Observing proxy error: cannot _o.observe '+target+' object';
return observingProxy.addChangeHandler.apply(observingProxy,arguments);
};
_o.unobserve=function(target,changeHandler){
if(!target)
throw 'Observing proxy error: cannot _o.unobserve '+target+' object';
return observingProxy.removeChangeHandler.apply(observingProxy,arguments);
};
_o.onUpdate=function(target,onChangeCollection,callOnInit){
var onPropertyChange;
if(typeof onChangeCollection==='string'){
onChangeCollection={};
onChangeCollection[arguments[1]]=arguments[2];
callOnInit=arguments[3];
}
callOnInit=callOnInit===undefined&&true||callOnInit;
if(target)
observingProxy.addChangeHandler(target,onPropertyChange=function(changes){
for(var key in onChangeCollection)
for(var i=changes.length;i--;)
if(changes[i].name===key&&changes[i].type==='update'){
onChangeCollection[key].call(changes[i],changes[i].object[changes[i].name]);
break;
}
},callOnInit);
return{
destroy:function(){
observingProxy.removeChangeHandler(target,onPropertyChange);
},
report:function(){
if(!target)
throw 'Observing proxy error: cannot _o.onUpdate '+target+' object';
},
restore:function(){
observingProxy.addChangeHandler(target,onPropertyChange,callOnInit);
}
};
};
if(typeof Object.defineProperty!=='function')
throw 'Object.defineProperty is not a function';
if(this.exports&&this.module)
this.module.exports=_o;
else if(this.define&&this.define.amd)
this.define(function(){return _o});
else
this._o=_o;
if(this.test_o)
tl('getProxy',function(){
var s={p1:1};
return observingProxy.getProxy(s).p1===s.p1});
if(this.test_o)
!function(){
var u;
var s={p1:1};
observingProxy.addChangeHandler(s,function(changes){
clearTimeout(u);
tl('addChangeHandler',function(){return true});
});
observingProxy.getProxy(s).p1=101;
u=setTimeout(function(){
tl('addChangeHandler',function(){return false});
});
}();
if(this.test_o)
!function(){
var u;
var f=function(){
clearTimeout(u);
tl('removeChangeHandler',function(){return false});
};
var s={p1:1};
observingProxy.addChangeHandler(s,f);
observingProxy.removeChangeHandler(s,f);
observingProxy.getProxy(s).p1=2;
u=setTimeout(function(){
tl('removeChangeHandler',function(){return true});
});
}();
if(this.test_o)
!function(){
var s={p1:1};
var u=setTimeout(function(){
tl('callOnInit',function(){return false});
});
observingProxy.addChangeHandler(s,function(changes){
clearTimeout(u);
tl('callOnInit',function(){return true});
},true);
}();
if(this.test_o)
!function(){
var s={p1:0},proxy=observingProxy.getProxy(s),n=0;
observingProxy.addChangeHandler(s,function(ch){
n++;
});
for(var i=10;i--;)
proxy.p1++;
setTimeout(function(){
tl('Delayed notify',function(){return n===1});
})
}();
if(this.test_o)
!function(){
var s={p1:1};
_o.onUpdate(s,'p1',function(value){
if(value===2){
clearTimeout(u);
tl('onUpdateHandler',function(){return value===2});
}
});
observingProxy.getProxy(s).p1=2;
var u=setTimeout(function(){
tl('onUpdateHandler',function(){return false});
});
}();
if(this.test_o)
!function(){
var s={p1:1},proxy=observingProxy.getProxy(s),i=0;
var observer=_o.onUpdate(s,'p1',function(value){
i++;
});
setTimeout(function(){
proxy.p1=2;
setTimeout(function(){
observer.destroy();
proxy.p1=3;
setTimeout(function(){
observer.restore();
proxy.p1=4;
setTimeout(function(){
tl('onUpdateHandler destructor',function(){return i==4});
});
});
});
});
}();
}.bind(this)()<|fim▁end|>
|
var val=this.target[this.name],
targetInd=targetIndex(this.target);
if(val!==userVal){
|
<|file_name|>container.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e_node
import (
"errors"
"fmt"
"k8s.io/kubernetes/pkg/api"
apierrs "k8s.io/kubernetes/pkg/api/errors"
client "k8s.io/kubernetes/pkg/client/unversioned"
"k8s.io/kubernetes/pkg/util"
"github.com/onsi/gomega/format"
"github.com/onsi/gomega/types"
)<|fim▁hole|>// One pod one container
type ConformanceContainer struct {
Container api.Container
Client *client.Client
RestartPolicy api.RestartPolicy
Volumes []api.Volume
NodeName string
Namespace string
podName string
}
// ConformanceContainerEqualMatcher is a Gomega matcher that compares
// values with api.Semantic.DeepDerivative instead of strict equality, so
// fields left unset in Expected are ignored during the comparison.
type ConformanceContainerEqualMatcher struct {
	Expected interface{}
}

// CContainerEqual wraps an expected value in a ConformanceContainerEqualMatcher.
func CContainerEqual(expected interface{}) types.GomegaMatcher {
	return &ConformanceContainerEqualMatcher{
		Expected: expected,
	}
}

// Match implements types.GomegaMatcher via semantic deep-derivative
// comparison; comparing nil to nil is rejected as a likely test bug.
func (matcher *ConformanceContainerEqualMatcher) Match(actual interface{}) (bool, error) {
	if actual == nil && matcher.Expected == nil {
		return false, fmt.Errorf("Refusing to compare <nil> to <nil>.\nBe explicit and use BeNil() instead. This is to avoid mistakes where both sides of an assertion are erroneously uninitialized.")
	}
	val := api.Semantic.DeepDerivative(matcher.Expected, actual)
	return val, nil
}

// FailureMessage implements types.GomegaMatcher.
func (matcher *ConformanceContainerEqualMatcher) FailureMessage(actual interface{}) (message string) {
	return format.Message(actual, "to equal", matcher.Expected)
}

// NegatedFailureMessage implements types.GomegaMatcher.
func (matcher *ConformanceContainerEqualMatcher) NegatedFailureMessage(actual interface{}) (message string) {
	return format.Message(actual, "not to equal", matcher.Expected)
}
// Create builds a single-container pod around cc.Container and submits it
// to the API server. The pod name is the container name plus a fresh UUID
// so repeated creations do not collide.
func (cc *ConformanceContainer) Create() error {
	cc.podName = cc.Container.Name + string(util.NewUUID())
	pod := &api.Pod{
		ObjectMeta: api.ObjectMeta{
			Name:      cc.podName,
			Namespace: cc.Namespace,
		},
		Spec: api.PodSpec{
			NodeName:      cc.NodeName,
			RestartPolicy: cc.RestartPolicy,
			Containers: []api.Container{
				cc.Container,
			},
			Volumes: cc.Volumes,
		},
	}
	_, err := cc.Client.Pods(cc.Namespace).Create(pod)
	return err
}
//Same with 'delete'
// Stop deletes the pod; it is intentionally identical to Delete and
// exists only to make intent explicit at test call sites.
func (cc *ConformanceContainer) Stop() error {
	return cc.Client.Pods(cc.Namespace).Delete(cc.podName, &api.DeleteOptions{})
}

// Delete removes the pod previously created by Create.
func (cc *ConformanceContainer) Delete() error {
	return cc.Client.Pods(cc.Namespace).Delete(cc.podName, &api.DeleteOptions{})
}
// Get fetches the pod and rebuilds a ConformanceContainer view of it.
// It fails unless the pod spec holds exactly one container.
func (cc *ConformanceContainer) Get() (ConformanceContainer, error) {
	pod, err := cc.Client.Pods(cc.Namespace).Get(cc.podName)
	if err != nil {
		return ConformanceContainer{}, err
	}
	containers := pod.Spec.Containers
	if containers == nil || len(containers) != 1 {
		return ConformanceContainer{}, errors.New("Failed to get container")
	}
	return ConformanceContainer{containers[0], cc.Client, pod.Spec.RestartPolicy, pod.Spec.Volumes, pod.Spec.NodeName, cc.Namespace, cc.podName}, nil
}
// GetStatus returns the single container's status plus the pod phase.
// When the pod or its (single) container status cannot be retrieved, the
// phase is api.PodUnknown and a non-nil error is returned.
func (cc *ConformanceContainer) GetStatus() (api.ContainerStatus, api.PodPhase, error) {
	pod, err := cc.Client.Pods(cc.Namespace).Get(cc.podName)
	if err != nil {
		return api.ContainerStatus{}, api.PodUnknown, err
	}
	statuses := pod.Status.ContainerStatuses
	if len(statuses) != 1 {
		return api.ContainerStatus{}, api.PodUnknown, errors.New("Failed to get container status")
	}
	return statuses[0], pod.Status.Phase, nil
}
// Present reports whether the container's pod currently exists.
// A NotFound error from the API server means "absent" and is not
// surfaced as an error; any other error is returned to the caller.
func (cc *ConformanceContainer) Present() (bool, error) {
	if _, err := cc.Client.Pods(cc.Namespace).Get(cc.podName); err != nil {
		if apierrs.IsNotFound(err) {
			return false, nil
		}
		return false, err
	}
	return true, nil
}
// ContainerState is a bitmask of coarse container lifecycle states used
// by the e2e tests to express "any of these states" expectations.
type ContainerState uint32

const (
	ContainerStateWaiting ContainerState = 1 << iota
	ContainerStateRunning
	ContainerStateTerminated
	// ContainerStateUnknown is returned when none of the sub-structs in
	// an api.ContainerState is populated.
	ContainerStateUnknown
)
func GetContainerState(state api.ContainerState) ContainerState {
if state.Waiting != nil {
return ContainerStateWaiting
}
if state.Running != nil {
return ContainerStateRunning
}
if state.Terminated != nil {
return ContainerStateTerminated
}
return ContainerStateUnknown
}<|fim▁end|>
| |
<|file_name|>test_KeepOpenTCP.py<|end_file_name|><|fim▁begin|>import dns
import os
import socket
import struct
from recursortests import RecursorTest
class testKeepOpenTCP(RecursorTest):
    """Checks that the recursor keeps client TCP connections open across
    queries and sends no trailing bytes after a response."""

    # Directory name (under configs/) for this test's generated config.
    _confdir = 'KeepOpenTCP'
    # Recursor configuration; %s is filled with _confdir below.
    _config_template = """dnssec=validate
packetcache-ttl=10
packetcache-servfail-ttl=10
auth-zones=authzone.example=configs/%s/authzone.zone""" % _confdir
@classmethod
def generateRecursorConfig(cls, confdir):
authzonepath = os.path.join(confdir, 'authzone.zone')
with open(authzonepath, 'w') as authzone:
authzone.write("""$ORIGIN authzone.example.
@ 3600 IN SOA {soa}
@ 3600 IN A 192.0.2.88
""".format(soa=cls._SOA))
super(testKeepOpenTCP, cls).generateRecursorConfig(confdir)
def sendTCPQueryKeepOpen(cls, sock, query, timeout=2.0):
try:
wire = query.to_wire()
sock.send(struct.pack("!H", len(wire)))
sock.send(wire)
data = sock.recv(2)
if data:
(datalen,) = struct.unpack("!H", data)
data = sock.recv(datalen)
except socket.timeout as e:
print("Timeout: %s" % (str(e)))
data = None
except socket.error as e:
print("Network error: %s" % (str(e)))
data = None
message = None
if data:
message = dns.message.from_wire(data)
return message
def testNoTrailingData(self):
count = 10
sock = [None] * count
expected = dns.rrset.from_text('ns.secure.example.', 0, dns.rdataclass.IN, 'A', '{prefix}.9'.format(prefix=self._PREFIX))
query = dns.message.make_query('ns.secure.example', 'A', want_dnssec=True)
query.flags |= dns.flags.AD
for i in range(count):
sock[i] = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock[i].settimeout(2.0)
sock[i].connect(("127.0.0.1", self._recursorPort))
res = self.sendTCPQueryKeepOpen(sock[i], query)
self.assertMessageIsAuthenticated(res)
self.assertRRsetInAnswer(res, expected)
self.assertMatchingRRSIGInAnswer(res, expected)
sock[i].settimeout(0.1)
try:
data = sock[i].recv(1)
self.assertTrue(False)
except socket.timeout as e:
print("ok")
for i in range(count):<|fim▁hole|> self.assertMatchingRRSIGInAnswer(res, expected)
sock[i].settimeout(0.1)
try:
data = sock[i].recv(1)
self.assertTrue(False)
except socket.timeout as e:
print("ok")
for i in range(count):
sock[i].close()<|fim▁end|>
|
sock[i].settimeout(2.0)
res = self.sendTCPQueryKeepOpen(sock[i], query)
self.assertMessageIsAuthenticated(res)
self.assertRRsetInAnswer(res, expected)
|
<|file_name|>Java.py<|end_file_name|><|fim▁begin|>from coalib.bearlib.languages.Language import Language
@Language
class Java:<|fim▁hole|> extensions = '.java',
comment_delimiter = '//'
multiline_comment_delimiters = {'/*': '*/'}
string_delimiters = {'"': '"'}
multiline_string_delimiters = {}
indent_types = {'{': '}'}
encapsulators = {'(': ')', '[': ']'}<|fim▁end|>
| |
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, ViewChild, ElementRef, Renderer } from '@angular/core';
import { Task } from './shared/task.model';
import { Param } from './shared/param.model';
import { Class, ClassTypes } from './shared/class.model';
import { ClassParams } from './shared/class-params.model';
import { Rule } from './shared/rule.model';
import { RuleExpression, ExpressionTypes } from './shared/rule-expression.model';
import { ClassGraphArguments } from './shared/class-graph-arguments.model';
import { TaskService } from './shared/task.service';
import { SyncService } from './shared/sync/sync.service';
import { StorageService } from './shared/storage.service';
@Component({
selector: 'my-app',
templateUrl: './app/app.component.html',
styleUrls: ['./app/app.component.css']
})
export class AppComponent implements OnInit {
@ViewChild('syncButton') syncButton: ElementRef;
constructor(
private service: TaskService,
private syncService: SyncService,
private storageService: StorageService,
private renderer: Renderer) { }
ngOnInit() {
// let task: Task = this.createTestTask();
// console.log(task);
// this.service
// .solveTask(task)
// .then(console.log)
// .catch(error =>
// console.log(`Server error: ${error}`));<|fim▁hole|> // classType: task.in_vars[0].classes[0].type,
// params: task.in_vars[0].classes[0].params,
// from: task.in_vars[0].from,
// to: task.in_vars[0].to,
// step: 0.5,
// });
// console.log(graphArguments);
// this.service
// .buildClassGraph(graphArguments)
// .then(console.log)
// .catch(error => console.log(`Server error: ${error}`));;
this.storageService.saveTask(this.createTestTask());
this.syncService.setElement(this.syncButton);
}
enableSync(): void {
this.buttonState = true;
setTimeout(() => {
this.changeAnimationState();
}, 2000);
}
private createTestTask(): Task {
return new Task({
name: "engine_pressure",
in_vars: [
new Param({
name: "temperature",
from: 0,
to: 175,
value: 80,
classes: [
new Class({
name: "low",
type: ClassTypes.trapezoidal,
params: new ClassParams({
a: -1,
b: 0,
c: 50,
d: 100
})
}),
new Class({
name: "middle",
type: ClassTypes.trapezoidal,
params: new ClassParams({
a: 25,
b: 75,
c: 125,
d: 175
})
}),
new Class({
name: "high",
type: ClassTypes.trapezoidal,
params: new ClassParams({
a: 75,
b: 150,
c: 175,
d: 176
})
})
]
}),
new Param({
name: "fuel_consumption",
from: 0,
to: 8,
value: 3.5,
classes: [
new Class({
name: "low",
type: ClassTypes.triangular,
params: new ClassParams({
a: 0,
b: 2,
c: 4
})
}),
new Class({
name: "middle",
type: ClassTypes.triangular,
params: new ClassParams({
a: 2,
b: 4,
c: 6
})
}),
new Class({
name: "high",
type: ClassTypes.triangular,
params: new ClassParams({
a: 4,
b: 6,
c: 8
})
})
]
})
],
out_vars: [
new Param({
name: "pressure",
from: 0,
to: 150,
classes: [
new Class({
name: "low",
type: ClassTypes.triangular,
params: new ClassParams({
a: -1,
b: 0,
c: 100
})
}),
new Class({
name: "middle",
type: ClassTypes.triangular,
params: new ClassParams({
a: 50,
b: 100,
c: 150
})
}),
new Class({
name: "high",
type: ClassTypes.triangular,
params: new ClassParams({
a: 100,
b: 150,
c: 151
})
})
]
})
],
rules: [
new Rule({
var_name: "pressure",
class_name: "low",
expr: new RuleExpression({
type: ExpressionTypes.and,
left: new RuleExpression({
type: ExpressionTypes.state,
var_name: "temperature",
class_name: "low"
}),
right: new RuleExpression({
type: ExpressionTypes.state,
var_name: "fuel_consumption",
class_name: "low"
})
})
}),
new Rule({
var_name: "pressure",
class_name: "middle",
expr: new RuleExpression({
type: ExpressionTypes.state,
var_name: "temperature",
class_name: "middle"
})
}),
new Rule({
var_name: "pressure",
class_name: "high",
expr: new RuleExpression({
type: ExpressionTypes.or,
left: new RuleExpression({
type: ExpressionTypes.state,
var_name: "temperature",
class_name: "high"
}),
right: new RuleExpression({
type: ExpressionTypes.state,
var_name: "fuel_consumption",
class_name: "high"
})
})
})
]
});
}
changeAnimationState(): void {
this.buttonState = !this.buttonState;
}
buttonState: boolean = false;
}<|fim▁end|>
|
// let graphArguments: ClassGraphArguments = new ClassGraphArguments({
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Routines for configuring Heat
"""
import logging as sys_logging
import os
from eventlet.green import socket
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from heat.common import exception
from heat.common.i18n import _
from heat.common.i18n import _LW
from heat.common import wsgi
LOG = logging.getLogger(__name__)
paste_deploy_group = cfg.OptGroup('paste_deploy')
paste_deploy_opts = [
cfg.StrOpt('flavor',
help=_("The flavor to use.")),
cfg.StrOpt('api_paste_config', default="api-paste.ini",
help=_("The API paste config file to use."))]
service_opts = [
cfg.IntOpt('periodic_interval',
default=60,
help=_('Seconds between running periodic tasks.')),
cfg.StrOpt('heat_metadata_server_url',
default="",
help=_('URL of the Heat metadata server.')),
cfg.StrOpt('heat_waitcondition_server_url',
help=_('URL of the Heat waitcondition server.')),
cfg.StrOpt('heat_watch_server_url',
default="",
help=_('URL of the Heat CloudWatch server.')),
cfg.StrOpt('instance_connection_is_secure',
default="0",
help=_('Instance connection to CFN/CW API via https.')),
cfg.StrOpt('instance_connection_https_validate_certificates',
default="1",
help=_('Instance connection to CFN/CW API validate certs if '
'SSL is used.')),
cfg.StrOpt('region_name_for_services',
help=_('Default region name used to get services endpoints.')),
cfg.StrOpt('heat_stack_user_role',
default="heat_stack_user",
help=_('Keystone role for heat template-defined users.')),
cfg.StrOpt('stack_user_domain_id',
deprecated_opts=[cfg.DeprecatedOpt('stack_user_domain',
group=None)],
help=_('Keystone domain ID which contains heat '
'template-defined users. If this option is set, '
'stack_user_domain_name option will be ignored.')),
cfg.StrOpt('stack_user_domain_name',
help=_('Keystone domain name which contains heat '
'template-defined users. If `stack_user_domain_id` '
'option is set, this option is ignored.')),
cfg.StrOpt('stack_domain_admin',
help=_('Keystone username, a user with roles sufficient to '
'manage users and projects in the stack_user_domain.')),
cfg.StrOpt('stack_domain_admin_password',
secret=True,
help=_('Keystone password for stack_domain_admin user.')),
cfg.IntOpt('max_template_size',
default=524288,
help=_('Maximum raw byte size of any template.')),
cfg.IntOpt('max_nested_stack_depth',
default=5,
help=_('Maximum depth allowed when using nested stacks.')),
cfg.IntOpt('num_engine_workers',
default=processutils.get_worker_count(),
help=_('Number of heat-engine processes to fork and run.'))]
engine_opts = [
cfg.StrOpt('instance_user',
default='',
help=_("The default user for new instances. This option "
"is deprecated and will be removed in the Juno release. "
"If it's empty, Heat will use the default user set up "
"with your cloud image (for OS::Nova::Server) or "
"'ec2-user' (for AWS::EC2::Instance).")),
cfg.ListOpt('plugin_dirs',
default=['/usr/lib64/heat', '/usr/lib/heat',
'/usr/local/lib/heat', '/usr/local/lib64/heat'],
help=_('List of directories to search for plug-ins.')),
cfg.StrOpt('environment_dir',
default='/etc/heat/environment.d',
help=_('The directory to search for environment files.')),
cfg.StrOpt('deferred_auth_method',
choices=['password', 'trusts'],
default='trusts',
help=_('Select deferred auth method, '
'stored password or trusts.')),
cfg.ListOpt('trusts_delegated_roles',
default=[],
help=_('Subset of trustor roles to be delegated to heat.'
' If left unset, all roles of a user will be'
' delegated to heat when creating a stack.')),
cfg.IntOpt('max_resources_per_stack',
default=1000,
help=_('Maximum resources allowed per top-level stack. '
'-1 stands for unlimited.')),
cfg.IntOpt('max_stacks_per_tenant',
default=100,
help=_('Maximum number of stacks any one tenant may have'
' active at one time.')),
cfg.IntOpt('action_retry_limit',
default=5,
help=_('Number of times to retry to bring a '
'resource to a non-error state. Set to 0 to disable '
'retries.')),
cfg.IntOpt('event_purge_batch_size',
default=10,
help=_("Controls how many events will be pruned whenever a "
"stack's events exceed max_events_per_stack. Set this "
"lower to keep more events at the expense of more "
"frequent purges.")),
cfg.IntOpt('max_events_per_stack',
default=1000,
help=_('Maximum events that will be available per stack. Older'
' events will be deleted when this is reached. Set to 0'
' for unlimited events per stack.')),
cfg.IntOpt('stack_action_timeout',
default=3600,
help=_('Timeout in seconds for stack action (ie. create or'
' update).')),
cfg.IntOpt('error_wait_time',
default=240,
help=_('Error wait time in seconds for stack action (ie. create'
' or update).')),
cfg.IntOpt('engine_life_check_timeout',
default=2,
help=_('RPC timeout for the engine liveness check that is used'
' for stack locking.')),
cfg.BoolOpt('enable_cloud_watch_lite',
default=False,
help=_('Enable the legacy OS::Heat::CWLiteAlarm resource.')),
cfg.BoolOpt('enable_stack_abandon',
default=False,
help=_('Enable the preview Stack Abandon feature.')),
cfg.BoolOpt('enable_stack_adopt',
default=False,
help=_('Enable the preview Stack Adopt feature.')),
cfg.BoolOpt('convergence_engine',
default=False,
help=_('Enables engine with convergence architecture. All '
'stacks with this option will be created using '
'convergence engine .')),
cfg.StrOpt('default_software_config_transport',
choices=['POLL_SERVER_CFN',
'POLL_SERVER_HEAT',
'POLL_TEMP_URL'],
default='POLL_SERVER_CFN',
help=_('Template default for how the server should receive the '
'metadata required for software configuration. '
'POLL_SERVER_CFN will allow calls to the cfn API action '
'DescribeStackResource authenticated with the provided '
'keypair (requires enabled heat-api-cfn). '
'POLL_SERVER_HEAT will allow calls to the '
'Heat API resource-show using the provided keystone '
'credentials (requires keystone v3 API, and configured '
'stack_user_* config options). '
'POLL_TEMP_URL will create and populate a '
'Swift TempURL with metadata for polling (requires '
'object-store endpoint which supports TempURL).')),
cfg.StrOpt('default_deployment_signal_transport',
choices=['CFN_SIGNAL',
'TEMP_URL_SIGNAL',
'HEAT_SIGNAL'],
default='CFN_SIGNAL',
help=_('Template default for how the server should signal to '
'heat with the deployment output values. CFN_SIGNAL '
'will allow an HTTP POST to a CFN keypair signed URL '
'(requires enabled heat-api-cfn). '
'TEMP_URL_SIGNAL will create a Swift TempURL to be '
'signaled via HTTP PUT (requires object-store endpoint '
'which supports TempURL). '
'HEAT_SIGNAL will allow calls to the Heat API '
'resource-signal using the provided keystone '<|fim▁hole|> help=_('Stacks containing these tag names will be hidden. '
'Multiple tags should be given in a comma-delimited '
'list (eg. hidden_stack_tags=hide_me,me_too).')),
cfg.StrOpt('onready',
help=_('Deprecated.')),
cfg.BoolOpt('stack_scheduler_hints',
default=False,
help=_('When this feature is enabled, scheduler hints'
' identifying the heat stack context of a server'
' resource are passed to the configured schedulers in'
' nova, for server creates done using heat resource'
' types OS::Nova::Server and AWS::EC2::Instance.'
' heat_root_stack_id will be set to the id of the root'
' stack of the resource, heat_stack_id will be set to'
' the id of the resource\'s parent stack,'
' heat_stack_name will be set to the name of the'
' resource\'s parent stack, heat_path_in_stack will be'
' set to a list of tuples,'
' (stackresourcename, stackname) with list[0] being'
' (None, rootstackname), and heat_resource_name will'
' be set to the resource\'s name.')),
cfg.BoolOpt('encrypt_parameters_and_properties',
default=False,
help=_('Encrypt template parameters that were marked as'
' hidden and also all the resource properties before'
' storing them in database.'))]
rpc_opts = [
cfg.StrOpt('host',
default=socket.gethostname(),
help=_('Name of the engine node. '
'This can be an opaque identifier. '
'It is not necessarily a hostname, FQDN, '
'or IP address.'))]
profiler_group = cfg.OptGroup('profiler')
profiler_opts = [
cfg.BoolOpt("profiler_enabled", default=False,
help=_('If False fully disable profiling feature.')),
cfg.BoolOpt("trace_sqlalchemy", default=False,
help=_("If False do not trace SQL requests."))
]
auth_password_group = cfg.OptGroup('auth_password')
auth_password_opts = [
cfg.BoolOpt('multi_cloud',
default=False,
help=_('Allow orchestration of multiple clouds.')),
cfg.ListOpt('allowed_auth_uris',
default=[],
help=_('Allowed keystone endpoints for auth_uri when '
'multi_cloud is enabled. At least one endpoint needs '
'to be specified.'))]
# these options define baseline defaults that apply to all clients
default_clients_opts = [
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the OpenStack service.')),
cfg.StrOpt('ca_file',
help=_('Optional CA cert file to use in SSL connections.')),
cfg.StrOpt('cert_file',
help=_('Optional PEM-formatted certificate chain file.')),
cfg.StrOpt('key_file',
help=_('Optional PEM-formatted file that contains the '
'private key.')),
cfg.BoolOpt('insecure',
default=False,
help=_("If set, then the server's certificate will not "
"be verified."))]
# these options can be defined for each client
# they must not specify defaults, since any options not defined in a client
# specific group is looked up on the generic group above
clients_opts = [
cfg.StrOpt('endpoint_type',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the OpenStack service.')),
cfg.StrOpt('ca_file',
help=_('Optional CA cert file to use in SSL connections.')),
cfg.StrOpt('cert_file',
help=_('Optional PEM-formatted certificate chain file.')),
cfg.StrOpt('key_file',
help=_('Optional PEM-formatted file that contains the '
'private key.')),
cfg.BoolOpt('insecure',
help=_("If set, then the server's certificate will not "
"be verified."))]
heat_client_opts = [
cfg.StrOpt('url',
default='',
help=_('Optional heat url in format like'
' http://0.0.0.0:8004/v1/%(tenant_id)s.'))]
client_http_log_debug_opts = [
cfg.BoolOpt('http_log_debug',
default=False,
help=_("Allow client's debug log output."))]
revision_group = cfg.OptGroup('revision')
revision_opts = [
cfg.StrOpt('heat_revision',
default='unknown',
help=_('Heat build revision. '
'If you would prefer to manage your build revision '
'separately, you can move this section to a different '
'file and add it as another config option.'))]
def startup_sanity_check():
if (not cfg.CONF.stack_user_domain_id and
not cfg.CONF.stack_user_domain_name):
# FIXME(shardy): Legacy fallback for folks using old heat.conf
# files which lack domain configuration
LOG.warn(_LW('stack_user_domain_id or stack_user_domain_name not '
'set in heat.conf falling back to using default'))
else:
domain_admin_user = cfg.CONF.stack_domain_admin
domain_admin_password = cfg.CONF.stack_domain_admin_password
if not (domain_admin_user and domain_admin_password):
raise exception.Error(_('heat.conf misconfigured, cannot '
'specify "stack_user_domain_id" or '
'"stack_user_domain_name" without '
'"stack_domain_admin" and '
'"stack_domain_admin_password"'))
auth_key_len = len(cfg.CONF.auth_encryption_key)
if auth_key_len in (16, 24):
LOG.warn(
_LW('Please update auth_encryption_key to be 32 characters.'))
elif auth_key_len != 32:
raise exception.Error(_('heat.conf misconfigured, auth_encryption_key '
'must be 32 characters'))
def list_opts():
yield None, rpc_opts
yield None, engine_opts
yield None, service_opts
yield paste_deploy_group.name, paste_deploy_opts
yield auth_password_group.name, auth_password_opts
yield revision_group.name, revision_opts
yield profiler_group.name, profiler_opts
yield 'clients', default_clients_opts
for client in ('nova', 'swift', 'neutron', 'cinder',
'ceilometer', 'keystone', 'heat', 'glance', 'trove',
'sahara'):
client_specific_group = 'clients_' + client
yield client_specific_group, clients_opts
yield 'clients_heat', heat_client_opts
yield 'clients_nova', client_http_log_debug_opts
yield 'clients_cinder', client_http_log_debug_opts
cfg.CONF.register_group(paste_deploy_group)
cfg.CONF.register_group(auth_password_group)
cfg.CONF.register_group(revision_group)
cfg.CONF.register_group(profiler_group)
for group, opts in list_opts():
cfg.CONF.register_opts(opts, group=group)
def _get_deployment_flavor():
"""
Retrieve the paste_deploy.flavor config item, formatted appropriately
for appending to the application name.
"""
flavor = cfg.CONF.paste_deploy.flavor
return '' if not flavor else ('-' + flavor)
def _get_deployment_config_file():
"""
Retrieve the deployment_config_file config item, formatted as an
absolute pathname.
"""
config_path = cfg.CONF.find_file(
cfg.CONF.paste_deploy['api_paste_config'])
if config_path is None:
return None
return os.path.abspath(config_path)
def load_paste_app(app_name=None):
"""
Builds and returns a WSGI app from a paste config file.
We assume the last config file specified in the supplied ConfigOpts
object is the paste config file.
:param app_name: name of the application to load
:raises RuntimeError when config file cannot be located or application
cannot be loaded from config file
"""
if app_name is None:
app_name = cfg.CONF.prog
# append the deployment flavor to the application name,
# in order to identify the appropriate paste pipeline
app_name += _get_deployment_flavor()
conf_file = _get_deployment_config_file()
if conf_file is None:
raise RuntimeError(_("Unable to locate config file"))
try:
app = wsgi.paste_deploy_app(conf_file, app_name, cfg.CONF)
# Log the options used when starting if we're in debug mode...
if cfg.CONF.debug:
cfg.CONF.log_opt_values(logging.getLogger(app_name),
sys_logging.DEBUG)
return app
except (LookupError, ImportError) as e:
raise RuntimeError(_("Unable to load %(app_name)s from "
"configuration file %(conf_file)s."
"\nGot: %(e)r") % {'app_name': app_name,
'conf_file': conf_file,
'e': e})<|fim▁end|>
|
'credentials')),
cfg.ListOpt('hidden_stack_tags',
default=[],
|
<|file_name|>image_ordering_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import cv2
import numpy as np
from vision import camera_message_framework
import itertools
import time
shape = (500, 500, 3)
size = 1
for dim in shape:
size *= dim
def image_of(axes):
im = np.zeros(shape, dtype=np.uint8)
im[:, :, axes] = 255
return im<|fim▁hole|>black = image_of([]), 'black'
red = image_of([2]), 'red'
green = image_of([1]), 'green'
blue = image_of([0]), 'blue'
yellow = image_of([2, 1]), 'yellow'
cyan = image_of([1, 0]), 'cyan'
pink = image_of([0, 2]), 'pink'
white = image_of([0, 1, 2]), 'white'
images = [black, red, green, blue, yellow, cyan, pink, white]
f = camera_message_framework.Creator('forward', size)
def main():
for im, name in itertools.cycle(images):
f.write_frame(im, int(time.time() * 1000))
print('wrote {}'.format(name))
time.sleep(1)
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>chess.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import struct
class GGZChessFeedback:
def onSeat(self, seatNum, version):
pass
def onPlayers(self, whiteType, whiteName, blackType, blackName):
pass
def onClockRequest(self):
pass
def onClock(self, mode, seconds):
pass
def onStart(self):
pass
def onMove(self, move):
pass
class Chess:
CLOCK_NONE = 0
CLOCK_CLIENT = 1
CLOCK_SERVERLAG = 2
CLOCK_SERVER = 3
# Player codes (copied from GGZ_SEAT_*)
GGZ_SEAT_NONE = '\x00' # This seat does not exist */
GGZ_SEAT_OPEN = '\x01' # The seat is open (unoccupied).
GGZ_SEAT_BOT = '\x02' # The seat has a bot (AI) in it.
GGZ_SEAT_PLAYER = '\x03' # The seat has a regular player in it.
GGZ_SEAT_RESERVED = '\x04' # The seat is reserved for a player.
GGZ_SEAT_ABANDONED = '\x05' # The seat is abandoned by a player.
def sendClock(self, mode, seconds):
return '\x04' + struct.pack('!I', (mode << 24) | (seconds & 0x00FFFFFF))
def sendMove(self, move, time = None):
cmd = '\x06' + struct.pack('!I', len(move)) + move
if time is not None:
cmd += struct.pack('!I', time)
return cmd
def sendStart(self):
return '\x05'
def __init__(self, feedback):
self.feedback = feedback
self.decodeMethod = None
self.decodeMethods = {'\x01': self.decodeSeat,
'\x02': self.decodePlayers,
'\x03': self.decodeClockRequest,
'\x04': self.decodeClock,
'\x05': self.decodeStart,
# 6: Move request
'\x07': self.decodeMove,
'\x08': self.decodeGameEnd,
#9: Update request
'\x0a': self.decodeUpdate,
#11: server time update?
#12: self.decodeFlag,
'\x0d': self.decodeDraw}
def decode(self, char):
if self.decodeMethod is None:
try:
self.decodeMethod = self.decodeMethods[char]
except KeyError:
self.decodeMethod = None
print 'Unknown data received: %s' % repr(char)
return
self.command = ''
self.command += char
self.decodeMethod()
def getGGZString(self, buffer):
if len(buffer) < 4:
return (None, 0)
(length,) = struct.unpack("!I", buffer[:4])
if len(buffer) < length + 4:
return (None, 0)
string = buffer[4:length + 4]
# Strip C null characters
while string.endswith('\x00'):
string = string[:-1]
return (string, length + 4)
def decodeSeat(self):
# seat [01][num][version]
if len(self.command) == 3:
self.decodeMethod = None
(num, version) = struct.unpack('!xBB', self.command)
self.feedback.onSeat(num, version)
def decodePlayers(self):
# players [02][code1][name1(18)][code2][name2(18)]
# name is ommitted if code == 01 (open)
requiredLength = 2
if len(self.command) < requiredLength:
return
<|fim▁hole|> else:
(whiteName, offset) = self.getGGZString(self.command[requiredLength:])
if whiteName is None:
return
requiredLength += 1 + offset
if len(self.command) < requiredLength:
return
blackCode = self.command[requiredLength - 1]
if blackCode == self.GGZ_SEAT_OPEN:
blackName = ''
else:
(blackName, offset) = self.getGGZString(self.command[requiredLength:])
if blackName is None:
return
requiredLength += offset
if len(self.command) >= requiredLength:
self.decodeMethod = None
self.feedback.onPlayers(whiteCode, whiteName, blackCode, blackName)
def decodeClockRequest(self):
# [3]
self.decodeMethod = None
self.feedback.onClockRequest()
def decodeClock(self):
# [4][mode][seconds]
if len(self.command) == 5:
(value,) = struct.unpack("!xI", self.command)
mode = value >> 24
seconds = value & 0x00FFFFFF
self.feedback.onClock(mode, seconds)
self.decodeMethod = None
def decodeStart(self):
# [5]
self.decodeMethod = None
self.feedback.onStart()
def decodeMove(self):
# [07][move(8)]
# or [07][move(8)][seconds]
(move, _) = self.getGGZString(self.command[1:])
if move is None:
return
self.decodeMethod = None
self.feedback.onMove(move)
def decodeGameEnd(self):
# [08][result]
if len(self.command) == 2:
self.decodeMethod = None
def decodeUpdate(self):
# [0A][wtime][btime]
if len(self.command) == 9:
self.decodeMethod = None
def decodeDraw(self):
# [0D]
self.decodeMethod = None<|fim▁end|>
|
whiteCode = self.command[1]
if whiteCode == self.GGZ_SEAT_OPEN:
requiredLength += 1
whiteName = ''
|
<|file_name|>0004_auto_20170913_1043.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-13 10:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('company', '0003_auto_20170913_1007'),
]
operations = [
migrations.AlterField(<|fim▁hole|> ),
]<|fim▁end|>
|
model_name='company',
name='basic_material',
field=models.IntegerField(choices=[(0, 'Plastic / Resin'), (1, 'Metal'), (3, 'Other'), (4, '---')], default=0),
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-<|fim▁hole|>from . import res_partner_bank
from . import account_bank_statement_import<|fim▁end|>
| |
<|file_name|>DateUtil.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Created on 2015年8月24日
@author: hustcc
'''
import datetime
import time
# 当前时间,可用于mysql datetime
def now_datetime_string():
return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
def now_datetime():
return datetime.datetime.now()
def now_date_string():
return datetime.datetime.now().strftime("%Y-%m-%d")
def now_timestamp():
return time.time()
<|fim▁hole|>if __name__ == '__main__':
print(now_datetime())
print(now_timestamp())
print(now_date_string())<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2009 by the FIFE team
# http://www.fifengine.de
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
<|fim▁hole|>#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
"""
Pychan extension widgets.
Extension widgets are partly experimental, partly rarely used widgets
which are added here. They are by default not included in the widgets
registry and thus cannot be loaded from XML files. Use L{pychan.widgets.registerWidget}
to enable that.
Not the same care to keep the API stable will be taken for them and
before and if they are added (or replace) the standard widgets they
will have to be reviewed in detail.
"""<|fim▁end|>
| |
<|file_name|>medication_usage.pb.go<|end_file_name|><|fim▁begin|>// Copyright 2020 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0-devel
// protoc v3.12.3
// source: proto/google/fhir/proto/r5/core/resources/medication_usage.proto
package medication_usage_go_proto
import (
any "github.com/golang/protobuf/ptypes/any"
_ "github.com/google/fhir/go/proto/google/fhir/proto/annotations_go_proto"
codes_go_proto "github.com/google/fhir/go/proto/google/fhir/proto/r5/core/codes_go_proto"
datatypes_go_proto "github.com/google/fhir/go/proto/google/fhir/proto/r5/core/datatypes_go_proto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// Auto-generated from StructureDefinition for MedicationUsage, last updated
// 2019-12-31T21:03:40.621+11:00. Record of medication being taken by a patient.
// See http://hl7.org/fhir/StructureDefinition/MedicationUsage
type MedicationUsage struct {
	// Internal protobuf runtime bookkeeping; managed by the protobuf library.
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Logical id of this artifact
	Id *datatypes_go_proto.Id `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
	// Metadata about the resource
	Meta *datatypes_go_proto.Meta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"`
	// A set of rules under which this content was created
	ImplicitRules *datatypes_go_proto.Uri `protobuf:"bytes,3,opt,name=implicit_rules,json=implicitRules,proto3" json:"implicit_rules,omitempty"`
	// Language of the resource content
	Language *datatypes_go_proto.Code `protobuf:"bytes,4,opt,name=language,proto3" json:"language,omitempty"`
	// Text summary of the resource, for human interpretation
	Text *datatypes_go_proto.Narrative `protobuf:"bytes,5,opt,name=text,proto3" json:"text,omitempty"`
	// Contained, inline Resources
	Contained []*any.Any `protobuf:"bytes,6,rep,name=contained,proto3" json:"contained,omitempty"`
	// Additional content defined by implementations
	Extension []*datatypes_go_proto.Extension `protobuf:"bytes,8,rep,name=extension,proto3" json:"extension,omitempty"`
	// Extensions that cannot be ignored
	ModifierExtension []*datatypes_go_proto.Extension `protobuf:"bytes,9,rep,name=modifier_extension,json=modifierExtension,proto3" json:"modifier_extension,omitempty"`
	// External identifier
	Identifier []*datatypes_go_proto.Identifier `protobuf:"bytes,10,rep,name=identifier,proto3" json:"identifier,omitempty"`
	// Fulfils plan, proposal or order
	BasedOn []*datatypes_go_proto.Reference `protobuf:"bytes,11,rep,name=based_on,json=basedOn,proto3" json:"based_on,omitempty"`
	// Part of referenced event
	PartOf []*datatypes_go_proto.Reference `protobuf:"bytes,12,rep,name=part_of,json=partOf,proto3" json:"part_of,omitempty"`
	// Status of the usage; see MedicationUsage_StatusCode for the value set.
	Status *MedicationUsage_StatusCode `protobuf:"bytes,13,opt,name=status,proto3" json:"status,omitempty"`
	// Reason for current status
	StatusReason []*datatypes_go_proto.CodeableConcept `protobuf:"bytes,14,rep,name=status_reason,json=statusReason,proto3" json:"status_reason,omitempty"`
	// Type of medication usage
	Category []*datatypes_go_proto.CodeableConcept `protobuf:"bytes,15,rep,name=category,proto3" json:"category,omitempty"`
	// What medication was taken (CodeableConcept-or-Reference choice).
	Medication *MedicationUsage_MedicationX `protobuf:"bytes,16,opt,name=medication,proto3" json:"medication,omitempty"`
	// Who is/was taking the medication
	Subject *datatypes_go_proto.Reference `protobuf:"bytes,17,opt,name=subject,proto3" json:"subject,omitempty"`
	// Encounter associated with MedicationUsage
	Encounter *datatypes_go_proto.Reference `protobuf:"bytes,18,opt,name=encounter,proto3" json:"encounter,omitempty"`
	// The date/time or interval when the medication is/was/will be taken.
	Effective *MedicationUsage_EffectiveX `protobuf:"bytes,19,opt,name=effective,proto3" json:"effective,omitempty"`
	// When the usage was asserted?
	DateAsserted *datatypes_go_proto.DateTime `protobuf:"bytes,20,opt,name=date_asserted,json=dateAsserted,proto3" json:"date_asserted,omitempty"`
	// Person or organization that provided the information about the taking of
	// this medication
	InformationSource *datatypes_go_proto.Reference `protobuf:"bytes,21,opt,name=information_source,json=informationSource,proto3" json:"information_source,omitempty"`
	// Link to information used to derive the MedicationUsage
	DerivedFrom []*datatypes_go_proto.Reference `protobuf:"bytes,22,rep,name=derived_from,json=derivedFrom,proto3" json:"derived_from,omitempty"`
	// Reason for why the medication is being/was taken
	Reason []*datatypes_go_proto.CodeableReference `protobuf:"bytes,23,rep,name=reason,proto3" json:"reason,omitempty"`
	// Further information about the usage
	Note []*datatypes_go_proto.Annotation `protobuf:"bytes,24,rep,name=note,proto3" json:"note,omitempty"`
	// Full representation of the dosage instructions
	RenderedDosageInstruction *datatypes_go_proto.String `protobuf:"bytes,25,opt,name=rendered_dosage_instruction,json=renderedDosageInstruction,proto3" json:"rendered_dosage_instruction,omitempty"`
	// Details of how medication is/was taken or should be taken
	Dosage []*datatypes_go_proto.Dosage `protobuf:"bytes,26,rep,name=dosage,proto3" json:"dosage,omitempty"`
	// Indicates if the medication is being consumed or administered as prescribed
	TakenAsOrdered *datatypes_go_proto.Boolean `protobuf:"bytes,27,opt,name=taken_as_ordered,json=takenAsOrdered,proto3" json:"taken_as_ordered,omitempty"`
}
// Reset restores x to its zero value and, when the unsafe fast path is
// enabled, re-registers the message's type info with the runtime.
func (x *MedicationUsage) Reset() {
	*x = MedicationUsage{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a human-readable rendering of the message.
func (x *MedicationUsage) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks MedicationUsage as a protobuf message.
func (*MedicationUsage) ProtoMessage() {}

// ProtoReflect returns a reflective view of the message, lazily caching the
// message type info on first use when the unsafe fast path is enabled.
func (x *MedicationUsage) ProtoReflect() protoreflect.Message {
	mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MedicationUsage.ProtoReflect.Descriptor instead.
func (*MedicationUsage) Descriptor() ([]byte, []int) {
	return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP(), []int{0}
}
// GetId returns the logical id of this artifact; nil-safe on a nil receiver.
func (x *MedicationUsage) GetId() *datatypes_go_proto.Id {
	if x == nil {
		return nil
	}
	return x.Id
}

// GetMeta returns the resource metadata; nil-safe on a nil receiver.
func (x *MedicationUsage) GetMeta() *datatypes_go_proto.Meta {
	if x == nil {
		return nil
	}
	return x.Meta
}

// GetImplicitRules returns the implicit rules URI; nil-safe on a nil receiver.
func (x *MedicationUsage) GetImplicitRules() *datatypes_go_proto.Uri {
	if x == nil {
		return nil
	}
	return x.ImplicitRules
}

// GetLanguage returns the content language code; nil-safe on a nil receiver.
func (x *MedicationUsage) GetLanguage() *datatypes_go_proto.Code {
	if x == nil {
		return nil
	}
	return x.Language
}

// GetText returns the narrative text summary; nil-safe on a nil receiver.
func (x *MedicationUsage) GetText() *datatypes_go_proto.Narrative {
	if x == nil {
		return nil
	}
	return x.Text
}

// GetContained returns the inline contained resources; nil-safe on a nil receiver.
func (x *MedicationUsage) GetContained() []*any.Any {
	if x == nil {
		return nil
	}
	return x.Contained
}

// GetExtension returns the extension list; nil-safe on a nil receiver.
func (x *MedicationUsage) GetExtension() []*datatypes_go_proto.Extension {
	if x == nil {
		return nil
	}
	return x.Extension
}

// GetModifierExtension returns the modifier extensions; nil-safe on a nil receiver.
func (x *MedicationUsage) GetModifierExtension() []*datatypes_go_proto.Extension {
	if x == nil {
		return nil
	}
	return x.ModifierExtension
}

// GetIdentifier returns the external identifiers; nil-safe on a nil receiver.
func (x *MedicationUsage) GetIdentifier() []*datatypes_go_proto.Identifier {
	if x == nil {
		return nil
	}
	return x.Identifier
}

// GetBasedOn returns the fulfilled plan/proposal/order references; nil-safe.
func (x *MedicationUsage) GetBasedOn() []*datatypes_go_proto.Reference {
	if x == nil {
		return nil
	}
	return x.BasedOn
}

// GetPartOf returns the parent-event references; nil-safe on a nil receiver.
func (x *MedicationUsage) GetPartOf() []*datatypes_go_proto.Reference {
	if x == nil {
		return nil
	}
	return x.PartOf
}

// GetStatus returns the usage status code; nil-safe on a nil receiver.
func (x *MedicationUsage) GetStatus() *MedicationUsage_StatusCode {
	if x == nil {
		return nil
	}
	return x.Status
}

// GetStatusReason returns the reasons for the current status; nil-safe.
func (x *MedicationUsage) GetStatusReason() []*datatypes_go_proto.CodeableConcept {
	if x == nil {
		return nil
	}
	return x.StatusReason
}

// GetCategory returns the usage categories; nil-safe on a nil receiver.
func (x *MedicationUsage) GetCategory() []*datatypes_go_proto.CodeableConcept {
	if x == nil {
		return nil
	}
	return x.Category
}

// GetMedication returns the medication choice; nil-safe on a nil receiver.
func (x *MedicationUsage) GetMedication() *MedicationUsage_MedicationX {
	if x == nil {
		return nil
	}
	return x.Medication
}

// GetSubject returns who is/was taking the medication; nil-safe.
func (x *MedicationUsage) GetSubject() *datatypes_go_proto.Reference {
	if x == nil {
		return nil
	}
	return x.Subject
}

// GetEncounter returns the associated encounter reference; nil-safe.
func (x *MedicationUsage) GetEncounter() *datatypes_go_proto.Reference {
	if x == nil {
		return nil
	}
	return x.Encounter
}

// GetEffective returns the effective time choice; nil-safe on a nil receiver.
func (x *MedicationUsage) GetEffective() *MedicationUsage_EffectiveX {
	if x == nil {
		return nil
	}
	return x.Effective
}

// GetDateAsserted returns when the usage was asserted; nil-safe.
func (x *MedicationUsage) GetDateAsserted() *datatypes_go_proto.DateTime {
	if x == nil {
		return nil
	}
	return x.DateAsserted
}

// GetInformationSource returns who provided the information; nil-safe.
func (x *MedicationUsage) GetInformationSource() *datatypes_go_proto.Reference {
	if x == nil {
		return nil
	}
	return x.InformationSource
}

// GetDerivedFrom returns the source-information references; nil-safe.
func (x *MedicationUsage) GetDerivedFrom() []*datatypes_go_proto.Reference {
	if x == nil {
		return nil
	}
	return x.DerivedFrom
}

// GetReason returns why the medication is/was taken; nil-safe.
func (x *MedicationUsage) GetReason() []*datatypes_go_proto.CodeableReference {
	if x == nil {
		return nil
	}
	return x.Reason
}

// GetNote returns further annotations about the usage; nil-safe.
func (x *MedicationUsage) GetNote() []*datatypes_go_proto.Annotation {
	if x == nil {
		return nil
	}
	return x.Note
}

// GetRenderedDosageInstruction returns the full dosage-instruction text; nil-safe.
func (x *MedicationUsage) GetRenderedDosageInstruction() *datatypes_go_proto.String {
	if x == nil {
		return nil
	}
	return x.RenderedDosageInstruction
}

// GetDosage returns how the medication is/was taken; nil-safe.
func (x *MedicationUsage) GetDosage() []*datatypes_go_proto.Dosage {
	if x == nil {
		return nil
	}
	return x.Dosage
}

// GetTakenAsOrdered reports whether the medication is consumed as prescribed;
// nil-safe on a nil receiver.
func (x *MedicationUsage) GetTakenAsOrdered() *datatypes_go_proto.Boolean {
	if x == nil {
		return nil
	}
	return x.TakenAsOrdered
}
// active | completed | entered-in-error | intended | stopped | on-hold |
// unknown | not-taken
type MedicationUsage_StatusCode struct {
	// Internal protobuf runtime bookkeeping; managed by the protobuf library.
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The status value, drawn from MedicationUsageStatusCode_Value.
	Value codes_go_proto.MedicationUsageStatusCode_Value `protobuf:"varint,1,opt,name=value,proto3,enum=google.fhir.r5.core.MedicationUsageStatusCode_Value" json:"value,omitempty"`
	// Id for this element.
	Id *datatypes_go_proto.String `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
	// Additional content defined by implementations
	Extension []*datatypes_go_proto.Extension `protobuf:"bytes,3,rep,name=extension,proto3" json:"extension,omitempty"`
}
// Reset restores x to its zero value and, when the unsafe fast path is
// enabled, re-registers the message's type info with the runtime.
func (x *MedicationUsage_StatusCode) Reset() {
	*x = MedicationUsage_StatusCode{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a human-readable rendering of the message.
func (x *MedicationUsage_StatusCode) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks MedicationUsage_StatusCode as a protobuf message.
func (*MedicationUsage_StatusCode) ProtoMessage() {}

// ProtoReflect returns a reflective view of the message, lazily caching the
// message type info on first use when the unsafe fast path is enabled.
func (x *MedicationUsage_StatusCode) ProtoReflect() protoreflect.Message {
	mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MedicationUsage_StatusCode.ProtoReflect.Descriptor instead.
func (*MedicationUsage_StatusCode) Descriptor() ([]byte, []int) {
	return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP(), []int{0, 0}
}
// GetValue returns the status enum value, or INVALID_UNINITIALIZED when the
// receiver is nil.
func (x *MedicationUsage_StatusCode) GetValue() codes_go_proto.MedicationUsageStatusCode_Value {
	if x == nil {
		return codes_go_proto.MedicationUsageStatusCode_INVALID_UNINITIALIZED
	}
	return x.Value
}

// GetId returns the element id; nil-safe on a nil receiver.
func (x *MedicationUsage_StatusCode) GetId() *datatypes_go_proto.String {
	if x == nil {
		return nil
	}
	return x.Id
}

// GetExtension returns the extension list; nil-safe on a nil receiver.
func (x *MedicationUsage_StatusCode) GetExtension() []*datatypes_go_proto.Extension {
	if x == nil {
		return nil
	}
	return x.Extension
}
// What medication was taken
type MedicationUsage_MedicationX struct {
	// Internal protobuf runtime bookkeeping; managed by the protobuf library.
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to Choice:
	//	*MedicationUsage_MedicationX_CodeableConcept
	//	*MedicationUsage_MedicationX_Reference
	Choice isMedicationUsage_MedicationX_Choice `protobuf_oneof:"choice"`
}
// Reset restores x to its zero value and, when the unsafe fast path is
// enabled, re-registers the message's type info with the runtime.
func (x *MedicationUsage_MedicationX) Reset() {
	*x = MedicationUsage_MedicationX{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a human-readable rendering of the message.
func (x *MedicationUsage_MedicationX) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks MedicationUsage_MedicationX as a protobuf message.
func (*MedicationUsage_MedicationX) ProtoMessage() {}

// ProtoReflect returns a reflective view of the message, lazily caching the
// message type info on first use when the unsafe fast path is enabled.
func (x *MedicationUsage_MedicationX) ProtoReflect() protoreflect.Message {
	mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MedicationUsage_MedicationX.ProtoReflect.Descriptor instead.
func (*MedicationUsage_MedicationX) Descriptor() ([]byte, []int) {
	return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP(), []int{0, 1}
}
// GetChoice returns the populated oneof wrapper, or nil when the receiver is
// nil or no variant is set.
func (m *MedicationUsage_MedicationX) GetChoice() isMedicationUsage_MedicationX_Choice {
	if m == nil {
		return nil
	}
	return m.Choice
}

// GetCodeableConcept returns the CodeableConcept variant of the choice, or nil
// when a different variant (or none) is set.
func (x *MedicationUsage_MedicationX) GetCodeableConcept() *datatypes_go_proto.CodeableConcept {
	if c, ok := x.GetChoice().(*MedicationUsage_MedicationX_CodeableConcept); ok {
		return c.CodeableConcept
	}
	return nil
}

// GetReference returns the Reference variant of the choice, or nil when a
// different variant (or none) is set.
func (x *MedicationUsage_MedicationX) GetReference() *datatypes_go_proto.Reference {
	if r, ok := x.GetChoice().(*MedicationUsage_MedicationX_Reference); ok {
		return r.Reference
	}
	return nil
}
// isMedicationUsage_MedicationX_Choice is implemented by every wrapper type
// that can populate the MedicationX "choice" oneof.
type isMedicationUsage_MedicationX_Choice interface {
	isMedicationUsage_MedicationX_Choice()
}

// MedicationUsage_MedicationX_CodeableConcept wraps the CodeableConcept
// variant of the choice.
type MedicationUsage_MedicationX_CodeableConcept struct {
	CodeableConcept *datatypes_go_proto.CodeableConcept `protobuf:"bytes,1,opt,name=codeable_concept,json=codeableConcept,proto3,oneof"`
}

// MedicationUsage_MedicationX_Reference wraps the Reference variant of the
// choice.
type MedicationUsage_MedicationX_Reference struct {
	Reference *datatypes_go_proto.Reference `protobuf:"bytes,2,opt,name=reference,proto3,oneof"`
}

func (*MedicationUsage_MedicationX_CodeableConcept) isMedicationUsage_MedicationX_Choice() {}

func (*MedicationUsage_MedicationX_Reference) isMedicationUsage_MedicationX_Choice() {}
// The date/time or interval when the medication is/was/will be taken
type MedicationUsage_EffectiveX struct {
	// Internal protobuf runtime bookkeeping; managed by the protobuf library.
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Types that are assignable to Choice:
	//	*MedicationUsage_EffectiveX_DateTime
	//	*MedicationUsage_EffectiveX_Period
	Choice isMedicationUsage_EffectiveX_Choice `protobuf_oneof:"choice"`
}
// Reset restores x to its zero value and, when the unsafe fast path is
// enabled, re-registers the message's type info with the runtime.
func (x *MedicationUsage_EffectiveX) Reset() {
	*x = MedicationUsage_EffectiveX{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a human-readable rendering of the message.
func (x *MedicationUsage_EffectiveX) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks MedicationUsage_EffectiveX as a protobuf message.
func (*MedicationUsage_EffectiveX) ProtoMessage() {}

// ProtoReflect returns a reflective view of the message, lazily caching the
// message type info on first use when the unsafe fast path is enabled.
func (x *MedicationUsage_EffectiveX) ProtoReflect() protoreflect.Message {
	mi := &file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use MedicationUsage_EffectiveX.ProtoReflect.Descriptor instead.
func (*MedicationUsage_EffectiveX) Descriptor() ([]byte, []int) {
	return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP(), []int{0, 2}
}
// GetChoice returns the populated oneof wrapper, or nil when the receiver is
// nil or no variant is set.
func (m *MedicationUsage_EffectiveX) GetChoice() isMedicationUsage_EffectiveX_Choice {
	if m == nil {
		return nil
	}
	return m.Choice
}

// GetDateTime returns the dateTime variant of the choice, or nil when a
// different variant (or none) is set.
func (x *MedicationUsage_EffectiveX) GetDateTime() *datatypes_go_proto.DateTime {
	if d, ok := x.GetChoice().(*MedicationUsage_EffectiveX_DateTime); ok {
		return d.DateTime
	}
	return nil
}

// GetPeriod returns the Period variant of the choice, or nil when a different
// variant (or none) is set.
func (x *MedicationUsage_EffectiveX) GetPeriod() *datatypes_go_proto.Period {
	if p, ok := x.GetChoice().(*MedicationUsage_EffectiveX_Period); ok {
		return p.Period
	}
	return nil
}
// isMedicationUsage_EffectiveX_Choice is implemented by every wrapper type
// that can populate the EffectiveX "choice" oneof.
type isMedicationUsage_EffectiveX_Choice interface {
	isMedicationUsage_EffectiveX_Choice()
}

// MedicationUsage_EffectiveX_DateTime wraps the dateTime variant of the choice.
type MedicationUsage_EffectiveX_DateTime struct {
	DateTime *datatypes_go_proto.DateTime `protobuf:"bytes,1,opt,name=date_time,json=dateTime,proto3,oneof"`
}

// MedicationUsage_EffectiveX_Period wraps the Period variant of the choice.
type MedicationUsage_EffectiveX_Period struct {
	Period *datatypes_go_proto.Period `protobuf:"bytes,2,opt,name=period,proto3,oneof"`
}

func (*MedicationUsage_EffectiveX_DateTime) isMedicationUsage_EffectiveX_Choice() {}

func (*MedicationUsage_EffectiveX_Period) isMedicationUsage_EffectiveX_Choice() {}
// File descriptor handle for this generated file; populated elsewhere in the
// generated code (presumably the file's init path — not visible here).
var File_proto_google_fhir_proto_r5_core_resources_medication_usage_proto protoreflect.FileDescriptor
var file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDesc = []byte{
0x0a, 0x40, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66,
0x68, 0x69, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x35, 0x2f, 0x63, 0x6f, 0x72,
0x65, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x6d, 0x65, 0x64, 0x69,
0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x12, 0x13, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e,
0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x29, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, 0x6e, 0x6e, 0x6f,
0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2b, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72,
0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x35, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x63,
0x6f, 0x64, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2f, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x35, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x64, 0x61, 0x74, 0x61,
0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd8, 0x15, 0x0a, 0x0f,
0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12,
0x27, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72,
0x65, 0x2e, 0x49, 0x64, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2d, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61,
0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x74,
0x61, 0x52, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x12, 0x3f, 0x0a, 0x0e, 0x69, 0x6d, 0x70, 0x6c, 0x69,
0x63, 0x69, 0x74, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x18, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35,
0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x72, 0x69, 0x52, 0x0d, 0x69, 0x6d, 0x70, 0x6c, 0x69,
0x63, 0x69, 0x74, 0x52, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x35, 0x0a, 0x08, 0x6c, 0x61, 0x6e, 0x67,
0x75, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65,
0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x12,
0x32, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63,
0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x61, 0x72, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x52, 0x04, 0x74,
0x65, 0x78, 0x74, 0x12, 0x32, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x64,
0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x09, 0x63, 0x6f,
0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x64, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e,
0x73, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65,
0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65,
0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x4d, 0x0a, 0x12, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65,
0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x03, 0x28,
0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e,
0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f,
0x6e, 0x52, 0x11, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e,
0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69,
0x65, 0x72, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49,
0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74,
0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x74, 0x0a, 0x08, 0x62, 0x61, 0x73, 0x65, 0x64, 0x5f, 0x6f,
0x6e, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65,
0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x39, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x11, 0x4d,
0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x08, 0x43, 0x61, 0x72, 0x65, 0x50, 0x6c, 0x61, 0x6e, 0xf2, 0xff,
0xfc, 0xc2, 0x06, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65,
0x73, 0x74, 0x52, 0x07, 0x62, 0x61, 0x73, 0x65, 0x64, 0x4f, 0x6e, 0x12, 0xa4, 0x01, 0x0a, 0x07,
0x70, 0x61, 0x72, 0x74, 0x5f, 0x6f, 0x66, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63,
0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x6b, 0xf2,
0xff, 0xfc, 0xc2, 0x06, 0x18, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41,
0x64, 0x6d, 0x69, 0x6e, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0xf2, 0xff, 0xfc,
0xc2, 0x06, 0x12, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x73,
0x70, 0x65, 0x6e, 0x73, 0x65, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0f, 0x4d, 0x65, 0x64, 0x69, 0x63,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x09,
0x50, 0x72, 0x6f, 0x63, 0x65, 0x64, 0x75, 0x72, 0x65, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0b, 0x4f,
0x62, 0x73, 0x65, 0x72, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x70, 0x61, 0x72, 0x74,
0x4f, 0x66, 0x12, 0x4f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x0d, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72,
0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43,
0x6f, 0x64, 0x65, 0x42, 0x06, 0xf0, 0xd0, 0x87, 0xeb, 0x04, 0x01, 0x52, 0x06, 0x73, 0x74, 0x61,
0x74, 0x75, 0x73, 0x12, 0x49, 0x0a, 0x0d, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x72, 0x65,
0x61, 0x73, 0x6f, 0x6e, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65,
0x2e, 0x43, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x63, 0x65, 0x70, 0x74,
0x52, 0x0c, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x40,
0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x0f, 0x20, 0x03, 0x28, 0x0b,
0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72,
0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x43,
0x6f, 0x6e, 0x63, 0x65, 0x70, 0x74, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79,
0x12, 0x58, 0x0a, 0x0a, 0x6d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x10,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68,
0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x58, 0x42, 0x06, 0xf0, 0xd0, 0x87, 0xeb, 0x04, 0x01, 0x52, 0x0a,
0x6d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x58, 0x0a, 0x07, 0x73, 0x75,
0x62, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72,
0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x1e, 0xf0, 0xd0, 0x87,
0xeb, 0x04, 0x01, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x07, 0x50, 0x61, 0x74, 0x69, 0x65, 0x6e, 0x74,
0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x05, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x52, 0x07, 0x73, 0x75, 0x62,
0x6a, 0x65, 0x63, 0x74, 0x12, 0x4d, 0x0a, 0x09, 0x65, 0x6e, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65,
0x72, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65,
0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x0f, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x09, 0x45,
0x6e, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x52, 0x09, 0x65, 0x6e, 0x63, 0x6f, 0x75, 0x6e,
0x74, 0x65, 0x72, 0x12, 0x4d, 0x0a, 0x09, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65,
0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x64,
0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x45, 0x66, 0x66,
0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x58, 0x52, 0x09, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69,
0x76, 0x65, 0x12, 0x42, 0x0a, 0x0d, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x61, 0x73, 0x73, 0x65, 0x72,
0x74, 0x65, 0x64, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e,
0x44, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x52, 0x0c, 0x64, 0x61, 0x74, 0x65, 0x41, 0x73,
0x73, 0x65, 0x72, 0x74, 0x65, 0x64, 0x12, 0xa9, 0x01, 0x0a, 0x12, 0x69, 0x6e, 0x66, 0x6f, 0x72,
0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x15, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69,
0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65,
0x6e, 0x63, 0x65, 0x42, 0x5a, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x07, 0x50, 0x61, 0x74, 0x69, 0x65,
0x6e, 0x74, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0c, 0x50, 0x72, 0x61, 0x63, 0x74, 0x69, 0x74, 0x69,
0x6f, 0x6e, 0x65, 0x72, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x10, 0x50, 0x72, 0x61, 0x63, 0x74, 0x69,
0x74, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x52, 0x6f, 0x6c, 0x65, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0d,
0x52, 0x65, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x50, 0x65, 0x72, 0x73, 0x6f, 0x6e, 0xf2, 0xff, 0xfc,
0xc2, 0x06, 0x0c, 0x4f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52,
0x11, 0x69, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x64, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x5f, 0x66, 0x72,
0x6f, 0x6d, 0x18, 0x16, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52,
0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x0e, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x08,
0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0b, 0x64, 0x65, 0x72, 0x69, 0x76, 0x65,
0x64, 0x46, 0x72, 0x6f, 0x6d, 0x12, 0x3e, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18,
0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66,
0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x64, 0x65,
0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x06, 0x72,
0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x33, 0x0a, 0x04, 0x6e, 0x6f, 0x74, 0x65, 0x18, 0x18, 0x20,
0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69,
0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x6e, 0x6f, 0x74, 0x65, 0x12, 0x5b, 0x0a, 0x1b, 0x72, 0x65,
0x6e, 0x64, 0x65, 0x72, 0x65, 0x64, 0x5f, 0x64, 0x6f, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x69, 0x6e,
0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35,
0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x19, 0x72, 0x65,
0x6e, 0x64, 0x65, 0x72, 0x65, 0x64, 0x44, 0x6f, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x73, 0x74,
0x72, 0x75, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x33, 0x0a, 0x06, 0x64, 0x6f, 0x73, 0x61, 0x67,
0x65, 0x18, 0x1a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x6f,
0x73, 0x61, 0x67, 0x65, 0x52, 0x06, 0x64, 0x6f, 0x73, 0x61, 0x67, 0x65, 0x12, 0x46, 0x0a, 0x10,
0x74, 0x61, 0x6b, 0x65, 0x6e, 0x5f, 0x61, 0x73, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x65, 0x64,
0x18, 0x1b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x42, 0x6f, 0x6f,
0x6c, 0x65, 0x61, 0x6e, 0x52, 0x0e, 0x74, 0x61, 0x6b, 0x65, 0x6e, 0x41, 0x73, 0x4f, 0x72, 0x64,
0x65, 0x72, 0x65, 0x64, 0x1a, 0xb7, 0x02, 0x0a, 0x0a, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43,
0x6f, 0x64, 0x65, 0x12, 0x4a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x34, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72,
0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43, 0x6f,
0x64, 0x65, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12,
0x2b, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72,
0x65, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x02, 0x69, 0x64, 0x12, 0x3c, 0x0a, 0x09,
0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32,
0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35,
0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52,
0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x72, 0xc0, 0x9f, 0xe3, 0xb6,
0x05, 0x01, 0x8a, 0xf9, 0x83, 0xb2, 0x05, 0x34, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x68,
0x6c, 0x37, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x56, 0x61, 0x6c, 0x75,
0x65, 0x53, 0x65, 0x74, 0x2f, 0x6d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2d,
0x75, 0x73, 0x61, 0x67, 0x65, 0x2d, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x9a, 0xb5, 0x8e, 0x93,
0x06, 0x2c, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x68, 0x6c, 0x37, 0x2e, 0x6f, 0x72, 0x67,
0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x75, 0x72, 0x65, 0x44,
0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x1a, 0xc4,
0x01, 0x0a, 0x0b, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x58, 0x12, 0x51,
0x0a, 0x10, 0x63, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x63, 0x65,
0x70, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43,
0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x63, 0x65, 0x70, 0x74, 0x48, 0x00,
0x52, 0x0f, 0x63, 0x6f, 0x64, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x63, 0x65, 0x70,
0x74, 0x12, 0x50, 0x0a, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x02,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68,
0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72,
0x65, 0x6e, 0x63, 0x65, 0x42, 0x10, 0xf2, 0xff, 0xfc, 0xc2, 0x06, 0x0a, 0x4d, 0x65, 0x64, 0x69,
0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65,
0x6e, 0x63, 0x65, 0x3a, 0x06, 0xa0, 0x83, 0x83, 0xe8, 0x06, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x63,
0x68, 0x6f, 0x69, 0x63, 0x65, 0x1a, 0x93, 0x01, 0x0a, 0x0a, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74,
0x69, 0x76, 0x65, 0x58, 0x12, 0x3c, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d,
0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61,
0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x48, 0x00, 0x52, 0x08, 0x64, 0x61, 0x74, 0x65, 0x54, 0x69,
0x6d, 0x65, 0x12, 0x35, 0x0a, 0x06, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x18, 0x02, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72,
0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x50, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x48,
0x00, 0x52, 0x06, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x3a, 0x06, 0xa0, 0x83, 0x83, 0xe8, 0x06,
0x01, 0x42, 0x08, 0x0a, 0x06, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, 0x3a, 0x43, 0xc0, 0x9f, 0xe3,
0xb6, 0x05, 0x03, 0xb2, 0xfe, 0xe4, 0x97, 0x06, 0x37, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f,
0x68, 0x6c, 0x37, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x53, 0x74, 0x72,
0x75, 0x63, 0x74, 0x75, 0x72, 0x65, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e,
0x2f, 0x4d, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65,
0x4a, 0x04, 0x08, 0x07, 0x10, 0x08, 0x42, 0x80, 0x01, 0x0a, 0x17, 0x63, 0x6f, 0x6d, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x66, 0x68, 0x69, 0x72, 0x2e, 0x72, 0x35, 0x2e, 0x63, 0x6f,
0x72, 0x65, 0x50, 0x01, 0x5a, 0x5d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d,
0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69, 0x72, 0x2f, 0x67, 0x6f, 0x2f,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x66, 0x68, 0x69,
0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x35, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f,
0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x6d, 0x65, 0x64, 0x69, 0x63, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x6f, 0x5f, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x98, 0xc6, 0xb0, 0xb5, 0x07, 0x05, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x33,
}
var (
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescOnce sync.Once
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescData = file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDesc
)
func file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescGZIP() []byte {
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescOnce.Do(func() {
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescData = protoimpl.X.CompressGZIP(file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescData)
})
return file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDescData
}
var file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes = make([]protoimpl.MessageInfo, 4)
var file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_goTypes = []interface{}{
(*MedicationUsage)(nil), // 0: google.fhir.r5.core.MedicationUsage
(*MedicationUsage_StatusCode)(nil), // 1: google.fhir.r5.core.MedicationUsage.StatusCode
(*MedicationUsage_MedicationX)(nil), // 2: google.fhir.r5.core.MedicationUsage.MedicationX
(*MedicationUsage_EffectiveX)(nil), // 3: google.fhir.r5.core.MedicationUsage.EffectiveX
(*datatypes_go_proto.Id)(nil), // 4: google.fhir.r5.core.Id
(*datatypes_go_proto.Meta)(nil), // 5: google.fhir.r5.core.Meta
(*datatypes_go_proto.Uri)(nil), // 6: google.fhir.r5.core.Uri
(*datatypes_go_proto.Code)(nil), // 7: google.fhir.r5.core.Code
(*datatypes_go_proto.Narrative)(nil), // 8: google.fhir.r5.core.Narrative
(*any.Any)(nil), // 9: google.protobuf.Any
(*datatypes_go_proto.Extension)(nil), // 10: google.fhir.r5.core.Extension
(*datatypes_go_proto.Identifier)(nil), // 11: google.fhir.r5.core.Identifier
(*datatypes_go_proto.Reference)(nil), // 12: google.fhir.r5.core.Reference
(*datatypes_go_proto.CodeableConcept)(nil), // 13: google.fhir.r5.core.CodeableConcept
(*datatypes_go_proto.DateTime)(nil), // 14: google.fhir.r5.core.DateTime
(*datatypes_go_proto.CodeableReference)(nil), // 15: google.fhir.r5.core.CodeableReference
(*datatypes_go_proto.Annotation)(nil), // 16: google.fhir.r5.core.Annotation
(*datatypes_go_proto.String)(nil), // 17: google.fhir.r5.core.String
(*datatypes_go_proto.Dosage)(nil), // 18: google.fhir.r5.core.Dosage
(*datatypes_go_proto.Boolean)(nil), // 19: google.fhir.r5.core.Boolean
(codes_go_proto.MedicationUsageStatusCode_Value)(0), // 20: google.fhir.r5.core.MedicationUsageStatusCode.Value
(*datatypes_go_proto.Period)(nil), // 21: google.fhir.r5.core.Period
}
var file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_depIdxs = []int32{
4, // 0: google.fhir.r5.core.MedicationUsage.id:type_name -> google.fhir.r5.core.Id
5, // 1: google.fhir.r5.core.MedicationUsage.meta:type_name -> google.fhir.r5.core.Meta
6, // 2: google.fhir.r5.core.MedicationUsage.implicit_rules:type_name -> google.fhir.r5.core.Uri
7, // 3: google.fhir.r5.core.MedicationUsage.language:type_name -> google.fhir.r5.core.Code
8, // 4: google.fhir.r5.core.MedicationUsage.text:type_name -> google.fhir.r5.core.Narrative
9, // 5: google.fhir.r5.core.MedicationUsage.contained:type_name -> google.protobuf.Any
10, // 6: google.fhir.r5.core.MedicationUsage.extension:type_name -> google.fhir.r5.core.Extension
10, // 7: google.fhir.r5.core.MedicationUsage.modifier_extension:type_name -> google.fhir.r5.core.Extension
11, // 8: google.fhir.r5.core.MedicationUsage.identifier:type_name -> google.fhir.r5.core.Identifier
12, // 9: google.fhir.r5.core.MedicationUsage.based_on:type_name -> google.fhir.r5.core.Reference
12, // 10: google.fhir.r5.core.MedicationUsage.part_of:type_name -> google.fhir.r5.core.Reference
1, // 11: google.fhir.r5.core.MedicationUsage.status:type_name -> google.fhir.r5.core.MedicationUsage.StatusCode
13, // 12: google.fhir.r5.core.MedicationUsage.status_reason:type_name -> google.fhir.r5.core.CodeableConcept
13, // 13: google.fhir.r5.core.MedicationUsage.category:type_name -> google.fhir.r5.core.CodeableConcept
2, // 14: google.fhir.r5.core.MedicationUsage.medication:type_name -> google.fhir.r5.core.MedicationUsage.MedicationX
12, // 15: google.fhir.r5.core.MedicationUsage.subject:type_name -> google.fhir.r5.core.Reference
12, // 16: google.fhir.r5.core.MedicationUsage.encounter:type_name -> google.fhir.r5.core.Reference
3, // 17: google.fhir.r5.core.MedicationUsage.effective:type_name -> google.fhir.r5.core.MedicationUsage.EffectiveX
14, // 18: google.fhir.r5.core.MedicationUsage.date_asserted:type_name -> google.fhir.r5.core.DateTime
12, // 19: google.fhir.r5.core.MedicationUsage.information_source:type_name -> google.fhir.r5.core.Reference
12, // 20: google.fhir.r5.core.MedicationUsage.derived_from:type_name -> google.fhir.r5.core.Reference
15, // 21: google.fhir.r5.core.MedicationUsage.reason:type_name -> google.fhir.r5.core.CodeableReference
16, // 22: google.fhir.r5.core.MedicationUsage.note:type_name -> google.fhir.r5.core.Annotation
17, // 23: google.fhir.r5.core.MedicationUsage.rendered_dosage_instruction:type_name -> google.fhir.r5.core.String
18, // 24: google.fhir.r5.core.MedicationUsage.dosage:type_name -> google.fhir.r5.core.Dosage
19, // 25: google.fhir.r5.core.MedicationUsage.taken_as_ordered:type_name -> google.fhir.r5.core.Boolean
20, // 26: google.fhir.r5.core.MedicationUsage.StatusCode.value:type_name -> google.fhir.r5.core.MedicationUsageStatusCode.Value
17, // 27: google.fhir.r5.core.MedicationUsage.StatusCode.id:type_name -> google.fhir.r5.core.String
10, // 28: google.fhir.r5.core.MedicationUsage.StatusCode.extension:type_name -> google.fhir.r5.core.Extension
13, // 29: google.fhir.r5.core.MedicationUsage.MedicationX.codeable_concept:type_name -> google.fhir.r5.core.CodeableConcept
12, // 30: google.fhir.r5.core.MedicationUsage.MedicationX.reference:type_name -> google.fhir.r5.core.Reference
14, // 31: google.fhir.r5.core.MedicationUsage.EffectiveX.date_time:type_name -> google.fhir.r5.core.DateTime
21, // 32: google.fhir.r5.core.MedicationUsage.EffectiveX.period:type_name -> google.fhir.r5.core.Period
33, // [33:33] is the sub-list for method output_type
33, // [33:33] is the sub-list for method input_type
33, // [33:33] is the sub-list for extension type_name
33, // [33:33] is the sub-list for extension extendee
0, // [0:33] is the sub-list for field type_name
}
func init() { file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_init() }
func file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_init() {
if File_proto_google_fhir_proto_r5_core_resources_medication_usage_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MedicationUsage); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MedicationUsage_StatusCode); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MedicationUsage_MedicationX); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MedicationUsage_EffectiveX); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[2].OneofWrappers = []interface{}{
(*MedicationUsage_MedicationX_CodeableConcept)(nil),
(*MedicationUsage_MedicationX_Reference)(nil),
}
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes[3].OneofWrappers = []interface{}{
(*MedicationUsage_EffectiveX_DateTime)(nil),
(*MedicationUsage_EffectiveX_Period)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDesc,
NumEnums: 0,
NumMessages: 4,
NumExtensions: 0,<|fim▁hole|> },
GoTypes: file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_goTypes,
DependencyIndexes: file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_depIdxs,
MessageInfos: file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_msgTypes,
}.Build()
File_proto_google_fhir_proto_r5_core_resources_medication_usage_proto = out.File
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_rawDesc = nil
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_goTypes = nil
file_proto_google_fhir_proto_r5_core_resources_medication_usage_proto_depIdxs = nil
}<|fim▁end|>
|
NumServices: 0,
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from django.test.utils import override_settings
from rest_framework import status
from rest_framework.test import APIClient
from axes.signals import user_locked_out
import json
import time
from family_tree.models.family import Family
from family_tree.models.person import Person
from custom_user.models import User
@override_settings(SECURE_SSL_REDIRECT=False, AXES_BEHIND_REVERSE_PROXY=False)
class JWTAuthTest(TestCase):
'''
Tests JWT auth
'''
def setUp(self):
self.family = Family()
self.family.save()
self.user = User.objects.create_user(email='[email protected]',
password='compiler',
name='Grace Hopper',
family_id = self.family.id)
self.person = Person(name='Grace Hopper',
gender='F',
email='[email protected]',
family_id=self.family.id,
language='en',
user_id=self.user.id)
self.person.save()
def test_jwt_auth_and_refresh_token_created_on_correct_auth_details(self):
client = APIClient(HTTP_X_REAL_IP='127.0.0.1')
auth_details = {
'email': '[email protected]',
'password': 'compiler'
}
response = client.post('/api/auth/obtain_token/', auth_details, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
access_token = json.loads(response.content)["access"]
refresh_token = json.loads(response.content)["refresh"]
auth_token = {
'refresh': refresh_token
}
# Sleep to ensure new token is different
time.sleep(1)
refresh_response = client.post('/api/auth/refresh_token/', auth_token, format='json')
refresh_token = json.loads(refresh_response.content)["access"]
self.assertEqual(refresh_response.status_code, status.HTTP_200_OK)
self.assertNotEqual(refresh_token, access_token)
# Check verify token
new_auth_token ={#
'token': refresh_token
}
verify_new_token_response = client.post('/api/auth/verify_token/', new_auth_token, format='json')
self.assertEqual(verify_new_token_response.status_code, status.HTTP_200_OK)
<|fim▁hole|> # Check ip not locked
locked_response = client.get('/api/auth/is_locked/', format='json')
self.assertEqual(b'false', locked_response.content)
self.assertEqual(locked_response.status_code, status.HTTP_200_OK)
def test_jwt_fails_on_auth_incorrect_password(self):
client = APIClient(HTTP_X_REAL_IP='127.0.0.1')
payload = {
'email': '[email protected]',
'password': 'COBOL'
}
response = client.post('/api/auth/obtain_token/', payload, format='json')
self.assertNotEqual(response.status_code, status.HTTP_200_OK)
def test_verify_fails_on_invalid_token(self):
client = APIClient(HTTP_X_REAL_IP='127.0.0.1')
invalid_auth_token ={#
'token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWUsImp0aSI6IjM1ODU0ODc3LWQyZjQtNDIxZS04ZDI5LWY3YTgxNTk3NzdhYyIsImlhdCI6MTU1NDM4NzU4NCwiZXhwIjoxNTU0MzkxMTg0fQ.yIr0TMbalatx7alU1TMGIxxaelqquMJfz3m4H7AA9v4'
}
verify_old_token_response = client.post('/api/auth/verify_token/', invalid_auth_token, format='json')
self.assertNotEqual(verify_old_token_response.status_code, status.HTTP_200_OK)
def test_account_locks_out_on_multiple_invalid_login_attempts(self):
user = User.objects.create_user(email='[email protected]',
password='smalltalk',
name='Adele Goldberg',
family_id = self.family.id)
person = Person(name='Adele Goldberg',
gender='F',
email='[email protected]',
family_id=self.family.id,
language='en',
user_id=user.id)
person.save()
# 127.0.0.1 is whitelisted
client = APIClient(HTTP_X_REAL_IP='127.0.0.2')
wrong_auth_details = {
'email': '[email protected]',
'password': 'compiler'
}
for x in range(0, 6):
response = client.post('/api/auth/obtain_token/', wrong_auth_details, format='json')
correct_auth_details = {
'email': '[email protected]',
'password': 'smalltalk'
}
final_response = client.post('/api/auth/obtain_token/', correct_auth_details, format='json')
self.assertNotEqual(final_response.status_code, status.HTTP_200_OK)
# Check ip locked
locked_response = client.get('/api/auth/is_locked/', format='json')
self.assertNotEqual(b'false', locked_response.content)
def test_api_docs_loads(self):
client = APIClient(HTTP_X_REAL_IP='127.0.0.1')
client.force_authenticate(user=self.user)
response = client.get('/api/docs/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_api_schema_loads(self):
client = APIClient(HTTP_X_REAL_IP='127.0.0.1')
client.force_authenticate(user=self.user)
response = client.get('/api/schema/')
self.assertEqual(response.status_code, status.HTTP_200_OK)<|fim▁end|>
| |
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::sync::Arc;
use cpython::*;
use cpython_ext::{ExtractInner, PyPath, PyPathBuf, ResultPyErrExt};
use edenapi::{Progress, ProgressCallback, ResponseMeta};
use edenapi_types::TreeAttributes;
use pyrevisionstore::{mutabledeltastore, mutablehistorystore};
use revisionstore::{HgIdMutableDeltaStore, HgIdMutableHistoryStore};
use types::{HgId, Key, RepoPathBuf};
pub fn to_path(py: Python, name: &PyPath) -> PyResult<RepoPathBuf> {
name.to_repo_path()
.map_pyerr(py)
.map(|path| path.to_owned())
}
pub fn to_hgid(py: Python, hgid: &PyBytes) -> HgId {
let mut bytes = [0u8; 20];
bytes.copy_from_slice(&hgid.data(py)[0..20]);
HgId::from(&bytes)
}
pub fn to_tree_attrs(py: Python, attrs: &PyDict) -> PyResult<TreeAttributes> {
let mut attributes = TreeAttributes::default();
attributes.manifest_blob = attrs
.get_item(py, "manifest_blob")
.map(|v| v.extract::<bool>(py))
.transpose()?
.unwrap_or(attributes.manifest_blob);
attributes.parents = attrs
.get_item(py, "parents")
.map(|v| v.extract::<bool>(py))
.transpose()?
.unwrap_or(attributes.parents);
attributes.child_metadata = attrs
.get_item(py, "child_metadata")
.map(|v| v.extract::<bool>(py))
.transpose()?
.unwrap_or(attributes.child_metadata);
Ok(attributes)
}
pub fn to_hgids(py: Python, hgids: impl IntoIterator<Item = PyBytes>) -> Vec<HgId> {
hgids.into_iter().map(|hgid| to_hgid(py, &hgid)).collect()
}
pub fn to_key(py: Python, path: &PyPath, hgid: &PyBytes) -> PyResult<Key> {
let hgid = to_hgid(py, hgid);
let path = to_path(py, path)?;
Ok(Key::new(path, hgid))
}
pub fn to_keys<'a>(
py: Python,
keys: impl IntoIterator<Item = &'a (PyPathBuf, PyBytes)>,
) -> PyResult<Vec<Key>> {
keys.into_iter()
.map(|(path, hgid)| to_key(py, path, hgid))
.collect()
}
pub fn wrap_callback(callback: PyObject) -> ProgressCallback {
Box::new(move |progress: Progress| {
let gil = Python::acquire_gil();
let py = gil.python();
let _ = callback.call(py, progress.as_tuple(), None);
})
}
pub fn as_deltastore(py: Python, store: PyObject) -> PyResult<Arc<dyn HgIdMutableDeltaStore>> {
Ok(store.extract::<mutabledeltastore>(py)?.extract_inner(py))
}
pub fn as_historystore(py: Python, store: PyObject) -> PyResult<Arc<dyn HgIdMutableHistoryStore>> {
Ok(store.extract::<mutablehistorystore>(py)?.extract_inner(py))
}
pub fn meta_to_dict(py: Python, meta: &ResponseMeta) -> PyResult<PyDict> {
let dict = PyDict::new(py);
dict.set_item(py, "version", format!("{:?}", &meta.version))?;
dict.set_item(py, "status", meta.status.as_u16())?;
dict.set_item(py, "server", &meta.server)?;
dict.set_item(py, "request_id", &meta.request_id)?;
dict.set_item(py, "tw_task_handle", &meta.tw_task_handle)?;
dict.set_item(py, "tw_task_version", &meta.tw_task_version)?;
dict.set_item(py, "tw_canary_id", &meta.tw_canary_id)?;
dict.set_item(py, "server_load", &meta.server_load)?;
dict.set_item(py, "content_length", &meta.content_length)?;<|fim▁hole|><|fim▁end|>
|
Ok(dict)
}
|
<|file_name|>RequisitionNewIndividualItemValidation.java<|end_file_name|><|fim▁begin|>/*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2017 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as<|fim▁hole|> * License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.module.purap.document.validation.impl;
import org.kuali.kfs.coreservice.framework.parameter.ParameterService;
import org.kuali.kfs.module.purap.PurapConstants;
import org.kuali.kfs.module.purap.PurapParameterConstants;
import org.kuali.kfs.module.purap.PurapRuleConstants;
import org.kuali.kfs.module.purap.document.RequisitionDocument;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.kfs.sys.document.validation.event.AttributedDocumentEvent;
public class RequisitionNewIndividualItemValidation extends PurchasingNewIndividualItemValidation {
public boolean validate(AttributedDocumentEvent event) {
return super.validate(event);
}
@Override
protected boolean commodityCodeIsRequired() {
//if the ENABLE_COMMODITY_CODE_IND parameter is N then we don't
//need to check for the ITEMS_REQUIRE_COMMODITY_CODE_IND parameter anymore, just return false.
boolean enableCommodityCode = SpringContext.getBean(ParameterService.class).getParameterValueAsBoolean(PurapConstants.PURAP_NAMESPACE, "Document", PurapParameterConstants.ENABLE_COMMODITY_CODE_IND);
if (!enableCommodityCode) {
return false;
} else {
return super.getParameterService().getParameterValueAsBoolean(RequisitionDocument.class, PurapRuleConstants.ITEMS_REQUIRE_COMMODITY_CODE_IND);
}
}
}<|fim▁end|>
|
* published by the Free Software Foundation, either version 3 of the
|
<|file_name|>dbscan.rs<|end_file_name|><|fim▁begin|>use rm::linalg::Matrix;
use rm::learning::dbscan::DBSCAN;
use rm::learning::UnSupModel;
#[test]
fn test_basic_clusters() {
let inputs = Matrix::new(6, 2, vec![1.0, 2.0,
1.1, 2.2,
0.9, 1.9,
1.0, 2.1,
-2.0, 3.0,<|fim▁hole|> -2.2, 3.1]);
let mut model = DBSCAN::new(0.5, 2);
model.train(&inputs);
let clustering = model.clusters().unwrap();
assert!(clustering.data().iter().take(4).all(|x| *x == Some(0)));
assert!(clustering.data().iter().skip(4).all(|x| *x == Some(1)));
}
#[test]
fn test_basic_prediction() {
let inputs = Matrix::new(6, 2, vec![1.0, 2.0,
1.1, 2.2,
0.9, 1.9,
1.0, 2.1,
-2.0, 3.0,
-2.2, 3.1]);
let mut model = DBSCAN::new(0.5, 2);
model.set_predictive(true);
model.train(&inputs);
let new_points = Matrix::new(2,2, vec![1.0, 2.0, 4.0, 4.0]);
let classes = model.predict(&new_points);
assert!(classes[0] == Some(0));
assert!(classes[1] == None);
}<|fim▁end|>
| |
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_private)]
#![feature(libc)]
extern crate libc;
extern crate rustc;
extern crate rustc_driver;
extern crate rustc_lint;
extern crate rustc_resolve;
extern crate syntax;
use std::ffi::{CStr, CString};
use std::mem::transmute;
use std::path::PathBuf;
use std::thread::Builder;
use rustc::ast_map;
use rustc::llvm;
use rustc::metadata::cstore::RequireDynamic;
use rustc::middle::ty;
use rustc::session::config::{self, basic_options, build_configuration, Input, Options};
use rustc::session::build_session;
use rustc_driver::driver;
use rustc_resolve::MakeGlobMap;
use libc::c_void;
use syntax::diagnostics::registry::Registry;
fn main() {
let program = r#"
#[no_mangle]
pub static TEST_STATIC: i32 = 42;
"#;
let program2 = r#"
#[no_mangle]
pub fn test_add(a: i32, b: i32) -> i32 { a + b }
"#;
<|fim▁hole|> let mut path = match std::env::args().nth(2) {
Some(path) => PathBuf::from(&path),
None => panic!("missing rustc path")
};
// Remove two segments from rustc path to get sysroot.
path.pop();
path.pop();
let mut ee = ExecutionEngine::new(program, path);
let test_static = match ee.get_global("TEST_STATIC") {
Some(g) => g as *const i32,
None => panic!("failed to get global")
};
assert_eq!(unsafe { *test_static }, 42);
ee.add_module(program2);
let test_add: fn(i32, i32) -> i32;
test_add = match ee.get_function("test_add") {
Some(f) => unsafe { transmute(f) },
None => panic!("failed to get function")
};
assert_eq!(test_add(1, 2), 3);
}
struct ExecutionEngine {
ee: llvm::ExecutionEngineRef,
modules: Vec<llvm::ModuleRef>,
sysroot: PathBuf,
}
impl ExecutionEngine {
pub fn new(program: &str, sysroot: PathBuf) -> ExecutionEngine {
let (llmod, deps) = compile_program(program, sysroot.clone())
.expect("failed to compile program");
let ee = unsafe { llvm::LLVMBuildExecutionEngine(llmod) };
if ee.is_null() {
panic!("Failed to create ExecutionEngine: {}", llvm_error());
}
let ee = ExecutionEngine{
ee: ee,
modules: vec![llmod],
sysroot: sysroot,
};
ee.load_deps(&deps);
ee
}
pub fn add_module(&mut self, program: &str) {
let (llmod, deps) = compile_program(program, self.sysroot.clone())
.expect("failed to compile program in add_module");
unsafe { llvm::LLVMExecutionEngineAddModule(self.ee, llmod); }
self.modules.push(llmod);
self.load_deps(&deps);
}
/// Returns a raw pointer to the named function.
pub fn get_function(&mut self, name: &str) -> Option<*const c_void> {
let s = CString::new(name.as_bytes()).unwrap();
for &m in &self.modules {
let fv = unsafe { llvm::LLVMGetNamedFunction(m, s.as_ptr()) };
if !fv.is_null() {
let fp = unsafe { llvm::LLVMGetPointerToGlobal(self.ee, fv) };
assert!(!fp.is_null());
return Some(fp);
}
}
None
}
/// Returns a raw pointer to the named global item.
pub fn get_global(&mut self, name: &str) -> Option<*const c_void> {
let s = CString::new(name.as_bytes()).unwrap();
for &m in &self.modules {
let gv = unsafe { llvm::LLVMGetNamedGlobal(m, s.as_ptr()) };
if !gv.is_null() {
let gp = unsafe { llvm::LLVMGetPointerToGlobal(self.ee, gv) };
assert!(!gp.is_null());
return Some(gp);
}
}
None
}
/// Loads all dependencies of compiled code.
/// Expects a series of paths to dynamic library files.
fn load_deps(&self, deps: &[PathBuf]) {
for path in deps {
let s = match path.as_os_str().to_str() {
Some(s) => s,
None => panic!(
"Could not convert crate path to UTF-8 string: {:?}", path)
};
let cs = CString::new(s).unwrap();
let res = unsafe { llvm::LLVMRustLoadDynamicLibrary(cs.as_ptr()) };
if res == 0 {
panic!("Failed to load crate {:?}: {}",
path.display(), llvm_error());
}
}
}
}
impl Drop for ExecutionEngine {
fn drop(&mut self) {
unsafe { llvm::LLVMDisposeExecutionEngine(self.ee) };
}
}
/// Returns last error from LLVM wrapper code.
fn llvm_error() -> String {
String::from_utf8_lossy(
unsafe { CStr::from_ptr(llvm::LLVMRustGetLastError()).to_bytes() })
.into_owned()
}
fn build_exec_options(sysroot: PathBuf) -> Options {
let mut opts = basic_options();
// librustc derives sysroot from the executable name.
// Since we are not rustc, we must specify it.
opts.maybe_sysroot = Some(sysroot);
// Prefer faster build time
opts.optimize = config::No;
// Don't require a `main` function
opts.crate_types = vec![config::CrateTypeDylib];
opts
}
/// Compiles input up to phase 4, translation to LLVM.
///
/// Returns the LLVM `ModuleRef` and a series of paths to dynamic libraries
/// for crates used in the given input.
fn compile_program(input: &str, sysroot: PathBuf)
-> Option<(llvm::ModuleRef, Vec<PathBuf>)> {
let input = Input::Str(input.to_string());
let thread = Builder::new().name("compile_program".to_string());
let handle = thread.spawn(move || {
let opts = build_exec_options(sysroot);
let sess = build_session(opts, None, Registry::new(&rustc::DIAGNOSTICS));
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let cfg = build_configuration(&sess);
let id = "input".to_string();
let krate = driver::phase_1_parse_input(&sess, cfg, &input);
let krate = driver::phase_2_configure_and_expand(&sess, krate, &id, None)
.expect("phase_2 returned `None`");
let mut forest = ast_map::Forest::new(krate);
let arenas = ty::CtxtArenas::new();
let ast_map = driver::assign_node_ids_and_map(&sess, &mut forest);
driver::phase_3_run_analysis_passes(
sess, ast_map, &arenas, id, MakeGlobMap::No, |tcx, analysis| {
let trans = driver::phase_4_translate_to_llvm(tcx, analysis);
let crates = tcx.sess.cstore.get_used_crates(RequireDynamic);
// Collect crates used in the session.
// Reverse order finds dependencies first.
let deps = crates.into_iter().rev()
.filter_map(|(_, p)| p).collect();
assert_eq!(trans.modules.len(), 1);
let llmod = trans.modules[0].llmod;
// Workaround because raw pointers do not impl Send
let modp = llmod as usize;
(modp, deps)
}).1
}).unwrap();
match handle.join() {
Ok((llmod, deps)) => Some((llmod as llvm::ModuleRef, deps)),
Err(_) => None
}
}<|fim▁end|>
| |
<|file_name|>router.rs<|end_file_name|><|fim▁begin|>use regex::Regex;
use hyper::Method;
use hyper::server::Request;
use context::{Context, Params, ParamType};
type Handler = fn(Context) -> ::Response;
pub struct Router {
pub routes: Vec<Route>
}
impl Router {
pub fn new() -> Router {
Router {
routes: Vec::new()
}
}
pub fn get(&mut self, path: &str, handler: Handler) {
self.routes.push(
Route::new(Method::Get, path, handler)
);
}
}
#[derive(Clone)]
pub struct Route {
pub method: Method,
pub path: RoutePath,
pub handler: Handler
}
impl Route {
pub fn new(method: Method, path: &str, handler: Handler) -> Route {
Route {
method: method,
path: RoutePath::new(path),
handler: handler
}
}
pub fn matches_request(&self, request: &Request) -> Option<Params> {
if self.method != *request.method() {
return None;
}
return self.path.matches_path(request.path());
}
}
#[derive(Clone, PartialEq, Debug)]
pub enum PathToken {
Str(String),
Var {
key: String,
datatype: String
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct RoutePath {
pub tokenized_path: Vec<PathToken>
}
impl RoutePath {
pub fn new(path: &str) -> Self {
let vec_path = RoutePath::tokenize_path(path);
RoutePath {
tokenized_path: vec_path
}
}
fn tokenize_path(path: &str) -> Vec<PathToken> {
let path = &path[1..]; // Remove root
let re = Regex::new(r"^\{([a-zA-Z_]+)\}$").unwrap();
let path_vec = path.split("/")
.map(|t| {<|fim▁hole|> if re.is_match(t) {
// Capture the variable name between {}
let cap = re.captures(t).unwrap();
// There should be only one, grab it as str
let key = cap.get(1).unwrap().as_str();
return PathToken::Var { key: String::from(key), datatype: String::from("string") }
}
PathToken::Str(String::from(t))
})
.collect::<Vec<PathToken>>();
path_vec
}
pub fn matches_path(&self, request_path: &str) -> Option<Params> {
// Remove /, split on /, into vec of Strings
let incoming_path = &request_path[1..].split("/").map(|i| {
String::from(i)
}).collect::<Vec<String>>();
// Both RoutePath and Request should have equal length tokenized paths
if self.tokenized_path.len() != incoming_path.len() {
return None;
}
// Save url params while processing
let mut params = Params::new();
for (index, token) in self.tokenized_path.iter().enumerate() {
match token {
&PathToken::Str(ref s) => {
if *s != incoming_path[index] {
return None
}
},
&PathToken::Var {ref key, ref datatype} => {
if datatype == "string" {
params.insert(
key.to_string(),
ParamType::Str(incoming_path[index].to_string())
);
}
}
}
}
Some(params)
}
}
#[cfg(test)]
mod tests {
use std::str::FromStr;
use hyper::{Request, Response, Method, Uri};
use super::{Route, RoutePath, PathToken, Context};
fn generic_handler(_context: Context) -> Response {
return Response::new();
}
#[test]
fn routepath_for_root() {
let routepath = RoutePath::new("/");
assert_eq!(routepath.tokenized_path, vec![PathToken::Str(String::from(""))]);
}
#[test]
fn routepath_for_user_profile() {
let routepath = RoutePath::new("/user/profile");
assert_eq!(routepath.tokenized_path, vec![
PathToken::Str(String::from("user")),
PathToken::Str(String::from("profile"))
]
)
}
#[test]
fn route_should_be_matched() {
let route = Route::new(Method::Get, "/monkeys", generic_handler);
let path = Uri::from_str("http://example.com/monkeys").unwrap();
let request: Request = Request::new(Method::Get, path);
assert!(route.matches_request(&request).is_some());
}
#[test]
fn route_should_not_be_matched() {
let route = Route::new(Method::Get, "/monkeys", generic_handler);
let path = Uri::from_str("http://example.com/nomatch").unwrap();
let request: Request = Request::new(Method::Get, path);
assert!(route.matches_request(&request).is_none());
}
#[test]
fn route_should_match_with_variables() {
let route = Route::new(Method::Get, "/user/{username}", generic_handler);
let path = Uri::from_str("http://example.com/user/johndoe").unwrap();
let request: Request = Request::new(Method::Get, path);
assert!(route.path.matches_path(request.path()).is_some());
assert!(route.matches_request(&request).is_some());
}
}<|fim▁end|>
| |
<|file_name|>test_local_file.py<|end_file_name|><|fim▁begin|>"""The tests for local file camera component."""
import asyncio
from unittest import mock
# Using third party package because of a bug reading binary data in Python 3.4
# https://bugs.python.org/issue23004
from mock_open import MockOpen
from homeassistant.bootstrap import setup_component
from tests.common import mock_http_component
import logging
@asyncio.coroutine
def test_loading_file(hass, test_client):
"""Test that it loads image from disk."""
@mock.patch('os.path.isfile', mock.Mock(return_value=True))
@mock.patch('os.access', mock.Mock(return_value=True))
def setup_platform():
"""Setup platform inside callback."""
assert setup_component(hass, 'camera', {
'camera': {
'name': 'config_test',
'platform': 'local_file',
'file_path': 'mock.file',
}})
yield from hass.loop.run_in_executor(None, setup_platform)
client = yield from test_client(hass.http.app)
m_open = MockOpen(read_data=b'hello')
with mock.patch(
'homeassistant.components.camera.local_file.open',
m_open, create=True
):
resp = yield from client.get('/api/camera_proxy/camera.config_test')
assert resp.status == 200
body = yield from resp.text()
assert body == 'hello'
@asyncio.coroutine
def test_file_not_readable(hass, caplog):
"""Test a warning is shown setup when file is not readable."""
mock_http_component(hass)
@mock.patch('os.path.isfile', mock.Mock(return_value=True))
@mock.patch('os.access', mock.Mock(return_value=False))
def run_test():
caplog.set_level(
logging.WARNING, logger='requests.packages.urllib3.connectionpool')
assert setup_component(hass, 'camera', {
'camera': {
'name': 'config_test',
'platform': 'local_file',
'file_path': 'mock.file',<|fim▁hole|> assert 'Could not read' in caplog.text
assert 'config_test' in caplog.text
assert 'mock.file' in caplog.text
yield from hass.loop.run_in_executor(None, run_test)<|fim▁end|>
|
}})
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![crate_name = "webdriver_server"]
#![crate_type = "rlib"]
#![deny(unsafe_code)]
extern crate base64;
extern crate cookie as cookie_rs;
extern crate euclid;
extern crate hyper;
extern crate image;
extern crate ipc_channel;
#[macro_use]
extern crate log;
extern crate msg;
extern crate net_traits;
extern crate regex;
extern crate rustc_serialize;
extern crate script_traits;
extern crate servo_config;
extern crate servo_url;
extern crate uuid;
extern crate webdriver;
mod keys;
use euclid::Size2D;
use hyper::method::Method::{self, Post};
use image::{DynamicImage, ImageFormat, RgbImage};
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use keys::keycodes_to_keys;
use msg::constellation_msg::{BrowsingContextId, TopLevelBrowsingContextId, TraversalDirection};
use net_traits::image::base::PixelFormat;
use regex::Captures;
use rustc_serialize::json::{Json, ToJson};
use script_traits::{ConstellationMsg, LoadData, WebDriverCommandMsg};
use script_traits::webdriver_msg::{LoadStatus, WebDriverCookieError, WebDriverFrameId};
use script_traits::webdriver_msg::{WebDriverJSError, WebDriverJSResult, WebDriverScriptCommand};
use servo_config::prefs::{PREFS, PrefValue};
use servo_url::ServoUrl;
use std::borrow::ToOwned;
use std::collections::BTreeMap;
use std::net::{SocketAddr, SocketAddrV4};
use std::sync::mpsc::Sender;
use std::thread;
use std::time::Duration;
use uuid::Uuid;
use webdriver::command::{AddCookieParameters, GetParameters, JavascriptCommandParameters};
use webdriver::command::{LocatorParameters, Parameters};
use webdriver::command::{SendKeysParameters, SwitchToFrameParameters, TimeoutsParameters};
use webdriver::command::{WebDriverCommand, WebDriverExtensionCommand, WebDriverMessage};
use webdriver::command::WindowSizeParameters;
use webdriver::common::{Date, LocatorStrategy, Nullable, WebElement};
use webdriver::error::{ErrorStatus, WebDriverError, WebDriverResult};
use webdriver::httpapi::WebDriverExtensionRoute;
use webdriver::response::{Cookie, CookieResponse};
use webdriver::response::{ElementRectResponse, NewSessionResponse, ValueResponse};
use webdriver::response::{WebDriverResponse, WindowSizeResponse};
use webdriver::server::{self, Session, WebDriverHandler};
fn extension_routes() -> Vec<(Method, &'static str, ServoExtensionRoute)> {
return vec![(Post, "/session/{sessionId}/servo/prefs/get", ServoExtensionRoute::GetPrefs),
(Post, "/session/{sessionId}/servo/prefs/set", ServoExtensionRoute::SetPrefs),
(Post, "/session/{sessionId}/servo/prefs/reset", ServoExtensionRoute::ResetPrefs)]
}
fn cookie_msg_to_cookie(cookie: cookie_rs::Cookie) -> Cookie {
Cookie {
name: cookie.name().to_owned(),
value: cookie.value().to_owned(),
path: match cookie.path() {
Some(path) => Nullable::Value(path.to_string()),
None => Nullable::Null
},
domain: match cookie.domain() {
Some(domain) => Nullable::Value(domain.to_string()),
None => Nullable::Null
},
expiry: match cookie.expires() {
Some(time) => Nullable::Value(Date::new(time.to_timespec().sec as u64)),
None => Nullable::Null
},
secure: cookie.secure(),
httpOnly: cookie.http_only(),
}
}
pub fn start_server(port: u16, constellation_chan: Sender<ConstellationMsg>) {
let handler = Handler::new(constellation_chan);
thread::Builder::new().name("WebdriverHttpServer".to_owned()).spawn(move || {
let address = SocketAddrV4::new("0.0.0.0".parse().unwrap(), port);
match server::start(SocketAddr::V4(address), handler, &extension_routes()) {
Ok(listening) => info!("WebDriver server listening on {}", listening.socket),
Err(_) => panic!("Unable to start WebDriver HTTPD server"),
}
}).expect("Thread spawning failed");
}
/// Represents the current WebDriver session and holds relevant session state.
struct WebDriverSession {
id: Uuid,
browsing_context_id: BrowsingContextId,
top_level_browsing_context_id: TopLevelBrowsingContextId,
/// Time to wait for injected scripts to run before interrupting them. A [`None`] value
/// specifies that the script should run indefinitely.
script_timeout: Option<u64>,
/// Time to wait for a page to finish loading upon navigation.
load_timeout: Option<u64>,
/// Time to wait for the element location strategy when retrieving elements, and when
/// waiting for an element to become interactable.
implicit_wait_timeout: Option<u64>,
}
impl WebDriverSession {
pub fn new(browsing_context_id: BrowsingContextId,
top_level_browsing_context_id: TopLevelBrowsingContextId)
-> WebDriverSession
{
WebDriverSession {
id: Uuid::new_v4(),
browsing_context_id: browsing_context_id,
top_level_browsing_context_id: top_level_browsing_context_id,
script_timeout: Some(30_000),
load_timeout: Some(300_000),
implicit_wait_timeout: Some(0),
}
}
}
struct Handler {
session: Option<WebDriverSession>,
constellation_chan: Sender<ConstellationMsg>,
resize_timeout: u32,
}
#[derive(Clone, Copy, PartialEq)]
enum ServoExtensionRoute {
GetPrefs,
SetPrefs,
ResetPrefs,
}
impl WebDriverExtensionRoute for ServoExtensionRoute {
type Command = ServoExtensionCommand;
fn command(&self,
_captures: &Captures,
body_data: &Json) -> WebDriverResult<WebDriverCommand<ServoExtensionCommand>> {
let command = match *self {
ServoExtensionRoute::GetPrefs => {
let parameters: GetPrefsParameters = try!(Parameters::from_json(&body_data));
ServoExtensionCommand::GetPrefs(parameters)
}
ServoExtensionRoute::SetPrefs => {
let parameters: SetPrefsParameters = try!(Parameters::from_json(&body_data));
ServoExtensionCommand::SetPrefs(parameters)
}
ServoExtensionRoute::ResetPrefs => {
let parameters: GetPrefsParameters = try!(Parameters::from_json(&body_data));
ServoExtensionCommand::ResetPrefs(parameters)
}
};
Ok(WebDriverCommand::Extension(command))
}
}
#[derive(Clone, PartialEq)]
enum ServoExtensionCommand {
GetPrefs(GetPrefsParameters),
SetPrefs(SetPrefsParameters),
ResetPrefs(GetPrefsParameters),
}
impl WebDriverExtensionCommand for ServoExtensionCommand {
fn parameters_json(&self) -> Option<Json> {
match *self {
ServoExtensionCommand::GetPrefs(ref x) => Some(x.to_json()),
ServoExtensionCommand::SetPrefs(ref x) => Some(x.to_json()),
ServoExtensionCommand::ResetPrefs(ref x) => Some(x.to_json()),
}
}
}
#[derive(Clone, PartialEq)]
struct GetPrefsParameters {
prefs: Vec<String>
}
impl Parameters for GetPrefsParameters {
fn from_json(body: &Json) -> WebDriverResult<GetPrefsParameters> {
let data = try!(body.as_object().ok_or(
WebDriverError::new(ErrorStatus::InvalidArgument,
"Message body was not an object")));
let prefs_value = try!(data.get("prefs").ok_or(
WebDriverError::new(ErrorStatus::InvalidArgument,
"Missing prefs key")));
let items = try!(prefs_value.as_array().ok_or(
WebDriverError::new(
ErrorStatus::InvalidArgument,
"prefs was not an array")));
let params = try!(items.iter().map(|x| x.as_string().map(|y| y.to_owned()).ok_or(
WebDriverError::new(ErrorStatus::InvalidArgument,
"Pref is not a string"))).collect::<Result<Vec<_>, _>>());
Ok(GetPrefsParameters {
prefs: params
})
}
}
impl ToJson for GetPrefsParameters {
fn to_json(&self) -> Json {
let mut data = BTreeMap::new();
data.insert("prefs".to_owned(), self.prefs.to_json());
Json::Object(data)
}
}
#[derive(Clone, PartialEq)]
struct SetPrefsParameters {
prefs: Vec<(String, PrefValue)>
}
impl Parameters for SetPrefsParameters {
fn from_json(body: &Json) -> WebDriverResult<SetPrefsParameters> {
let data = try!(body.as_object().ok_or(
WebDriverError::new(ErrorStatus::InvalidArgument,
"Message body was not an object")));
let items = try!(try!(data.get("prefs").ok_or(
WebDriverError::new(ErrorStatus::InvalidArgument,
"Missing prefs key"))).as_object().ok_or(
WebDriverError::new(
ErrorStatus::InvalidArgument,
"prefs was not an array")));
let mut params = Vec::with_capacity(items.len());
for (name, val) in items.iter() {
let value = try!(PrefValue::from_json(val.clone()).or(
Err(WebDriverError::new(ErrorStatus::InvalidArgument,
"Pref is not a boolean or string"))));
let key = name.to_owned();
params.push((key, value));
}
Ok(SetPrefsParameters {
prefs: params
})
}
}
impl ToJson for SetPrefsParameters {
fn to_json(&self) -> Json {
let mut data = BTreeMap::new();
data.insert("prefs".to_owned(), self.prefs.to_json());
Json::Object(data)
}
}
impl Handler {
pub fn new(constellation_chan: Sender<ConstellationMsg>) -> Handler {
Handler {
session: None,
constellation_chan: constellation_chan,
resize_timeout: 500,
}
}
fn focus_top_level_browsing_context_id(&self) -> WebDriverResult<TopLevelBrowsingContextId> {
debug!("Getting focused context.");
let interval = 20;
let iterations = 30_000 / interval;
let (sender, receiver) = ipc::channel().unwrap();
for _ in 0..iterations {
let msg = ConstellationMsg::GetFocusTopLevelBrowsingContext(sender.clone());
self.constellation_chan.send(msg).unwrap();
// Wait until the document is ready before returning the top-level browsing context id.
if let Some(x) = receiver.recv().unwrap() {
debug!("Focused context is {}", x);
return Ok(x);
}
thread::sleep(Duration::from_millis(interval));
}
debug!("Timed out getting focused context.");
Err(WebDriverError::new(ErrorStatus::Timeout,
"Failed to get window handle"))
}
fn session(&self) -> WebDriverResult<&WebDriverSession> {
match self.session {
Some(ref x) => Ok(x),
None => Err(WebDriverError::new(ErrorStatus::SessionNotCreated,
"Session not created"))
}
}
fn session_mut(&mut self) -> WebDriverResult<&mut WebDriverSession> {
match self.session {
Some(ref mut x) => Ok(x),
None => Err(WebDriverError::new(ErrorStatus::SessionNotCreated,
"Session not created"))
}
}
fn handle_new_session(&mut self) -> WebDriverResult<WebDriverResponse> {
debug!("new session");
if self.session.is_none() {
let top_level_browsing_context_id = self.focus_top_level_browsing_context_id()?;
let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id);
let session = WebDriverSession::new(browsing_context_id, top_level_browsing_context_id);
let mut capabilities = BTreeMap::new();
capabilities.insert("browserName".to_owned(), "servo".to_json());
capabilities.insert("browserVersion".to_owned(), "0.0.1".to_json());
capabilities.insert("acceptInsecureCerts".to_owned(), false.to_json());
let response = NewSessionResponse::new(session.id.to_string(), Json::Object(capabilities));
debug!("new session created {}.", session.id);
self.session = Some(session);
Ok(WebDriverResponse::NewSession(response))
} else {
debug!("new session failed.");
Err(WebDriverError::new(ErrorStatus::UnknownError,
"Session already created"))
}
}
fn handle_delete_session(&mut self) -> WebDriverResult<WebDriverResponse> {
self.session = None;
Ok(WebDriverResponse::Void)
}
fn browsing_context_script_command(&self, cmd_msg: WebDriverScriptCommand) -> WebDriverResult<()> {
let browsing_context_id = self.session()?.browsing_context_id;
let msg = ConstellationMsg::WebDriverCommand(WebDriverCommandMsg::ScriptCommand(browsing_context_id, cmd_msg));
self.constellation_chan.send(msg).unwrap();
Ok(())
}
fn top_level_script_command(&self, cmd_msg: WebDriverScriptCommand) -> WebDriverResult<()> {
let browsing_context_id = BrowsingContextId::from(self.session()?.top_level_browsing_context_id);
let msg = ConstellationMsg::WebDriverCommand(WebDriverCommandMsg::ScriptCommand(browsing_context_id, cmd_msg));
self.constellation_chan.send(msg).unwrap();
Ok(())
}
fn handle_get(&self, parameters: &GetParameters) -> WebDriverResult<WebDriverResponse> {
let url = match ServoUrl::parse(¶meters.url[..]) {
Ok(url) => url,
Err(_) => return Err(WebDriverError::new(ErrorStatus::InvalidArgument,
"Invalid URL"))
};
let top_level_browsing_context_id = self.session()?.top_level_browsing_context_id;
let (sender, receiver) = ipc::channel().unwrap();
let load_data = LoadData::new(url, None, None, None);
let cmd_msg = WebDriverCommandMsg::LoadUrl(top_level_browsing_context_id, load_data, sender.clone());
self.constellation_chan.send(ConstellationMsg::WebDriverCommand(cmd_msg)).unwrap();
self.wait_for_load(sender, receiver)
}
fn wait_for_load(&self,
sender: IpcSender<LoadStatus>,
receiver: IpcReceiver<LoadStatus>)
-> WebDriverResult<WebDriverResponse> {
let timeout = self.session()?.load_timeout;
thread::spawn(move || {
thread::sleep(Duration::from_millis(timeout.unwrap()));
let _ = sender.send(LoadStatus::LoadTimeout);
});
// wait to get a load event
match receiver.recv().unwrap() {
LoadStatus::LoadComplete => Ok(WebDriverResponse::Void),
LoadStatus::LoadTimeout => {
Err(WebDriverError::new(ErrorStatus::Timeout, "Load timed out"))
}
}
}
fn handle_current_url(&self) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
self.top_level_script_command(WebDriverScriptCommand::GetUrl(sender))?;
let url = receiver.recv().unwrap();
Ok(WebDriverResponse::Generic(ValueResponse::new(url.as_str().to_json())))
}
fn handle_window_size(&self) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let top_level_browsing_context_id = self.session()?.top_level_browsing_context_id;
let cmd_msg = WebDriverCommandMsg::GetWindowSize(top_level_browsing_context_id, sender);
self.constellation_chan.send(ConstellationMsg::WebDriverCommand(cmd_msg)).unwrap();
let window_size = receiver.recv().unwrap();
let vp = window_size.initial_viewport;
let window_size_response = WindowSizeResponse::new(vp.width as u64, vp.height as u64);
Ok(WebDriverResponse::WindowSize(window_size_response))
}
fn handle_set_window_size(&self, params: &WindowSizeParameters) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let size = Size2D::new(params.width as u32, params.height as u32);
let top_level_browsing_context_id = self.session()?.top_level_browsing_context_id;
let cmd_msg = WebDriverCommandMsg::SetWindowSize(top_level_browsing_context_id, size, sender.clone());
self.constellation_chan.send(ConstellationMsg::WebDriverCommand(cmd_msg)).unwrap();
let timeout = self.resize_timeout;
let constellation_chan = self.constellation_chan.clone();
thread::spawn(move || {
// On timeout, we send a GetWindowSize message to the constellation,
// which will give the current window size.
thread::sleep(Duration::from_millis(timeout as u64));
let cmd_msg = WebDriverCommandMsg::GetWindowSize(top_level_browsing_context_id, sender);
constellation_chan.send(ConstellationMsg::WebDriverCommand(cmd_msg)).unwrap();
});
let window_size = receiver.recv().unwrap();
let vp = window_size.initial_viewport;
let window_size_response = WindowSizeResponse::new(vp.width as u64, vp.height as u64);
Ok(WebDriverResponse::WindowSize(window_size_response))
}
fn handle_is_enabled(&self, element: &WebElement) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
self.top_level_script_command(WebDriverScriptCommand::IsEnabled(element.id.clone(), sender))?;
match receiver.recv().unwrap() {
Ok(is_enabled) => Ok(WebDriverResponse::Generic(ValueResponse::new(is_enabled.to_json()))),
Err(_) => Err(WebDriverError::new(ErrorStatus::StaleElementReference, "Element not found"))
}
}
fn handle_is_selected(&self, element: &WebElement) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
self.top_level_script_command(WebDriverScriptCommand::IsSelected(element.id.clone(), sender))?;
match receiver.recv().unwrap() {
Ok(is_selected) => Ok(WebDriverResponse::Generic(ValueResponse::new(is_selected.to_json()))),
Err(_) => Err(WebDriverError::new(ErrorStatus::StaleElementReference, "Element not found"))
}
}
fn handle_go_back(&self) -> WebDriverResult<WebDriverResponse> {
let top_level_browsing_context_id = self.session()?.top_level_browsing_context_id;
let direction = TraversalDirection::Back(1);
let msg = ConstellationMsg::TraverseHistory(top_level_browsing_context_id, direction);
self.constellation_chan.send(msg).unwrap();
Ok(WebDriverResponse::Void)
}
fn handle_go_forward(&self) -> WebDriverResult<WebDriverResponse> {
let top_level_browsing_context_id = self.session()?.top_level_browsing_context_id;
let direction = TraversalDirection::Forward(1);
let msg = ConstellationMsg::TraverseHistory(top_level_browsing_context_id, direction);
self.constellation_chan.send(msg).unwrap();
Ok(WebDriverResponse::Void)
}
fn handle_refresh(&self) -> WebDriverResult<WebDriverResponse> {
let top_level_browsing_context_id = self.session()?.top_level_browsing_context_id;
let (sender, receiver) = ipc::channel().unwrap();
let cmd_msg = WebDriverCommandMsg::Refresh(top_level_browsing_context_id, sender.clone());
self.constellation_chan.send(ConstellationMsg::WebDriverCommand(cmd_msg)).unwrap();
self.wait_for_load(sender, receiver)
}
fn handle_title(&self) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
self.top_level_script_command(WebDriverScriptCommand::GetTitle(sender))?;
let value = receiver.recv().unwrap();
Ok(WebDriverResponse::Generic(ValueResponse::new(value.to_json())))
}
fn handle_window_handle(&self) -> WebDriverResult<WebDriverResponse> {
// For now we assume there's only one window so just use the session
// id as the window id
let handle = self.session.as_ref().unwrap().id.to_string();
Ok(WebDriverResponse::Generic(ValueResponse::new(handle.to_json())))
}
fn handle_window_handles(&self) -> WebDriverResult<WebDriverResponse> {
// For now we assume there's only one window so just use the session
// id as the window id
let handles = vec![self.session.as_ref().unwrap().id.to_string().to_json()];
Ok(WebDriverResponse::Generic(ValueResponse::new(handles.to_json())))
}
fn handle_find_element(&self, parameters: &LocatorParameters) -> WebDriverResult<WebDriverResponse> {
if parameters.using != LocatorStrategy::CSSSelector {
return Err(WebDriverError::new(ErrorStatus::UnsupportedOperation,
"Unsupported locator strategy"))
}
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::FindElementCSS(parameters.value.clone(), sender);
self.browsing_context_script_command(cmd)?;
match receiver.recv().unwrap() {
Ok(value) => {
let value_resp = value.map(|x| WebElement::new(x).to_json()).to_json();
Ok(WebDriverResponse::Generic(ValueResponse::new(value_resp)))
}
Err(_) => Err(WebDriverError::new(ErrorStatus::InvalidSelector,
"Invalid selector"))
}
}
fn handle_switch_to_frame(&mut self, parameters: &SwitchToFrameParameters) -> WebDriverResult<WebDriverResponse> {
use webdriver::common::FrameId;
let frame_id = match parameters.id {
FrameId::Null => {
let session = self.session_mut()?;
session.browsing_context_id = BrowsingContextId::from(session.top_level_browsing_context_id);
return Ok(WebDriverResponse::Void);
},
FrameId::Short(ref x) => WebDriverFrameId::Short(*x),
FrameId::Element(ref x) => WebDriverFrameId::Element(x.id.clone())
};
self.switch_to_frame(frame_id)
}
fn handle_switch_to_parent_frame(&mut self) -> WebDriverResult<WebDriverResponse> {
self.switch_to_frame(WebDriverFrameId::Parent)
}
fn switch_to_frame(&mut self, frame_id: WebDriverFrameId) -> WebDriverResult<WebDriverResponse> {
if let WebDriverFrameId::Short(_) = frame_id {
return Err(WebDriverError::new(ErrorStatus::UnsupportedOperation,
"Selecting frame by id not supported"));
}
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetBrowsingContextId(frame_id, sender);
self.browsing_context_script_command(cmd)?;
let browsing_context_id = receiver.recv().unwrap()<|fim▁hole|>
self.session_mut()?.browsing_context_id = browsing_context_id;
Ok(WebDriverResponse::Void)
}
fn handle_find_elements(&self, parameters: &LocatorParameters) -> WebDriverResult<WebDriverResponse> {
if parameters.using != LocatorStrategy::CSSSelector {
return Err(WebDriverError::new(ErrorStatus::UnsupportedOperation,
"Unsupported locator strategy"))
}
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::FindElementsCSS(parameters.value.clone(), sender);
self.browsing_context_script_command(cmd)?;
match receiver.recv().unwrap() {
Ok(value) => {
let resp_value: Vec<Json> = value.into_iter().map(
|x| WebElement::new(x).to_json()).collect();
Ok(WebDriverResponse::Generic(ValueResponse::new(resp_value.to_json())))
}
Err(_) => Err(WebDriverError::new(ErrorStatus::InvalidSelector,
"Invalid selector"))
}
}
// https://w3c.github.io/webdriver/webdriver-spec.html#get-element-rect
fn handle_element_rect(&self, element: &WebElement) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetElementRect(element.id.clone(), sender);
self.browsing_context_script_command(cmd)?;
match receiver.recv().unwrap() {
Ok(rect) => {
let response = ElementRectResponse::new(rect.origin.x, rect.origin.y,
rect.size.width, rect.size.height);
Ok(WebDriverResponse::ElementRect(response))
},
Err(_) => Err(WebDriverError::new(ErrorStatus::StaleElementReference,
"Unable to find element in document"))
}
}
fn handle_element_text(&self, element: &WebElement) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetElementText(element.id.clone(), sender);
self.browsing_context_script_command(cmd)?;
match receiver.recv().unwrap() {
Ok(value) => Ok(WebDriverResponse::Generic(ValueResponse::new(value.to_json()))),
Err(_) => Err(WebDriverError::new(ErrorStatus::StaleElementReference,
"Unable to find element in document"))
}
}
fn handle_active_element(&self) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetActiveElement(sender);
self.browsing_context_script_command(cmd)?;
let value = receiver.recv().unwrap().map(|x| WebElement::new(x).to_json());
Ok(WebDriverResponse::Generic(ValueResponse::new(value.to_json())))
}
fn handle_element_tag_name(&self, element: &WebElement) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetElementTagName(element.id.clone(), sender);
self.browsing_context_script_command(cmd)?;
match receiver.recv().unwrap() {
Ok(value) => Ok(WebDriverResponse::Generic(ValueResponse::new(value.to_json()))),
Err(_) => Err(WebDriverError::new(ErrorStatus::StaleElementReference,
"Unable to find element in document"))
}
}
fn handle_element_attribute(&self, element: &WebElement, name: &str) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetElementAttribute(element.id.clone(), name.to_owned(), sender);
self.browsing_context_script_command(cmd)?;
match receiver.recv().unwrap() {
Ok(value) => Ok(WebDriverResponse::Generic(ValueResponse::new(value.to_json()))),
Err(_) => Err(WebDriverError::new(ErrorStatus::StaleElementReference,
"Unable to find element in document"))
}
}
fn handle_element_css(&self, element: &WebElement, name: &str) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetElementCSS(element.id.clone(), name.to_owned(), sender);
self.browsing_context_script_command(cmd)?;
match receiver.recv().unwrap() {
Ok(value) => Ok(WebDriverResponse::Generic(ValueResponse::new(value.to_json()))),
Err(_) => Err(WebDriverError::new(ErrorStatus::StaleElementReference,
"Unable to find element in document"))
}
}
fn handle_get_cookies(&self) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetCookies(sender);
self.browsing_context_script_command(cmd)?;
let cookies = receiver.recv().unwrap();
let response = cookies.into_iter().map(|cookie| {
cookie_msg_to_cookie(cookie.into_inner())
}).collect::<Vec<Cookie>>();
Ok(WebDriverResponse::Cookie(CookieResponse::new(response)))
}
fn handle_get_cookie(&self, name: &str) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::GetCookie(name.to_owned(), sender);
self.browsing_context_script_command(cmd)?;
let cookies = receiver.recv().unwrap();
let response = cookies.into_iter().map(|cookie| {
cookie_msg_to_cookie(cookie.into_inner())
}).collect::<Vec<Cookie>>();
Ok(WebDriverResponse::Cookie(CookieResponse::new(response)))
}
fn handle_add_cookie(&self, params: &AddCookieParameters) -> WebDriverResult<WebDriverResponse> {
let (sender, receiver) = ipc::channel().unwrap();
let cookie = cookie_rs::Cookie::build(params.name.to_owned(), params.value.to_owned())
.secure(params.secure)
.http_only(params.httpOnly);
let cookie = match params.domain {
Nullable::Value(ref domain) => cookie.domain(domain.to_owned()),
_ => cookie,
};
let cookie = match params.path {
Nullable::Value(ref path) => cookie.path(path.to_owned()).finish(),
_ => cookie.finish(),
};
let cmd = WebDriverScriptCommand::AddCookie(cookie, sender);
self.browsing_context_script_command(cmd)?;
match receiver.recv().unwrap() {
Ok(_) => Ok(WebDriverResponse::Void),
Err(response) => match response {
WebDriverCookieError::InvalidDomain => Err(WebDriverError::new(ErrorStatus::InvalidCookieDomain,
"Invalid cookie domain")),
WebDriverCookieError::UnableToSetCookie => Err(WebDriverError::new(ErrorStatus::UnableToSetCookie,
"Unable to set cookie"))
}
}
}
fn handle_set_timeouts(&mut self,
parameters: &TimeoutsParameters)
-> WebDriverResult<WebDriverResponse> {
let mut session = try!(self.session
.as_mut()
.ok_or(WebDriverError::new(ErrorStatus::SessionNotCreated, "")));
session.script_timeout = parameters.script;
session.load_timeout = parameters.page_load;
session.implicit_wait_timeout = parameters.implicit;
Ok(WebDriverResponse::Void)
}
fn handle_execute_script(&self, parameters: &JavascriptCommandParameters)
-> WebDriverResult<WebDriverResponse> {
let func_body = ¶meters.script;
let args_string = "";
// This is pretty ugly; we really want something that acts like
// new Function() and then takes the resulting function and executes
// it with a vec of arguments.
let script = format!("(function() {{ {} }})({})", func_body, args_string);
let (sender, receiver) = ipc::channel().unwrap();
let command = WebDriverScriptCommand::ExecuteScript(script, sender);
self.browsing_context_script_command(command)?;
let result = receiver.recv().unwrap();
self.postprocess_js_result(result)
}
fn handle_execute_async_script(&self,
parameters: &JavascriptCommandParameters)
-> WebDriverResult<WebDriverResponse> {
let func_body = ¶meters.script;
let args_string = "window.webdriverCallback";
let script = match self.session()?.script_timeout {
Some(timeout) => {
format!("setTimeout(webdriverTimeout, {}); (function(callback) {{ {} }})({})",
timeout,
func_body,
args_string)
}
None => format!("(function(callback) {{ {} }})({})", func_body, args_string),
};
let (sender, receiver) = ipc::channel().unwrap();
let command = WebDriverScriptCommand::ExecuteAsyncScript(script, sender);
self.browsing_context_script_command(command)?;
let result = receiver.recv().unwrap();
self.postprocess_js_result(result)
}
fn postprocess_js_result(&self, result: WebDriverJSResult) -> WebDriverResult<WebDriverResponse> {
match result {
Ok(value) => Ok(WebDriverResponse::Generic(ValueResponse::new(value.to_json()))),
Err(WebDriverJSError::Timeout) => Err(WebDriverError::new(ErrorStatus::Timeout, "")),
Err(WebDriverJSError::UnknownType) => Err(WebDriverError::new(
ErrorStatus::UnsupportedOperation, "Unsupported return type")),
Err(WebDriverJSError::BrowsingContextNotFound) => Err(WebDriverError::new(
ErrorStatus::JavascriptError, "Pipeline id not found in browsing context"))
}
}
fn handle_element_send_keys(&self,
element: &WebElement,
keys: &SendKeysParameters) -> WebDriverResult<WebDriverResponse> {
let browsing_context_id = self.session()?.browsing_context_id;
let (sender, receiver) = ipc::channel().unwrap();
let cmd = WebDriverScriptCommand::FocusElement(element.id.clone(), sender);
let cmd_msg = WebDriverCommandMsg::ScriptCommand(browsing_context_id, cmd);
self.constellation_chan.send(ConstellationMsg::WebDriverCommand(cmd_msg)).unwrap();
// TODO: distinguish the not found and not focusable cases
try!(receiver.recv().unwrap().or_else(|_| Err(WebDriverError::new(
ErrorStatus::StaleElementReference, "Element not found or not focusable"))));
let keys = try!(keycodes_to_keys(&keys.value).or_else(|_|
Err(WebDriverError::new(ErrorStatus::UnsupportedOperation, "Failed to convert keycodes"))));
// TODO: there's a race condition caused by the focus command and the
// send keys command being two separate messages,
// so the constellation may have changed state between them.
let cmd_msg = WebDriverCommandMsg::SendKeys(browsing_context_id, keys);
self.constellation_chan.send(ConstellationMsg::WebDriverCommand(cmd_msg)).unwrap();
Ok(WebDriverResponse::Void)
}
fn handle_take_screenshot(&self) -> WebDriverResult<WebDriverResponse> {
let mut img = None;
let top_level_id = self.session()?.top_level_browsing_context_id;
let interval = 1000;
let iterations = 30_000 / interval;
for _ in 0..iterations {
let (sender, receiver) = ipc::channel().unwrap();
let cmd_msg = WebDriverCommandMsg::TakeScreenshot(top_level_id, sender);
self.constellation_chan.send(ConstellationMsg::WebDriverCommand(cmd_msg)).unwrap();
if let Some(x) = receiver.recv().unwrap() {
img = Some(x);
break;
};
thread::sleep(Duration::from_millis(interval))
}
let img = match img {
Some(img) => img,
None => return Err(WebDriverError::new(ErrorStatus::Timeout,
"Taking screenshot timed out")),
};
// The compositor always sends RGB pixels.
assert!(img.format == PixelFormat::RGB8, "Unexpected screenshot pixel format");
let rgb = RgbImage::from_raw(img.width, img.height, img.bytes.to_vec()).unwrap();
let mut png_data = Vec::new();
DynamicImage::ImageRgb8(rgb).save(&mut png_data, ImageFormat::PNG).unwrap();
let encoded = base64::encode(&png_data);
Ok(WebDriverResponse::Generic(ValueResponse::new(encoded.to_json())))
}
fn handle_get_prefs(&self,
parameters: &GetPrefsParameters) -> WebDriverResult<WebDriverResponse> {
let prefs = parameters.prefs
.iter()
.map(|item| (item.clone(), PREFS.get(item).to_json()))
.collect::<BTreeMap<_, _>>();
Ok(WebDriverResponse::Generic(ValueResponse::new(prefs.to_json())))
}
fn handle_set_prefs(&self,
parameters: &SetPrefsParameters) -> WebDriverResult<WebDriverResponse> {
for &(ref key, ref value) in parameters.prefs.iter() {
PREFS.set(key, value.clone());
}
Ok(WebDriverResponse::Void)
}
fn handle_reset_prefs(&self,
parameters: &GetPrefsParameters) -> WebDriverResult<WebDriverResponse> {
let prefs = if parameters.prefs.len() == 0 {
PREFS.reset_all();
BTreeMap::new()
} else {
parameters.prefs
.iter()
.map(|item| (item.clone(), PREFS.reset(item).to_json()))
.collect::<BTreeMap<_, _>>()
};
Ok(WebDriverResponse::Generic(ValueResponse::new(prefs.to_json())))
}
}
impl WebDriverHandler<ServoExtensionRoute> for Handler {
fn handle_command(&mut self,
_session: &Option<Session>,
msg: WebDriverMessage<ServoExtensionRoute>) -> WebDriverResult<WebDriverResponse> {
// Unless we are trying to create a new session, we need to ensure that a
// session has previously been created
match msg.command {
WebDriverCommand::NewSession(_) => {},
_ => {
try!(self.session());
}
}
match msg.command {
WebDriverCommand::NewSession(_) => self.handle_new_session(),
WebDriverCommand::DeleteSession => self.handle_delete_session(),
WebDriverCommand::AddCookie(ref parameters) => self.handle_add_cookie(parameters),
WebDriverCommand::Get(ref parameters) => self.handle_get(parameters),
WebDriverCommand::GetCurrentUrl => self.handle_current_url(),
WebDriverCommand::GetWindowSize => self.handle_window_size(),
WebDriverCommand::SetWindowSize(ref size) => self.handle_set_window_size(size),
WebDriverCommand::IsEnabled(ref element) => self.handle_is_enabled(element),
WebDriverCommand::IsSelected(ref element) => self.handle_is_selected(element),
WebDriverCommand::GoBack => self.handle_go_back(),
WebDriverCommand::GoForward => self.handle_go_forward(),
WebDriverCommand::Refresh => self.handle_refresh(),
WebDriverCommand::GetTitle => self.handle_title(),
WebDriverCommand::GetWindowHandle => self.handle_window_handle(),
WebDriverCommand::GetWindowHandles => self.handle_window_handles(),
WebDriverCommand::SwitchToFrame(ref parameters) => self.handle_switch_to_frame(parameters),
WebDriverCommand::SwitchToParentFrame => self.handle_switch_to_parent_frame(),
WebDriverCommand::FindElement(ref parameters) => self.handle_find_element(parameters),
WebDriverCommand::FindElements(ref parameters) => self.handle_find_elements(parameters),
WebDriverCommand::GetNamedCookie(ref name) => self.handle_get_cookie(name),
WebDriverCommand::GetCookies => self.handle_get_cookies(),
WebDriverCommand::GetActiveElement => self.handle_active_element(),
WebDriverCommand::GetElementRect(ref element) => self.handle_element_rect(element),
WebDriverCommand::GetElementText(ref element) => self.handle_element_text(element),
WebDriverCommand::GetElementTagName(ref element) => self.handle_element_tag_name(element),
WebDriverCommand::GetElementAttribute(ref element, ref name) =>
self.handle_element_attribute(element, name),
WebDriverCommand::GetCSSValue(ref element, ref name) =>
self.handle_element_css(element, name),
WebDriverCommand::ExecuteScript(ref x) => self.handle_execute_script(x),
WebDriverCommand::ExecuteAsyncScript(ref x) => self.handle_execute_async_script(x),
WebDriverCommand::ElementSendKeys(ref element, ref keys) =>
self.handle_element_send_keys(element, keys),
WebDriverCommand::SetTimeouts(ref x) => self.handle_set_timeouts(x),
WebDriverCommand::TakeScreenshot => self.handle_take_screenshot(),
WebDriverCommand::Extension(ref extension) => {
match *extension {
ServoExtensionCommand::GetPrefs(ref x) => self.handle_get_prefs(x),
ServoExtensionCommand::SetPrefs(ref x) => self.handle_set_prefs(x),
ServoExtensionCommand::ResetPrefs(ref x) => self.handle_reset_prefs(x),
}
}
_ => Err(WebDriverError::new(ErrorStatus::UnsupportedOperation,
"Command not implemented"))
}
}
fn delete_session(&mut self, _session: &Option<Session>) {
// Servo doesn't support multiple sessions, so we exit on session deletion
let _ = self.constellation_chan.send(ConstellationMsg::Exit);
self.session = None;
}
}<|fim▁end|>
|
.or(Err(WebDriverError::new(ErrorStatus::NoSuchFrame, "Frame does not exist")))?;
|
<|file_name|>encrypt_decrypt_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package crypto
import (
"bytes"
"fmt"
"testing"
"github.com/uther/go-uther/common"
)
func TestBox(t *testing.T) {
prv1 := ToECDSA(common.Hex2Bytes("4b50fa71f5c3eeb8fdc452224b2395af2fcc3d125e06c32c82e048c0559db03f"))
prv2 := ToECDSA(common.Hex2Bytes("d0b043b4c5d657670778242d82d68a29d25d7d711127d17b8e299f156dad361a"))
pub2 := ToECDSAPub(common.Hex2Bytes("04bd27a63c91fe3233c5777e6d3d7b39204d398c8f92655947eb5a373d46e1688f022a1632d264725cbc7dc43ee1cfebde42fa0a86d08b55d2acfbb5e9b3b48dc5"))
message := []byte("Hello, world.")
ct, err := Encrypt(pub2, message)
if err != nil {
fmt.Println(err.Error())
t.FailNow()
}
pt, err := Decrypt(prv2, ct)
if err != nil {
fmt.Println(err.Error())
t.FailNow()
}
<|fim▁hole|> fmt.Println("ecies: plaintext doesn't match message")
t.FailNow()
}
_, err = Decrypt(prv1, pt)
if err == nil {
fmt.Println("ecies: encryption should not have succeeded")
t.FailNow()
}
}<|fim▁end|>
|
if !bytes.Equal(pt, message) {
|
<|file_name|>NexaRemapButton.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2005-2013, Stefan Strömberg <[email protected]>
*
* This file is part of OpenNetHome (http://www.nethome.nu)
*
* OpenNetHome is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OpenNetHome is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package nu.nethome.home.items.nexa;
import nu.nethome.home.item.HomeItem;
import nu.nethome.home.item.HomeItemType;
import nu.nethome.home.items.RemapButton;
import nu.nethome.home.system.Event;
import nu.nethome.util.plugin.Plugin;
/**
* @author Stefan
*/
@SuppressWarnings("UnusedDeclaration")
@Plugin
@HomeItemType(value = "Controls", creationEvents = "Nexa_Message")
public class NexaRemapButton extends RemapButton implements HomeItem {
private static final String MODEL = ("<?xml version = \"1.0\"?> \n"
+ "<HomeItem Class=\"NexaRemapButton\" Category=\"Controls\" >"
+ " <Attribute Name=\"State\" Type=\"String\" Get=\"getState\" Init=\"setState\" Default=\"true\" />"
+ " <Attribute Name=\"HouseCode\" Type=\"StringList\" Get=\"getHouseCode\" Set=\"setHouseCode\" >"
+ " <item>A</item> <item>B</item> <item>C</item> <item>D</item> <item>E</item> <item>F</item> <item>G</item> <item>H</item> </Attribute>"
+ " <Attribute Name=\"Button\" Type=\"StringList\" Get=\"getButton\" Set=\"setButton\" >"
+ " <item>1</item> <item>2</item> <item>3</item> <item>4</item> <item>5</item> <item>6</item> <item>7</item> <item>8</item> </Attribute>"
+ " <Attribute Name=\"OnCommand\" Type=\"Command\" Get=\"getOnCommand\" Set=\"setOnCommand\" />"
+ " <Attribute Name=\"OffCommand\" Type=\"Command\" Get=\"getOffCommand\" Set=\"setOffCommand\" />"
+ " <Attribute Name=\"HoldOffTime\" Type=\"StringList\" Get=\"getHoldOffTime\" Set=\"setHoldOffTime\" >"
+ " <item>0</item> <item>100</item> <item>150</item> <item>200</item> <item>300</item> <item>400</item> </Attribute>"
+ " <Action Name=\"on\" Method=\"on\" />"
+ " <Action Name=\"off\" Method=\"off\" />"
+ " <Action Name=\"enable\" Method=\"enable\" />"
+ " <Action Name=\"disable\" Method=\"disable\" />"
+ "</HomeItem> ");
// Public attributes
private int buttonHouseCode = 0;
private int buttonNumber = 1;
public NexaRemapButton() {
}
public boolean receiveEvent(Event event) {
// Check the event and see if they affect our current state.
if (event.getAttribute(Event.EVENT_TYPE_ATTRIBUTE).equals("Nexa_Message") &&
event.getAttribute("Direction").equals("In") &&
(event.getAttributeInt("Nexa.HouseCode") == buttonHouseCode) &&
(event.getAttributeInt("Nexa.Button") == buttonNumber)) {
processEvent(event);
return true;
} else {
return handleInit(event);
}
}
@Override
protected boolean initAttributes(Event event) {
buttonHouseCode = event.getAttributeInt("Nexa.HouseCode");
buttonNumber = event.getAttributeInt("Nexa.Button");
return true;
}
@Override
protected void actOnEvent(Event event) {
if (event.getAttribute("Nexa.Command").equals("1")) {
this.on();
} else {
this.off();
}
}
public String getModel() {
return MODEL;
}
/**
* @return Returns the deviceCode.
*/
@SuppressWarnings("UnusedDeclaration")
public String getButton() {
return Integer.toString(buttonNumber);
}
/**
* @param deviceCode The deviceCode to set.
*/<|fim▁hole|> try {
int result = Integer.parseInt(deviceCode);
if ((result > 0) && (result <= 8)) {
buttonNumber = result;
}
} catch (NumberFormatException e) {
// Ignore
}
}
/**
* @return Returns the houseCode.
*/
@SuppressWarnings("UnusedDeclaration")
public String getHouseCode() {
if ((buttonHouseCode >= 0) && (buttonHouseCode <= 7)) {
return Character.toString("ABCDEFGH".charAt(buttonHouseCode));
}
return "A";
}
/**
* @param houseCode The HouseCode to set.
*/
@SuppressWarnings("UnusedDeclaration")
public void setHouseCode(String houseCode) {
String hc = houseCode.toUpperCase();
if ((hc.length() == 1) && (hc.compareTo("A") >= 0) &&
(hc.compareTo("H") <= 0)) {
buttonHouseCode = (int) hc.charAt(0) - (int) 'A';
}
}
}<|fim▁end|>
|
@SuppressWarnings("UnusedDeclaration")
public void setButton(String deviceCode) {
|
<|file_name|>speed_mixture.py<|end_file_name|><|fim▁begin|>'''
testing speedup of code
Created on Sep 17, 2016
@author: jonaswallin
'''
from Mixture.density import mNIG
from Mixture.density.purepython import mNIG as pmNIG
from Mixture import mixOneDims
import numpy as np
import numpy.random as npr
import timeit
# most speed here is used startup (iteration = 500, n = 1000)
# Cython:
# 2000 0.152 0.000 0.268 0.000 NIG.py:82(EV)
# 2000 0.098 0.000 0.145 0.000 NIG.py:39(dens)<|fim▁hole|># 2000 0.037 0.000 0.037 0.000 {Mixture.util.cython_Bessel.Bessel1approx}
# Pure Python:
# 2000 1.201 0.001 1.264 0.001 NIG.py:208(EV)
# 2000 1.195 0.001 1.201 0.001 NIG.py:248(dens)
# Pure Python, no precompute:
# 2000 2.322 0.001 2.387 0.001 NIG.py:208(EV)
# 2000 1.205 0.001 1.211 0.001 NIG.py:248(dens)
npr.seed(10)
def speed_python(pure_python=False, precompute = True):
K = 2
d = 2
iteration = 500
mixObj = mixOneDims(K=K, d=d)
if pure_python:
mixObj.set_densites([pmNIG(d=d) for k in range(K)]) # @UnusedVariable
else:
mixObj.set_densites([mNIG(d=d) for k in range(K)]) # @UnusedVariable
paramMat_true = [np.array([[1.1, 1.12, 0.1, 0],
[-1, 0,2 , -4] ]),
np.array([[-2, 0, 0.3, 0],
[1, 0, 2 , -4] ])]
alpha_true = [0]
mixObj.set_paramMat(alpha = alpha_true,paramMat = paramMat_true)
Y = mixObj.sample(n = 1000)
mixObj.set_data(Y)
paramMat = [npr.randn(2,4),npr.randn(2,4)]
paramMat[0][0,0] = 1.1
paramMat[1][0,0] = -2
alpha = np.array(alpha_true)
for i in range(iteration): # @UnusedVariable
p, alpha, paramMat = mixObj.EMstep(alpha = alpha, paramMat = paramMat , precompute = precompute) # @UnusedVariable
if __name__ == "__main__":<|fim▁end|>
|
# 2000 0.051 0.000 0.051 0.000 {Mixture.util.cython_Bessel.Bessel0approx}
|
<|file_name|>project.ts<|end_file_name|><|fim▁begin|>import * as fs from 'fs';
import * as path from 'path';
import * as npm from './npm';
import * as node from './node';
import * as typings from './typings';
import { rmrfSync, walk, exists, symlink, readJsonFile, readFile, flatSymlinkDirectoryIgnore, writeJsonFile, copyFile, copyDirectory, existsCopyDirectory, mkdirp } from './fs';
import { exec } from './sh';
import { renderTemplateDir } from './render';
import * as Mustache from 'mustache';
const { merge } = require('ramda');
export type ProjectType = 'javascript' | 'typescript';
interface IProject {
init: (componentName: string, projectType: ProjectType) => Promise<{}>;
startDevelopment: (projectType: ProjectType, mountPath: string) => Promise<{}>;
}
interface IPackageJson {
name: string;
repository: {
type: string;
url: string;
};
jest?: {};
}
export default class Project implements IProject {
/**
* Sapling works within existing project code. The
* current working directory points to where the
* component project lives.
*/
private cwd: string;
/**
* Path to the mounted projects package.json
*/
private packagePath: string;
constructor(cwd: string) {
this.cwd = cwd;
this.packagePath = path.join(this.cwd, 'package.json');
}
/**
* Install modules (npm / typings) if they are not present
*/
private installModules(projectType: ProjectType): Promise<{}> {
let tasks: Promise<boolean>[] = [];
tasks.push(exists(path.join(this.cwd, 'node_modules')).then((present: boolean) => {
return !present ? npm.install(this.cwd) : Promise.resolve(true);
}));
if (projectType === 'typescript') {
tasks.push(exists(path.join(this.cwd, 'typings')).then((present: boolean) => {
return !present ? typings.install(this.cwd) : Promise.resolve(true);
}));
}
return Promise.all(tasks);
}
/**
* The goal here is to mount the node_modules in our mounted project
* in a way that melds the modules from react-sapling, and those
* modules that the user has included in their own project.
*/
private mountNodeModules(projectType: ProjectType, mountPath: string): Promise<{}> {
// The new path where we will put our melded modules
const mountModulesPath = path.join(mountPath, 'node_modules');
const mountModulesPathBin = path.join(mountModulesPath, '.bin');
/*
Path to modules that exist in the current working directory.
i.e. our component project
*/
const prodModules = path.join(this.cwd, 'node_modules');
/*
Path to modules that exist in development
i.e. modules that are usually in our project, but are nested under
the react-sapling project (caused by running `npm link react-sapling`
during development)
*/
const devModules = path.join(__dirname, '..', 'node_modules');
return mkdirp(mountModulesPathBin)
.then(() => flatSymlinkDirectoryIgnore(path.join(prodModules, '.bin'), mountModulesPathBin))
.then(() => flatSymlinkDirectoryIgnore(path.join(devModules, '.bin'), mountModulesPathBin))
.then(() => flatSymlinkDirectoryIgnore(prodModules, mountModulesPath))
.then(() => flatSymlinkDirectoryIgnore(devModules, mountModulesPath));
}
/**
* Symlink files into the mounted project so that they appear local
* during builds & development.
*/
private mountSymlinks(projectType: ProjectType, mountPath: string): Promise<{}> {
let symlinkPromises: Promise<{}>[] = [];
// Side effect central!
const addSymlink = (srcPath: string, destPath: string, type: 'file' | 'dir') => {
symlinkPromises.push(symlink(path.join(this.cwd, srcPath),
path.join(mountPath, destPath),
type));
};
addSymlink('src', 'src', 'dir');
addSymlink('package.json', 'package.json', 'file');
addSymlink('README.md', 'README.md', 'file');
if (projectType === 'typescript') {
addSymlink('typings', 'typings', 'dir');
addSymlink('tsconfig.json', 'tsconfig.json', 'file');
}
if (projectType === 'javascript') {
addSymlink('.eslintrc', '.eslintrc', 'file');
}
return Promise.all(symlinkPromises);
}
private mountCopies(projectType: ProjectType, mountPath: string): Promise<{}> {<|fim▁hole|> let copyPromises: Promise<{}>[] = [];
// NOTE: This is done to support Jest tests which does now allow
// for symlinked tests for some reason.
copyPromises.push(
mkdirp(path.join(mountPath, 'src-copy')).then(_ =>
existsCopyDirectory(path.join(this.cwd, 'src'),
path.join(mountPath, 'src-copy')))); // #lisp
return Promise.all(copyPromises);
}
/**
* Copy over the build dir files from this project into the mount-path.
*/
private mountBuildDir(projectType: ProjectType, mountPath: string): Promise<{}> {
const buildDir = path.join(__dirname, 'build', 'shared');
return copyDirectory(buildDir, mountPath);
}
private mountProject(projectType: ProjectType, mountPath: string): Promise<{}> {
// Ensure that this project is unmounted.
this.unmountProject(mountPath);
// Always ensure that this project is unmounted on exit.
process.on('exit', () => this.unmountProject(mountPath));
process.on('SIGINT', () => this.unmountProject(mountPath));
process.on('uncaughtException', () => this.unmountProject(mountPath));
return this.installModules(projectType).then(() => {
// Make the mount path, then shove stuff into it (basically).
return mkdirp(mountPath);
}).then(() => Promise.all([
this.mountSymlinks(projectType, mountPath),
this.mountCopies(projectType, mountPath),
this.mountNodeModules(projectType, mountPath),
this.mountBuildDir(projectType, mountPath)
]));
}
private unmountProject(mountPath: string): void {
return rmrfSync(mountPath);
}
private getPackageData(): Promise<IPackageJson> {
return readJsonFile<IPackageJson>(this.packagePath);
}
private generateFiles(projectType: ProjectType, componentName: string): Promise<{}> {
const sharedTemplateDir = path.join(__dirname, 'scaffold', 'templates', 'shared');
const templateDir = path.join(__dirname, 'scaffold', 'templates', projectType);
return this.getPackageData().then((packageData) => {
const templateVars = {
componentName,
projectName: packageData.name
};
return Promise.all([
renderTemplateDir(templateDir, this.cwd, templateVars),
renderTemplateDir(sharedTemplateDir, this.cwd, templateVars)
]);
});
}
private initNpmPackage(projectType: string): Promise<{}> {
// Get the current project data
return this.getPackageData().then((packageData) => {
const repo = packageData.repository;
let projectGitUrl = '';
let isGitHubRepo = false;
if (repo && repo.type === 'git') {
const url = packageData.repository.url;
isGitHubRepo = /github\.com/.test(url);
projectGitUrl = url.replace('git+', '');
}
const isTypescript = projectType === 'typescript';
const isJavascript = !isTypescript;
// Get the path to our template package.json
const packageTemplatePath = path.join(__dirname, 'scaffold', 'config', 'packageData.mu.json');
// Read file contents, render it with variables,
// and merge it with the existing package data
return readFile(packageTemplatePath).then((packageTemplate) => {
const renderedPackageTemplate = Mustache.render(packageTemplate, {
projectGitUrl,
isGitHubRepo,
isTypescript,
isJavascript
});
const ourPackageData = JSON.parse(renderedPackageTemplate);
return Object.assign({}, packageData, ourPackageData);
});
}).then((newPackageData) => {
return writeJsonFile(this.packagePath, newPackageData);
});
}
private copyStaticFiles(projectType: string): Promise<{}> {
const sharedSrcDir = path.join(__dirname, 'scaffold', 'static', 'shared');
const srcDir = path.join(__dirname, 'scaffold', 'static', projectType);
return Promise.all([
existsCopyDirectory(sharedSrcDir, this.cwd),
existsCopyDirectory(srcDir, this.cwd),
]);
}
public init(projectType: ProjectType, componentName: string): Promise<{}> {
return this.generateFiles(projectType, componentName)
.then(() => this.initNpmPackage(projectType))
.then(() => this.copyStaticFiles(projectType))
.then(() => this.installModules(projectType));
}
public startDevelopment(projectType: ProjectType, mountPath: string): Promise<{}> {
return this.mountProject(projectType, mountPath).then(() => {
const serverPath = path.join(mountPath, 'server.js');
return node.runScript(serverPath, [], mountPath, { PROJECT_TYPE: projectType });
});
}
private buildDefinitions(projectType: ProjectType, mountPath: string): Promise<{}> {
if (projectType !== 'typescript') { return Promise.resolve({}); }
const tmpOutDir = `${mountPath}/TMP_TSC_TYPINGS`;
const cmd = `tsc --rootDir ${mountPath} --outDir ${tmpOutDir} --target ES6 --jsx React --experimentalDecorators --allowSyntheticDefaultImports --declaration`;
return exec(cmd, { cwd: mountPath }).then(() => {
return walk(tmpOutDir);
}).then((files) => {
const destDir = path.join(mountPath, '..', 'dist', 'typings');
// make the typings dir
return mkdirp(destDir).then(() => {
// copy over *.d.ts files from the `tmpOutDir` to our `destDir`
return Promise.all([files.filter((file) => {
return /\.d\.ts$/.test(file);
}).map((file) => {
const basename = path.basename(file);
const dest = path.join(destDir, basename);
return copyFile(file, dest);
})]);
});
});
}
public buildLibProd(projectType: ProjectType, mountPath: string): Promise<void> {
return this.mountProject(projectType, mountPath).then(() => {
const buildScriptPath = path.join(mountPath, 'build-lib-prod.js');
return node.runScript(buildScriptPath, [], mountPath, { PROJECT_TYPE: projectType });
}).then(() => this.buildDefinitions(projectType, mountPath))
.then(() => this.unmountProject(mountPath))
.catch(() => this.unmountProject(mountPath));
}
public buildLibDocs(projectType: ProjectType, mountPath: string): Promise<void> {
return this.mountProject(projectType, mountPath).then(() => {
const buildScriptPath = path.join(mountPath, 'build-lib-docs.js');
return node.runScript(buildScriptPath, [], mountPath, { PROJECT_TYPE: projectType });
}).then(() => this.buildDefinitions(projectType, mountPath))
.then(() => this.unmountProject(mountPath))
.catch(() => this.unmountProject(mountPath));
}
public buildBundleDocs(projectType: ProjectType, mountPath: string): Promise<void> {
return this.mountProject(projectType, mountPath).then(() => {
return mkdirp(path.join(mountPath, '..', 'dist', 'docs', 'src'));
}).then(() => {
return copyFile(path.join(mountPath, 'index.html'),
path.join(mountPath, '..', 'dist', 'docs', 'index.html'));
}).then(() => {
const buildScriptPath = path.join(mountPath, 'build-bundle-docs.js');
return node.runScript(buildScriptPath, [], mountPath, { PROJECT_TYPE: projectType });
}).then(() => this.unmountProject(mountPath))
.catch(() => this.unmountProject(mountPath));
}
/* tslint:disable */
private fileList(dir: any): any {
return fs.readdirSync(dir).reduce(function(list: any, file: any): any {
const name: any = path.join(dir, file);
const isDir: any = fs.statSync(name).isDirectory();
return list.concat(isDir ? this.fileList(name) : [name]);
}, []);
}
public runTests(projectType: ProjectType, mountPath: string): Promise<void> {
return this.mountProject(projectType, mountPath).then(() => {
return this.getPackageData();
}).then((packageData) => {
// Path to the jest executable
const jestBin = path.join(mountPath, 'node_modules', '.bin', 'jest');
// The directory that we wish to test with Jest.
// Currently, jest ignores the symlinked `src` dir, so it should only
// look into the `src-copy`.
const jestTestDir = path.join(mountPath);
// Our base jest configuration
const jestConfigPath = path.join(mountPath, 'jest-config.json');
// Fetch the data from that configuration file
return readJsonFile(jestConfigPath).then(jestConfig => {
// Our jest configuration path that we are generating on the fly
const finalJestConfigPath = path.join(mountPath, 'jest-final-config.json');
// Create an object that represents our final Jest configuration. This
// will be stringified and stored in the finalJestConfigPath file.
let finalJestConfig = jestConfig;
// Options that we are going to pass to Jest
let jestOptions = ['--no-cache', '--config', finalJestConfigPath];
// If the user has defined their own Jest configuration, go ahead and
// merge it in with our finalJestConfiguration
if (typeof packageData.jest !== 'undefined') {
finalJestConfig = merge(finalJestConfig, packageData.jest);
}
// Write our new configuration & run Jest with it.
return writeJsonFile(finalJestConfigPath, finalJestConfig).then(() => {
return node.runScript(jestBin,
jestOptions,
jestTestDir,
{ PROJECT_TYPE: projectType });
});
});
}).then(() => {
return this.unmountProject(mountPath);
}).catch(error => {
this.unmountProject(mountPath);
throw new Error('react-sapling: tests faild to run\n' + error.stack);
});
}
}<|fim▁end|>
| |
<|file_name|>routes.js<|end_file_name|><|fim▁begin|>Router.map(function() {
this.route('index.view', {
path: '/',
onBeforeAction: function () {
if (Meteor.isClient) {
Session.set('siteTitle', 'Vazco SlideDeck!');
};
this.next();
}
});
this.route('slideshow.view', {
path: '/slideshow',
onBeforeAction: function () {
if (Meteor.isClient) {
Session.set('siteTitle', 'Vazco SlideDeck! - Slideshow!');
};
this.next();
}
});
this.route('priv.slideshow.view', {
path: '/priv-slideshow',
onBeforeAction: function () {
var settings = App.SlidesSettings.findOne();
if (settings && settings.isPrivViewEnabled) {
if (Meteor.isClient) {
Session.set('siteTitle', 'Vazco SlideDeck! - Private Slideshow!');
};
this.next();
} else {
Router.go('/');
}
}
});
this.route('login.view', {
path: '/login',<|fim▁hole|> onBeforeAction: function () {
if (Meteor.isClient) {
Session.set('siteTitle', 'Vazco SlideDeck! - Admin Login!');
};
this.next();
}
});
});<|fim▁end|>
| |
<|file_name|>binop.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function
import ast
from jaspyx.ast_util import ast_load, ast_call
from jaspyx.visitor import BaseVisitor
class BinOp(BaseVisitor):
def visit_BinOp(self, node):
attr = getattr(self, 'BinOp_%s' % node.op.__class__.__name__, None)
attr(node.left, node.right)
for key, value in {
'Add': '+',
'Sub': '-',
'Mult': '*',
'Div': '/',
'Mod': '%',<|fim▁hole|> 'RShift': '>>',
}.items():
def gen_op(op):
def f_op(self, left, right):
self.group([left, op, right])
return f_op
exec('BinOp_%s = gen_op("%s")' % (key, value))
def BinOp_Pow(self, left, right):
pow_func = ast_load('Math.pow')
self.visit(ast_call(pow_func, left, right))
def BinOp_FloorDiv(self, left, right):
floor = ast_load('Math.floor')
self.visit(ast_call(floor, ast.BinOp(left, ast.Div(), right)))<|fim▁end|>
|
'BitAnd': '&',
'BitOr': '|',
'BitXor': '^',
'LShift': '<<',
|
<|file_name|>phase5.py<|end_file_name|><|fim▁begin|>import json
from django.template.loader import render_to_string
from django.http import HttpResponse
from django.utils import timezone
from django.shortcuts import render_to_response
from cir.models import *
import claim_views
from cir.phase_control import PHASE_CONTROL
import utils
def get_statement_comment_list(request):
response = {}
context = {}
forum = Forum.objects.get(id = request.session['forum_id'])
thread_comments = ForumComment.objects.filter(forum = forum)
print thread_comments
context['comments'] = thread_comments
response['forum_comment'] = render_to_string("phase5/forum-comment.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def put_statement_comment(request):
response = {}
context = {}
author = request.user
parent_id = request.REQUEST.get('parent_id')
text = request.REQUEST.get('text')
created_at = timezone.now()
forum = Forum.objects.get(id = request.session['forum_id'])
if parent_id == "": #root node
newForumComment = ForumComment(author = author, text = text, forum = forum, created_at = created_at)
else:
parent = ForumComment.objects.get(id = parent_id)
newForumComment = ForumComment(author = author, text = text, forum = forum, parent = parent, created_at = created_at)
newForumComment.save()
return HttpResponse(json.dumps(response), mimetype='application/json')
def vote_issue(request):
reason = request.REQUEST.get('reason')
author = request.user
forum = Forum.objects.get(id = request.session['forum_id'])
support = True
if (request.REQUEST.get('support') == "false"): support = False
vote, created = ForumVote.objects.get_or_create(forum = forum, author = author)
vote.reason = reason
vote.support = support<|fim▁hole|>def render_support_bar(request):
author = request.user
forum = Forum.objects.get(id = request.session['forum_id'])
response = {}
response["num_support"] = ForumVote.objects.filter(forum = forum, support = True).count()
response["num_oppose"] = ForumVote.objects.filter(forum = forum, support = False).count()
if request.user.is_authenticated():
response["my_num_support"] = ForumVote.objects.filter(forum = forum, support = True, author = author).count()
response["my_num_oppose"] = ForumVote.objects.filter(forum = forum, support = False, author = author).count()
return HttpResponse(json.dumps(response), mimetype='application/json')
def view_vote_result(request):
author = request.user
forum = Forum.objects.get(id = request.session['forum_id'])
response = {}
context = {}
context["entries"] = ForumVote.objects.filter(forum = forum)
response["vote_result_table"] = render_to_string('phase5/vote-result-table.html', context)
return HttpResponse(json.dumps(response), mimetype='application/json')<|fim▁end|>
|
vote.save()
response = {}
return HttpResponse(json.dumps(response), mimetype='application/json')
|
<|file_name|>inicio.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
print '''<!DOCTYPE html><html>'''
incluir(data,"head")
print '''<body>'''
incluir(data,"header")<|fim▁hole|>print '''</body></html>'''<|fim▁end|>
| |
<|file_name|>unsafe-block-without-braces.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
unsafe //{
std::mem::transmute::<f32, u32>(1.0);
//}
}
//~^^^ ERROR expected one of `extern`, `fn`, or `{`, found `std`<|fim▁end|>
|
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
<|file_name|>coded_input_stream.rs<|end_file_name|><|fim▁begin|>use std::io;
use std::io::BufRead;
use std::io::Read;
use std::mem;
use std::mem::MaybeUninit;
#[cfg(feature = "bytes")]
use ::bytes::Bytes;
#[cfg(feature = "bytes")]
use crate::chars::Chars;
use crate::enums::Enum;
use crate::error::ProtobufError;
use crate::error::WireError;
use crate::io::buf_read_iter::BufReadIter;
use crate::misc::maybe_ununit_array_assume_init;
use crate::reflect::types::ProtobufType;
use crate::reflect::types::ProtobufTypeBool;
use crate::reflect::types::ProtobufTypeDouble;
use crate::reflect::types::ProtobufTypeFixed;
use crate::reflect::types::ProtobufTypeFixed32;
use crate::reflect::types::ProtobufTypeFixed64;
use crate::reflect::types::ProtobufTypeFloat;
use crate::reflect::types::ProtobufTypeInt32;
use crate::reflect::types::ProtobufTypeInt64;
use crate::reflect::types::ProtobufTypeSfixed32;
use crate::reflect::types::ProtobufTypeSfixed64;
use crate::reflect::types::ProtobufTypeSint32;
use crate::reflect::types::ProtobufTypeSint64;
use crate::reflect::types::ProtobufTypeUint32;
use crate::reflect::types::ProtobufTypeUint64;
use crate::reflect::MessageDescriptor;
use crate::unknown::UnknownValue;
use crate::varint::decode::decode_varint32;
use crate::varint::decode::decode_varint64;
use crate::varint::MAX_VARINT_ENCODED_LEN;
use crate::wire_format;
use crate::wire_format::WireType;
use crate::zigzag::decode_zig_zag_32;
use crate::zigzag::decode_zig_zag_64;
use crate::EnumOrUnknown;
use crate::Message;
use crate::MessageDyn;
// Default recursion level limit. 100 is the default value of C++'s implementation.
const DEFAULT_RECURSION_LIMIT: u32 = 100;
// Max allocated vec when reading length-delimited from unknown input stream
pub(crate) const READ_RAW_BYTES_MAX_ALLOC: usize = 10_000_000;
/// Buffered read with handy utilities.
pub struct CodedInputStream<'a> {
source: BufReadIter<'a>,
recursion_level: u32,
recursion_limit: u32,
}
impl<'a> CodedInputStream<'a> {
/// Wrap a `Read`.
///
/// Note resulting `CodedInputStream` is buffered even if `Read` is not.
pub fn new(read: &'a mut dyn Read) -> CodedInputStream<'a> {
CodedInputStream::from_buf_read_iter(BufReadIter::from_read(read))
}
/// Create from `BufRead`.
///
/// `CodedInputStream` will utilize `BufRead` buffer.
pub fn from_buffered_reader(buf_read: &'a mut dyn BufRead) -> CodedInputStream<'a> {
CodedInputStream::from_buf_read_iter(BufReadIter::from_buf_read(buf_read))
}
/// Read from byte slice
pub fn from_bytes(bytes: &'a [u8]) -> CodedInputStream<'a> {
CodedInputStream::from_buf_read_iter(BufReadIter::from_byte_slice(bytes))
}
/// Read from `Bytes`.
///
/// `CodedInputStream` operations like
/// [`read_tokio_bytes`](crate::CodedInputStream::read_tokio_bytes)
/// will return a shared copy of this bytes object.
#[cfg(feature = "bytes")]
pub fn from_tokio_bytes(bytes: &'a Bytes) -> CodedInputStream<'a> {
CodedInputStream::from_buf_read_iter(BufReadIter::from_bytes(bytes))
}
fn from_buf_read_iter(source: BufReadIter<'a>) -> CodedInputStream<'a> {
CodedInputStream {
source: source,
recursion_level: 0,
recursion_limit: DEFAULT_RECURSION_LIMIT,
}
}
/// Set the recursion limit.
pub fn set_recursion_limit(&mut self, limit: u32) {
self.recursion_limit = limit;
}
#[inline]
pub(crate) fn incr_recursion(&mut self) -> crate::Result<()> {
if self.recursion_level >= self.recursion_limit {
return Err(ProtobufError::WireError(WireError::OverRecursionLimit).into());
}
self.recursion_level += 1;
Ok(())
}
#[inline]
pub(crate) fn decr_recursion(&mut self) {
self.recursion_level -= 1;
}
/// How many bytes processed
pub fn pos(&self) -> u64 {
self.source.pos()
}
/// How many bytes until current limit
pub fn bytes_until_limit(&self) -> u64 {
self.source.bytes_until_limit()
}
/// Read bytes into given `buf`.
#[inline]
pub fn read_exact(&mut self, buf: &mut [MaybeUninit<u8>]) -> crate::Result<()> {
self.source.read_exact(buf)
}
/// Read exact number of bytes as `Bytes` object.
///
/// This operation returns a shared view if `CodedInputStream` is
/// constructed with `Bytes` parameter.
#[cfg(feature = "bytes")]
fn read_raw_tokio_bytes(&mut self, count: usize) -> crate::Result<Bytes> {
self.source.read_exact_bytes(count)
}
/// Read one byte
#[inline(always)]
pub fn read_raw_byte(&mut self) -> crate::Result<u8> {
self.source.read_byte()
}
/// Push new limit, return previous limit.
pub fn push_limit(&mut self, limit: u64) -> crate::Result<u64> {
self.source.push_limit(limit)
}
/// Restore previous limit.
pub fn pop_limit(&mut self, old_limit: u64) {
self.source.pop_limit(old_limit);
}
/// Are we at EOF?
#[inline(always)]
pub fn eof(&mut self) -> crate::Result<bool> {
self.source.eof()
}
/// Check we are at EOF.
///
/// Return error if we are not at EOF.
pub fn check_eof(&mut self) -> crate::Result<()> {
let eof = self.eof()?;
if !eof {
return Err(ProtobufError::WireError(WireError::UnexpectedEof).into());
}
Ok(())
}
    /// Decode a varint one byte at a time (fallback used when the buffered
    /// fast path cannot see a complete varint in `remaining_in_buf`).
    fn read_raw_varint64_slow(&mut self) -> crate::Result<u64> {
        let mut r: u64 = 0;
        let mut i = 0;
        loop {
            // A valid varint occupies at most MAX_VARINT_ENCODED_LEN (10) bytes.
            if i == MAX_VARINT_ENCODED_LEN {
                return Err(ProtobufError::WireError(WireError::IncorrectVarint).into());
            }
            let b = self.read_raw_byte()?;
            // The 10th byte may only carry the single remaining high bit of a
            // u64; any larger payload would overflow 64 bits.
            if i == 9 && (b & 0x7f) > 1 {
                return Err(ProtobufError::WireError(WireError::IncorrectVarint).into());
            }
            // Each byte contributes 7 payload bits, least-significant group first.
            r = r | (((b & 0x7f) as u64) << (i * 7));
            i += 1;
            // A clear continuation (high) bit marks the final byte.
            if b < 0x80 {
                return Ok(r);
            }
        }
    }
fn read_raw_varint32_slow(&mut self) -> crate::Result<u32> {
let v = self.read_raw_varint64_slow()?;
if v > u32::MAX as u64 {
return Err(ProtobufError::WireError(WireError::U32Overflow(v)).into());
}
Ok(v as u32)
}
    /// Read varint.
    ///
    /// Fast path decodes directly from the buffered bytes; falls back to the
    /// byte-at-a-time slow path when the buffer does not contain a complete
    /// varint.
    #[inline]
    pub fn read_raw_varint64(&mut self) -> crate::Result<u64> {
        let rem = self.source.remaining_in_buf();
        match decode_varint64(rem)? {
            Some((r, c)) => {
                // `c` buffered bytes were consumed to decode value `r`.
                self.source.consume(c);
                Ok(r)
            }
            None => self.read_raw_varint64_slow(),
        }
    }
/// Read varint
#[inline]
pub fn read_raw_varint32(&mut self) -> crate::Result<u32> {
let rem = self.source.remaining_in_buf();
match decode_varint32(rem)? {
Some((r, c)) => {
self.source.consume(c);
Ok(r)
}
None => self.read_raw_varint32_slow(),
}
}
#[inline]
fn read_raw_varint32_or_eof(&mut self) -> crate::Result<Option<u32>> {
let rem = self.source.remaining_in_buf();
let v = decode_varint32(rem)?;
match v {
Some((r, c)) => {
self.source.consume(c);
Ok(Some(r))
}
None => {
if self.eof()? {
Ok(None)
} else {
let v = self.read_raw_varint32_slow()?;
Ok(Some(v))
}
}
}
}
/// Read little-endian 32-bit integer
pub fn read_raw_little_endian32(&mut self) -> crate::Result<u32> {
let mut bytes = [MaybeUninit::uninit(); 4];
self.read_exact(&mut bytes)?;
// SAFETY: `read_exact` guarantees that the buffer is filled.
let bytes = unsafe { maybe_ununit_array_assume_init(bytes) };
Ok(u32::from_le_bytes(bytes))
}
/// Read little-endian 64-bit integer
pub fn read_raw_little_endian64(&mut self) -> crate::Result<u64> {
let mut bytes = [MaybeUninit::uninit(); 8];
self.read_exact(&mut bytes)?;
// SAFETY: `read_exact` guarantees that the buffer is filled.
let bytes = unsafe { maybe_ununit_array_assume_init(bytes) };
Ok(u64::from_le_bytes(bytes))
}
/// Read tag number as `u32` or None if EOF is reached.
#[inline]
pub fn read_raw_tag_or_eof(&mut self) -> crate::Result<Option<u32>> {
self.read_raw_varint32_or_eof()
}
/// Read tag
#[inline]
pub(crate) fn read_tag(&mut self) -> crate::Result<wire_format::Tag> {
let v = self.read_raw_varint32()?;
wire_format::Tag::new(v)
}
/// Read tag, return it is pair (field number, wire type)
#[inline]
pub(crate) fn read_tag_unpack(&mut self) -> crate::Result<(u32, WireType)> {
self.read_tag().map(|t| t.unpack())
}
/// Read `double`
pub fn read_double(&mut self) -> crate::Result<f64> {
let bits = self.read_raw_little_endian64()?;
Ok(f64::from_bits(bits))
}
/// Read `float`
pub fn read_float(&mut self) -> crate::Result<f32> {
let bits = self.read_raw_little_endian32()?;
Ok(f32::from_bits(bits))
}
/// Read `int64`
pub fn read_int64(&mut self) -> crate::Result<i64> {
self.read_raw_varint64().map(|v| v as i64)
}
/// Read `int32`
pub fn read_int32(&mut self) -> crate::Result<i32> {
let v = self.read_int64()?;
i32::try_from(v).map_err(|_| WireError::I32Overflow(v).into())
}
/// Read `uint64`
pub fn read_uint64(&mut self) -> crate::Result<u64> {
self.read_raw_varint64()
}
/// Read `uint32`
pub fn read_uint32(&mut self) -> crate::Result<u32> {
self.read_raw_varint32()
}
/// Read `sint64`
pub fn read_sint64(&mut self) -> crate::Result<i64> {
self.read_uint64().map(decode_zig_zag_64)
}
/// Read `sint32`
pub fn read_sint32(&mut self) -> crate::Result<i32> {
self.read_uint32().map(decode_zig_zag_32)
}
/// Read `fixed64`
pub fn read_fixed64(&mut self) -> crate::Result<u64> {
self.read_raw_little_endian64()
}
/// Read `fixed32`
pub fn read_fixed32(&mut self) -> crate::Result<u32> {
self.read_raw_little_endian32()
}
/// Read `sfixed64`
pub fn read_sfixed64(&mut self) -> crate::Result<i64> {
self.read_raw_little_endian64().map(|v| v as i64)
}
/// Read `sfixed32`
pub fn read_sfixed32(&mut self) -> crate::Result<i32> {
self.read_raw_little_endian32().map(|v| v as i32)
}
/// Read `bool`
pub fn read_bool(&mut self) -> crate::Result<bool> {
self.read_raw_varint64().map(|v| v != 0)
}
pub(crate) fn read_enum_value(&mut self) -> crate::Result<i32> {
self.read_int32()
}
/// Read `enum` as `ProtobufEnum`
pub fn read_enum<E: Enum>(&mut self) -> crate::Result<E> {
let i = self.read_enum_value()?;
match Enum::from_i32(i) {
Some(e) => Ok(e),
None => Err(ProtobufError::WireError(WireError::InvalidEnumValue(E::NAME, i)).into()),
}
}
/// Read `enum` as `ProtobufEnumOrUnknown`
pub fn read_enum_or_unknown<E: Enum>(&mut self) -> crate::Result<EnumOrUnknown<E>> {
Ok(EnumOrUnknown::from_i32(self.read_int32()?))
}
fn read_repeated_packed_fixed_into<T: ProtobufTypeFixed>(
&mut self,
target: &mut Vec<T::ProtobufValue>,
) -> crate::Result<()> {
let len_bytes = self.read_raw_varint64()?;
let reserve = if len_bytes <= READ_RAW_BYTES_MAX_ALLOC as u64 {
(len_bytes as usize) / (T::ENCODED_SIZE as usize)
} else {
// prevent OOM on malformed input
// probably should truncate
READ_RAW_BYTES_MAX_ALLOC / (T::ENCODED_SIZE as usize)
};
target.reserve(reserve);
let old_limit = self.push_limit(len_bytes)?;
while !self.eof()? {
target.push(T::read(self)?);
}
self.pop_limit(old_limit);
Ok(())
}
    /// Read a length-delimited packed field of variable-width elements,
    /// appending the decoded values to `target`.
    fn read_repeated_packed_into<T: ProtobufType>(
        &mut self,
        target: &mut Vec<T::ProtobufValue>,
    ) -> crate::Result<()> {
        let len_bytes = self.read_raw_varint64()?;
        // value is at least 1 bytes, so this is lower bound of element count
        let reserve = if len_bytes <= READ_RAW_BYTES_MAX_ALLOC as u64 {
            len_bytes as usize
        } else {
            // prevent OOM on malformed input: cap the speculative reservation
            // rather than trusting an attacker-controlled length
            READ_RAW_BYTES_MAX_ALLOC
        };
        target.reserve(reserve);
        // Constrain reads to this field's payload, then decode elements until
        // the limit is reached (reported as EOF by `eof()`).
        let old_limit = self.push_limit(len_bytes)?;
        while !self.eof()? {
            target.push(T::read(self)?);
        }
        self.pop_limit(old_limit);
        Ok(())
    }
/// Read repeated packed `double`
pub fn read_repeated_packed_double_into(&mut self, target: &mut Vec<f64>) -> crate::Result<()> {
self.read_repeated_packed_fixed_into::<ProtobufTypeDouble>(target)
}
/// Read repeated packed `float`
pub fn read_repeated_packed_float_into(&mut self, target: &mut Vec<f32>) -> crate::Result<()> {
self.read_repeated_packed_fixed_into::<ProtobufTypeFloat>(target)
}
/// Read repeated packed `int64`
pub fn read_repeated_packed_int64_into(&mut self, target: &mut Vec<i64>) -> crate::Result<()> {
self.read_repeated_packed_into::<ProtobufTypeInt64>(target)
}
/// Read repeated packed `int32`
pub fn read_repeated_packed_int32_into(&mut self, target: &mut Vec<i32>) -> crate::Result<()> {
self.read_repeated_packed_into::<ProtobufTypeInt32>(target)
}
/// Read repeated packed `uint64`
pub fn read_repeated_packed_uint64_into(&mut self, target: &mut Vec<u64>) -> crate::Result<()> {
self.read_repeated_packed_into::<ProtobufTypeUint64>(target)
}
/// Read repeated packed `uint32`
pub fn read_repeated_packed_uint32_into(&mut self, target: &mut Vec<u32>) -> crate::Result<()> {
self.read_repeated_packed_into::<ProtobufTypeUint32>(target)
}
/// Read repeated packed `sint64`
pub fn read_repeated_packed_sint64_into(&mut self, target: &mut Vec<i64>) -> crate::Result<()> {
self.read_repeated_packed_into::<ProtobufTypeSint64>(target)
}
/// Read repeated packed `sint32`
pub fn read_repeated_packed_sint32_into(&mut self, target: &mut Vec<i32>) -> crate::Result<()> {
self.read_repeated_packed_into::<ProtobufTypeSint32>(target)
}
/// Read repeated packed `fixed64`
pub fn read_repeated_packed_fixed64_into(
&mut self,
target: &mut Vec<u64>,
) -> crate::Result<()> {
self.read_repeated_packed_fixed_into::<ProtobufTypeFixed64>(target)
}
/// Read repeated packed `fixed32`
pub fn read_repeated_packed_fixed32_into(
&mut self,
target: &mut Vec<u32>,
) -> crate::Result<()> {
self.read_repeated_packed_fixed_into::<ProtobufTypeFixed32>(target)
}
/// Read repeated packed `sfixed64`
pub fn read_repeated_packed_sfixed64_into(
&mut self,
target: &mut Vec<i64>,
) -> crate::Result<()> {
self.read_repeated_packed_fixed_into::<ProtobufTypeSfixed64>(target)
}
/// Read repeated packed `sfixed32`
pub fn read_repeated_packed_sfixed32_into(
&mut self,
target: &mut Vec<i32>,
) -> crate::Result<()> {
self.read_repeated_packed_fixed_into::<ProtobufTypeSfixed32>(target)
}
/// Read repeated packed `bool`
pub fn read_repeated_packed_bool_into(&mut self, target: &mut Vec<bool>) -> crate::Result<()> {
self.read_repeated_packed_into::<ProtobufTypeBool>(target)
}
/// Read repeated packed enum values into the vector.
pub(crate) fn read_repeated_packed_enum_values_into(
&mut self,
target: &mut Vec<i32>,
) -> crate::Result<()> {
self.read_repeated_packed_into::<ProtobufTypeInt32>(target)
}
    /// Read `UnknownValue`.
    ///
    /// Reads one field value of the given wire type without interpreting it,
    /// preserving the raw representation so unknown fields can be
    /// round-tripped on re-serialization.
    pub fn read_unknown(&mut self, wire_type: WireType) -> crate::Result<UnknownValue> {
        match wire_type {
            WireType::Varint => self.read_raw_varint64().map(|v| UnknownValue::Varint(v)),
            WireType::Fixed64 => self.read_fixed64().map(|v| UnknownValue::Fixed64(v)),
            WireType::Fixed32 => self.read_fixed32().map(|v| UnknownValue::Fixed32(v)),
            WireType::LengthDelimited => {
                let len = self.read_raw_varint32()?;
                self.read_raw_bytes(len)
                    .map(|v| UnknownValue::LengthDelimited(v))
            }
            // Remaining wire types (the deprecated group delimiters) cannot be
            // captured as a standalone value.
            _ => Err(ProtobufError::WireError(WireError::UnexpectedWireType(wire_type)).into()),
        }
    }
/// Skip field
pub fn skip_field(&mut self, wire_type: WireType) -> crate::Result<()> {
self.read_unknown(wire_type).map(|_| ())<|fim▁hole|> pub fn read_raw_bytes_into(&mut self, count: u32, target: &mut Vec<u8>) -> crate::Result<()> {
self.source.read_exact_to_vec(count as usize, target)
}
/// Read exact number of bytes
pub fn read_raw_bytes(&mut self, count: u32) -> crate::Result<Vec<u8>> {
let mut r = Vec::new();
self.read_raw_bytes_into(count, &mut r)?;
Ok(r)
}
/// Skip exact number of bytes
pub fn skip_raw_bytes(&mut self, count: u32) -> crate::Result<()> {
self.source.skip_bytes(count)
}
/// Read `bytes` field, length delimited
pub fn read_bytes(&mut self) -> crate::Result<Vec<u8>> {
let mut r = Vec::new();
self.read_bytes_into(&mut r)?;
Ok(r)
}
/// Read `bytes` field, length delimited
#[cfg(feature = "bytes")]
pub fn read_tokio_bytes(&mut self) -> crate::Result<Bytes> {
let len = self.read_raw_varint32()?;
self.read_raw_tokio_bytes(len as usize)
}
/// Read `string` field, length delimited
#[cfg(feature = "bytes")]
pub fn read_tokio_chars(&mut self) -> crate::Result<Chars> {
let bytes = self.read_tokio_bytes()?;
Ok(Chars::from_bytes(bytes).map_err(ProtobufError::Utf8)?)
}
/// Read `bytes` field, length delimited
pub fn read_bytes_into(&mut self, target: &mut Vec<u8>) -> crate::Result<()> {
let len = self.read_raw_varint32()?;
self.read_raw_bytes_into(len, target)?;
Ok(())
}
/// Read `string` field, length delimited
pub fn read_string(&mut self) -> crate::Result<String> {
let mut r = String::new();
self.read_string_into(&mut r)?;
Ok(r)
}
/// Read `string` field, length delimited
pub fn read_string_into(&mut self, target: &mut String) -> crate::Result<()> {
target.clear();
// take target's buffer
let mut vec = mem::replace(target, String::new()).into_bytes();
self.read_bytes_into(&mut vec)?;
let s = match String::from_utf8(vec) {
Ok(t) => t,
Err(_) => return Err(ProtobufError::WireError(WireError::Utf8Error).into()),
};
*target = s;
Ok(())
}
    /// Read a length-delimited nested message and merge it into `message`,
    /// without checking that required fields are initialized.
    pub fn merge_message<M: Message>(&mut self, message: &mut M) -> crate::Result<()> {
        self.incr_recursion()?;
        // RAII guard: guarantees the recursion counter is decremented even
        // when parsing the nested message returns early with an error.
        struct DecrRecursion<'a, 'b>(&'a mut CodedInputStream<'b>);
        impl<'a, 'b> Drop for DecrRecursion<'a, 'b> {
            fn drop(&mut self) {
                self.0.decr_recursion();
            }
        }
        let mut decr = DecrRecursion(self);
        // A nested message is encoded as a varint length followed by its body;
        // push a limit so the inner parse cannot read past the message end.
        let len = decr.0.read_raw_varint64()?;
        let old_limit = decr.0.push_limit(len)?;
        message.merge_from(&mut decr.0)?;
        decr.0.pop_limit(old_limit);
        Ok(())
    }
/// Like `merge_message`, but for dynamic messages.
pub fn merge_message_dyn(&mut self, message: &mut dyn MessageDyn) -> crate::Result<()> {
let len = self.read_raw_varint64()?;
let old_limit = self.push_limit(len)?;
message.merge_from_dyn(self)?;
self.pop_limit(old_limit);
Ok(())
}
/// Read message
pub fn read_message<M: Message>(&mut self) -> crate::Result<M> {
let mut r: M = Message::new();
self.merge_message(&mut r)?;
r.check_initialized()?;
Ok(r)
}
/// Read message.
pub fn read_message_dyn(
&mut self,
descriptor: &MessageDescriptor,
) -> crate::Result<Box<dyn MessageDyn>> {
let mut r = descriptor.new_instance();
self.merge_message_dyn(&mut *r)?;
r.check_initialized_dyn()?;
Ok(r)
}
}
impl<'a> Read for CodedInputStream<'a> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.source.read(buf).map_err(Into::into)
}
}
impl<'a> BufRead for CodedInputStream<'a> {
fn fill_buf(&mut self) -> io::Result<&[u8]> {
self.source.fill_buf().map_err(Into::into)
}
fn consume(&mut self, amt: usize) {
self.source.consume(amt)
}
}
#[cfg(test)]
mod test {
use std::fmt::Debug;
use std::io;
use std::io::BufRead;
use std::io::Read;
use super::CodedInputStream;
use super::READ_RAW_BYTES_MAX_ALLOC;
use crate::error::ProtobufError;
use crate::hex::decode_hex;
fn test_read_partial<F>(hex: &str, mut callback: F)
where
F: FnMut(&mut CodedInputStream),
{
let d = decode_hex(hex);
let mut reader = io::Cursor::new(d);
let mut is = CodedInputStream::from_buffered_reader(&mut reader as &mut dyn BufRead);
assert_eq!(0, is.pos());
callback(&mut is);
}
fn test_read<F>(hex: &str, mut callback: F)
where
F: FnMut(&mut CodedInputStream),
{
let len = decode_hex(hex).len();
test_read_partial(hex, |reader| {
callback(reader);
assert!(reader.eof().expect("eof"));
assert_eq!(len as u64, reader.pos());
});
}
fn test_read_v<F, V>(hex: &str, v: V, mut callback: F)
where
F: FnMut(&mut CodedInputStream) -> crate::Result<V>,
V: PartialEq + Debug,
{
test_read(hex, |reader| {
assert_eq!(v, callback(reader).unwrap());
});
}
#[test]
fn test_input_stream_read_raw_byte() {
test_read("17", |is| {
assert_eq!(23, is.read_raw_byte().unwrap());
});
}
#[test]
fn test_input_stream_read_raw_varint() {
test_read_v("07", 7, |reader| reader.read_raw_varint32());
test_read_v("07", 7, |reader| reader.read_raw_varint64());
test_read_v("96 01", 150, |reader| reader.read_raw_varint32());
test_read_v("96 01", 150, |reader| reader.read_raw_varint64());
test_read_v(
"ff ff ff ff ff ff ff ff ff 01",
0xffffffffffffffff,
|reader| reader.read_raw_varint64(),
);
test_read("ff ff ff ff ff ff ff ff ff 02", |is| {
assert!(is.read_raw_varint64().is_err());
});
test_read_v("ff ff ff ff 0f", 0xffffffff, |reader| {
reader.read_raw_varint32()
});
test_read_v("ff ff ff ff 0f", 0xffffffff, |reader| {
reader.read_raw_varint64()
});
}
#[test]
fn test_input_stream_read_raw_vaint_malformed() {
// varint cannot have length > 10
test_read_partial("ff ff ff ff ff ff ff ff ff ff 01", |reader| {
let error = reader.read_raw_varint64().unwrap_err().0;
match *error {
ProtobufError::WireError(..) => (),
_ => panic!(),
}
});
test_read_partial("ff ff ff ff ff ff ff ff ff ff 01", |reader| {
let error = reader.read_raw_varint32().unwrap_err().0;
match *error {
ProtobufError::WireError(..) => (),
_ => panic!(),
}
});
}
#[test]
fn test_input_stream_read_raw_varint_unexpected_eof() {
test_read_partial("96 97", |reader| {
let error = reader.read_raw_varint32().unwrap_err().0;
match *error {
ProtobufError::WireError(..) => (),
_ => panic!(),
}
});
}
#[test]
fn test_input_stream_read_raw_varint_pos() {
test_read_partial("95 01 98", |reader| {
assert_eq!(149, reader.read_raw_varint32().unwrap());
assert_eq!(2, reader.pos());
});
}
#[test]
fn test_input_stream_read_int32() {
test_read_v("02", 2, |reader| reader.read_int32());
}
#[test]
fn test_input_stream_read_float() {
test_read_v("95 73 13 61", 17e19, |is| is.read_float());
}
#[test]
fn test_input_stream_read_double() {
test_read_v("40 d5 ab 68 b3 07 3d 46", 23e29, |is| is.read_double());
}
#[test]
fn test_input_stream_skip_raw_bytes() {
test_read("", |reader| {
reader.skip_raw_bytes(0).unwrap();
});
test_read("aa bb", |reader| {
reader.skip_raw_bytes(2).unwrap();
});
test_read("aa bb cc dd ee ff", |reader| {
reader.skip_raw_bytes(6).unwrap();
});
}
#[test]
fn test_input_stream_read_raw_bytes() {
test_read("", |reader| {
assert_eq!(
Vec::from(&b""[..]),
reader.read_raw_bytes(0).expect("read_raw_bytes")
);
})
}
#[test]
fn test_input_stream_limits() {
test_read("aa bb cc", |is| {
let old_limit = is.push_limit(1).unwrap();
assert_eq!(1, is.bytes_until_limit());
let r1 = is.read_raw_bytes(1).unwrap();
assert_eq!(&[0xaa as u8], &r1[..]);
is.pop_limit(old_limit);
let r2 = is.read_raw_bytes(2).unwrap();
assert_eq!(&[0xbb as u8, 0xcc], &r2[..]);
});
}
#[test]
fn test_input_stream_io_read() {
test_read("aa bb cc", |is| {
let mut buf = [0; 3];
assert_eq!(Read::read(is, &mut buf).expect("io::Read"), 3);
assert_eq!(buf, [0xaa, 0xbb, 0xcc]);
});
}
#[test]
fn test_input_stream_io_bufread() {
test_read("aa bb cc", |is| {
assert_eq!(
BufRead::fill_buf(is).expect("io::BufRead::fill_buf"),
&[0xaa, 0xbb, 0xcc]
);
BufRead::consume(is, 3);
});
}
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow for this test.
fn test_input_stream_read_raw_bytes_into_huge() {
let mut v = Vec::new();
for i in 0..READ_RAW_BYTES_MAX_ALLOC + 1000 {
v.push((i % 10) as u8);
}
let mut slice: &[u8] = v.as_slice();
let mut is = CodedInputStream::new(&mut slice);
let mut buf = Vec::new();
is.read_raw_bytes_into(READ_RAW_BYTES_MAX_ALLOC as u32 + 10, &mut buf)
.expect("read");
assert_eq!(READ_RAW_BYTES_MAX_ALLOC + 10, buf.len());
buf.clear();
is.read_raw_bytes_into(1000 - 10, &mut buf).expect("read");
assert_eq!(1000 - 10, buf.len());
assert!(is.eof().expect("eof"));
}
}<|fim▁end|>
|
}
/// Read raw bytes into the supplied vector. The vector will be resized as needed and
/// overwritten.
|
<|file_name|>view_dt.py<|end_file_name|><|fim▁begin|>import django_tables2 as tables
from django_tables2 import RequestConfig
from django_tables2.utils import A # alias for Accessor
from django.shortcuts import render
import inspect
class DtTemplate(tables.Table):<|fim▁hole|> #name_last = tables.LinkColumn('track:runner',args=[A('id')])
#gender = tables.Column(verbose_name="Gender")
#result_count=tables.Column( accessor='result_count',orderable=False,verbose_name="Number of results")
class Meta:
#model = Runner
attrs = {"class": "paleblue"}<|fim▁end|>
|
#name_first = tables.Column(verbose_name="First Name")
|
<|file_name|>hooks.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2014 Nicolas Bessi, Alexandre Fayolle, Camptocamp SA
# Copyright 2016 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
def post_init_hook(cr, registry):
""" Add street3 to address format """
query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street2)s\n',
E'%(street2)s\n%(street3)s\n'
)
"""
cr.execute(query)
def uninstall_hook(cr, registry):
""" Remove street3 from address format """
# Remove %(street3)s\n from address_format<|fim▁hole|> query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street3)s\n',
''
)
"""
cr.execute(query)
# Remove %(street3)s from address_format
query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street3)s',
''
)
"""
cr.execute(query)<|fim▁end|>
| |
<|file_name|>search_highlight.js<|end_file_name|><|fim▁begin|>/* http://www.kryogenix.org/code/browser/searchhi/ */
/* Modified 20021006 to fix query string parsing and add case insensitivity */
/* Modified 20030227 by [email protected] to skip words with "-" and cut %2B (+) preceding pages */
function highlightWord(node,word)
{
// Iterate into this nodes childNodes
if (node.hasChildNodes)
{
var hi_cn;
for (hi_cn=0;hi_cn<node.childNodes.length;hi_cn++)
{
highlightWord(node.childNodes[hi_cn],word);
}
}
// And do this node itself
if (node.nodeType == 3)
{ // text node
tempNodeVal = node.nodeValue.toLowerCase();
tempWordVal = word.toLowerCase();<|fim▁hole|> {
// word has not already been highlighted!
nv = node.nodeValue;
ni = tempNodeVal.indexOf(tempWordVal);
// Create a load of replacement nodes
before = document.createTextNode(nv.substr(0,ni));
docWordVal = nv.substr(ni,word.length);
// alert( "Found: " + docWordVal );
after = document.createTextNode(nv.substr(ni+word.length));
hiwordtext = document.createTextNode(docWordVal);
hiword = document.createElement("span");
hiword.className = "searchword";
hiword.appendChild(hiwordtext);
pn.insertBefore(before,node);
pn.insertBefore(hiword,node);
pn.insertBefore(after,node);
pn.removeChild(node);
}
}
}
}
function googleSearchHighlight()
{
if (!document.createElement) return;
ref = document.referrer; //or URL for highlighting in place
if (ref.indexOf('?') == -1) return;
qs = ref.substr(ref.indexOf('?')+1);
qsa = qs.split('&');
for (i=0;i<qsa.length;i++)
{
qsip = qsa[i].split('=');
if (qsip.length == 1) continue;
// q= for Google, p= for Yahoo
// query= for JSPWiki
if (qsip[0] == 'query' || qsip[0] == 'q')
{
words = qsip[1].replace(/%2B/g,'');
words = words.replace(/-\S+\s/g,'');
words = unescape(words.replace(/\+/g,' ')).split(/\s+/);
for (w=0;w<words.length;w++) {
highlightWord(document.getElementsByTagName("body")[0],words[w]);
}
}
}
}
window.onload = googleSearchHighlight;<|fim▁end|>
|
if (tempNodeVal.indexOf(tempWordVal) != -1)
{
pn = node.parentNode;
if (pn.className != "searchword")
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
class Post(models.Model):
author = models.ForeignKey('auth.User')<|fim▁hole|>
text = models.TextField()
created_date = models.DateTimeField(
default=timezone.now)
published_date = models.DateTimeField(
blank=True, null=True)
    def published (self):
        """Stamp ``published_date`` with the current time and save the post."""
        # NOTE(review): the name reads like a predicate but this performs the
        # publish action — consider renaming to ``publish`` (check callers first).
        self.published_date = timezone.now()
        self.save()
def __str__(self):
return self.title<|fim▁end|>
|
title = models.CharField(max_length=200)
|
<|file_name|>ReturnTypeHandler.spec.ts<|end_file_name|><|fim▁begin|>import 'reflect-metadata';
import { NoReturnValueHandlerFoundError } from '../src/errors/NoReturnValueHandlerFoundError';
import { ReturnTypeHandler } from '../src/ReturnTypeHandler';
import { ReturnType } from '../src/routes/ReturnType';
describe('ReturnTypeHandler', () => {
describe('constructor', () => {
it('should register all given return types', () => {
const types: ReturnType<any>[] = [
{
type: 'default',
getHeaders: () => ({}),
getStatus: () => 0,
getValue: () => '',
},
{
type: 'String',
getHeaders: () => ({}),
getStatus: () => 0,
getValue: () => '',
},
];
const handler = new ReturnTypeHandler(types);
expect(Object.keys((handler as any).returnTypes)).toEqual(expect.arrayContaining(['default', 'String']));
});
it('should overwrite a duplicate return type', () => {
const rt1 = {
type: 'default',<|fim▁hole|> };
const rt2 = {
type: 'default',
getHeaders: () => ({}),
getStatus: () => 0,
getValue: () => '',
};
const types: ReturnType<any>[] = [rt1, rt2];
const handler = new ReturnTypeHandler(types);
expect((handler as any).returnTypes['default']).toBe(rt2);
});
});
describe('handleValue()', () => {
let response: any;
beforeEach(() => {
response = {
status: jest.fn().mockReturnThis(),
set: jest.fn().mockReturnThis(),
send: jest.fn().mockReturnThis(),
end: jest.fn().mockReturnThis(),
} as any;
});
it('should select the default handler if no handler is registered', () => {
const type = {
type: 'default',
getHeaders: jest.fn(),
getStatus: jest.fn(),
getValue: jest.fn(),
};
const handler = new ReturnTypeHandler([type]);
handler.handleValue('foobar', response);
expect(type.getHeaders.mock.calls.length).toBe(1);
});
it('should handle a value with a correct handler', () => {
const type1 = {
type: 'default',
getHeaders: jest.fn(),
getStatus: jest.fn(),
getValue: jest.fn(),
};
const type2 = {
type: 'Number',
getHeaders: jest.fn(),
getStatus: jest.fn(),
getValue: jest.fn(),
};
const handler = new ReturnTypeHandler([type1, type2]);
handler.handleValue(1, response);
expect(type1.getHeaders.mock.calls.length).toBe(0);
expect(type2.getHeaders.mock.calls.length).toBe(1);
});
it('should throw an error if no handler is registered', () => {
const handler = new ReturnTypeHandler([]);
const fn = () => {
handler.handleValue('', response);
};
expect(fn).toThrow(NoReturnValueHandlerFoundError);
});
it('should not call send if the value is not set', () => {
const type = {
type: 'default',
getHeaders: jest.fn(),
getStatus: jest.fn(),
getValue: jest.fn(),
};
const handler = new ReturnTypeHandler([type]);
handler.handleValue(null, response);
expect(response.status.mock.calls.length).toBe(1);
expect(response.set.mock.calls.length).toBe(1);
expect(response.send.mock.calls.length).toBe(0);
});
it('should select the default handler if the value is not set', () => {
const type = {
type: 'default',
getHeaders: jest.fn(),
getStatus: jest.fn(),
getValue: jest.fn(),
};
const handler = new ReturnTypeHandler([type]);
handler.handleValue(null, response);
expect(type.getHeaders.mock.calls.length).toBe(1);
expect(type.getStatus.mock.calls.length).toBe(1);
expect(type.getValue.mock.calls.length).toBe(0);
});
});
});<|fim▁end|>
|
getHeaders: () => ({}),
getStatus: () => 0,
getValue: () => '',
|
<|file_name|>xistream.hpp<|end_file_name|><|fim▁begin|>// vim: bs=2:ts=4:sw=4:tw=80:noexpandtab
#ifndef FST_XISTREAM_HPP
#define FST_XISTREAM_HPP
#include "ex.hpp"
#include <string>
#include <istream>
namespace fst
{
// class that extend/modify istream methods/behaviors
// catch ios_base::failure exceptions and throws io_ex
class xistream
{
public:
// ctor
// del - if true, delete is on dtor
xistream(std::istream *is, bool del = true);
// dtor
virtual ~xistream();
// read up to n bytes
xistream &read(char *buf, std::streamsize n);
// return a reference to istream<|fim▁hole|> void set_is(std::istream *is, bool del = true);
// extration operator
template <typename T>
xistream &operator>>(T &val);
// use >> to extract
// check if it was extracted up to EOF
template <typename T>
xistream &full_extract(T &val);
// use std::getline()
void getline(std::string &s);
std::istream *operator->() const;
protected:
std::istream *_is;
bool _del;
private:
// release current istream
void release_is();
xistream(const xistream &);
xistream operator=(const xistream &);
};
/// templates and inline functions ///
template <typename T>
xistream &xistream::operator>>(T &val)
{
try
{
*_is >> val;
return *this;
}
catch (const std::ios_base::failure &e)
{
throw EX(io_ex, e.what());
}
}
template <typename T>
xistream &xistream::full_extract(T &val)
{
*this >> val;
if (!_is->eof())
throw EX(ex, "full_extract() failed");
return *this;
}
inline std::istream &xistream::get_istream() const
{
return *_is;
}
inline std::istream *xistream::operator->() const
{
return _is;
}
} // fst
#endif<|fim▁end|>
|
std::istream &get_istream() const;
// set another istream
|
<|file_name|>module.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
let fs = require("fs")
, chalk = require("chalk");
module.exports = function(name) {
let file = process.env.CONFIG_PATH + "initializers/" + name;
if (!fs.existsSync(file + ".js")) console.log(chalk.red("\tInitializer", name + ".js not found, add it on /config/initializers"));
return require(file);
};<|fim▁end|>
|
"use strict";
|
<|file_name|>sensor_base.py<|end_file_name|><|fim▁begin|>"""Support for the Philips Hue sensors as a platform."""
from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any
from aiohue import AiohueException, Unauthorized
from aiohue.v1.sensors import TYPE_ZLL_PRESENCE
import async_timeout
from homeassistant.components.sensor import SensorStateClass
from homeassistant.core import callback
from homeassistant.helpers import debounce, entity
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from ..const import REQUEST_REFRESH_DELAY
from .helpers import remove_devices
from .hue_event import EVENT_CONFIG_MAP
from .sensor_device import GenericHueDevice
SENSOR_CONFIG_MAP: dict[str, Any] = {}
LOGGER = logging.getLogger(__name__)
def _device_id(aiohue_sensor):
    """Return the device ID shared by sibling sensors of one physical device.

    Hue exposes one physical device as several API sensors whose unique IDs
    share the same first 23 characters, so truncate to that common prefix.
    Falsy unique IDs (e.g. ``None``) are passed through unchanged.
    """
    unique_id = aiohue_sensor.uniqueid
    if not unique_id or len(unique_id) <= 23:
        return unique_id
    return unique_id[:23]
class SensorManager:
"""Class that handles registering and updating Hue sensor entities.
Intended to be a singleton.
"""
SCAN_INTERVAL = timedelta(seconds=5)
def __init__(self, bridge):
"""Initialize the sensor manager."""
self.bridge = bridge
self._component_add_entities = {}
self.current = {}
self.current_events = {}
self._enabled_platforms = ("binary_sensor", "sensor")
self.coordinator = DataUpdateCoordinator(
bridge.hass,
LOGGER,
name="sensor",
update_method=self.async_update_data,
update_interval=self.SCAN_INTERVAL,
request_refresh_debouncer=debounce.Debouncer(
bridge.hass, LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True
),
)
    async def async_update_data(self):
        """Fetch the latest sensor state from the Hue bridge.

        Raises ``UpdateFailed`` so the ``DataUpdateCoordinator`` can surface
        bridge errors and authorization loss uniformly.
        """
        try:
            # Bound the bridge round-trip so a stalled request cannot block
            # the coordinator's refresh cycle indefinitely.
            async with async_timeout.timeout(4):
                return await self.bridge.async_request_call(
                    self.bridge.api.sensors.update
                )
        except Unauthorized as err:
            # Credentials were revoked (e.g. bridge reset): start reauth handling.
            await self.bridge.handle_unauthorized_error()
            raise UpdateFailed("Unauthorized") from err
        except AiohueException as err:
            raise UpdateFailed(f"Hue error: {err}") from err
async def async_register_component(self, platform, async_add_entities):
"""Register async_add_entities methods for components."""
self._component_add_entities[platform] = async_add_entities
if len(self._component_add_entities) < len(self._enabled_platforms):
LOGGER.debug("Aborting start with %s, waiting for the rest", platform)
return
# We have all components available, start the updating.
self.bridge.reset_jobs.append(
self.coordinator.async_add_listener(self.async_update_items)
)
await self.coordinator.async_refresh()
@callback
def async_update_items(self):
"""Update sensors from the bridge."""
api = self.bridge.api.sensors
if len(self._component_add_entities) < len(self._enabled_platforms):
return
to_add = {}
primary_sensor_devices = {}
current = self.current
# Physical Hue motion sensors present as three sensors in the API: a
# presence sensor, a temperature sensor, and a light level sensor. Of
# these, only the presence sensor is assigned the user-friendly name
# that the user has given to the device. Each of these sensors is
# linked by a common device_id, which is the first twenty-three
# characters of the unique id (then followed by a hyphen and an ID
# specific to the individual sensor).
#
# To set up neat values, and assign the sensor entities to the same
# device, we first, iterate over all the sensors and find the Hue
# presence sensors, then iterate over all the remaining sensors -
# finding the remaining ones that may or may not be related to the
# presence sensors.
for item_id in api:
if api[item_id].type != TYPE_ZLL_PRESENCE:
continue
primary_sensor_devices[_device_id(api[item_id])] = api[item_id]
# Iterate again now we have all the presence sensors, and add the
# related sensors with nice names where appropriate.
for item_id in api:
uniqueid = api[item_id].uniqueid
if current.get(uniqueid, self.current_events.get(uniqueid)) is not None:
continue
sensor_type = api[item_id].type
# Check for event generator devices
event_config = EVENT_CONFIG_MAP.get(sensor_type)
if event_config is not None:
base_name = api[item_id].name
name = event_config["name_format"].format(base_name)
new_event = event_config["class"](api[item_id], name, self.bridge)
self.bridge.hass.async_create_task(
new_event.async_update_device_registry()
)
self.current_events[uniqueid] = new_event
sensor_config = SENSOR_CONFIG_MAP.get(sensor_type)
if sensor_config is None:
continue
base_name = api[item_id].name<|fim▁hole|> name = sensor_config["name_format"].format(base_name)
current[uniqueid] = sensor_config["class"](
api[item_id], name, self.bridge, primary_sensor=primary_sensor
)
to_add.setdefault(sensor_config["platform"], []).append(current[uniqueid])
self.bridge.hass.async_create_task(
remove_devices(
self.bridge,
[value.uniqueid for value in api.values()],
current,
)
)
for platform, value in to_add.items():
self._component_add_entities[platform](value)
class GenericHueSensor(GenericHueDevice, entity.Entity):
    """Representation of a Hue sensor."""

    # State is pushed from the shared coordinator; no per-entity polling.
    should_poll = False

    @property
    def available(self):
        """Return if sensor is available."""
        return self.bridge.sensor_manager.coordinator.last_update_success and (
            self.allow_unreachable
            # remotes like Hue Tap (ZGPSwitchSensor) have no _reachability_
            or self.sensor.config.get("reachable", True)
        )

    @property
    def state_class(self):
        """Return the state class of this entity, from STATE_CLASSES, if any."""
        return SensorStateClass.MEASUREMENT

    async def async_added_to_hass(self):
        """When entity is added to hass."""
        await super().async_added_to_hass()
        # Re-render this entity whenever the coordinator has fresh data;
        # the listener is removed automatically when the entity goes away.
        self.async_on_remove(
            self.bridge.sensor_manager.coordinator.async_add_listener(
                self.async_write_ha_state
            )
        )

    async def async_update(self):
        """Update the entity.

        Only used by the generic entity update service.
        """
        await self.bridge.sensor_manager.coordinator.async_request_refresh()
class GenericZLLSensor(GenericHueSensor):
    """Representation of a Hue-brand, physical (ZLL) sensor."""

    @property
    def extra_state_attributes(self):
        """Expose the hardware battery level as a state attribute."""
        battery = self.sensor.battery
        return dict(battery_level=battery)
|
primary_sensor = primary_sensor_devices.get(_device_id(api[item_id]))
if primary_sensor is not None:
base_name = primary_sensor.name
|
<|file_name|>Family.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package HW09_101044044;
/**
* CSE241_HW09 Samet Sait Talayhan 101044044
*/
/**
* @author talayhan
*
*/
public class Family {
// Data fields
private Person father = null;
private Person mother = null;
private Person [] children;
/** The current size of the children array */
private int size = 0;
/** The current size of the array */
private final static int INITIAL_CAPACITY = 10;
/** T */
private int capacity = INITIAL_CAPACITY;
public static int numberOfFamilies = 0;
/**
* No parameter constructor. */
public Family(){
++numberOfFamilies; //increase family number
children = new Person [INITIAL_CAPACITY];
}
/**
* Two paramater constructor. */
public Family(Person father, Person mother){
if(father.getGender().equals(mother.getGender())){
throw new IllegalArgumentException("Father and Mother should have different" +
" gender!");
}
this.father = father;
this.mother = mother;
children = new Person [INITIAL_CAPACITY];
++numberOfFamilies;
}
/**
* Allocate a new array to hold */
private void reallocate(){
capacity = capacity * 2;
Person[] newArray = new Person[capacity];
System.arraycopy(children, 0, newArray, 0, children.length);
children = newArray;
}
/**
* Method at() returns the child at the given index.
* @param int index
* @return Person at the given index. */
public Person at(int index){
// Validty check
if(index < 0 || index >= size){
throw new IndexOutOfBoundsException("Invalid index!");
}
return children[index];
}
/**
* Method add() that adds the Person as a child
* to the family.
* @param Person as a child. */
public void add(Person child){
if (size >= capacity) {
reallocate();
}
children[size] = child;
++size;
}
/*
* Method compareTo() comparing two families, and
* returns true if two families are equal.
* @return boolean
<|fim▁hole|> * @param Family object
* **/
public boolean compareTo(Family other){
if(this.hashCode() == other.hashCode()){
return true;
}
return false;
}
/**
* Override method toString()
* @return String
* */
public String toString(){
String s = "\t\tFamily Informations\n";
s += "Father: \n" + father.toString();
s += "Mother: \n" + mother.toString();
for (int i = 0; i < size; i++) {
s += "Child" + i + ":\n" + children[i].toString();
}
return s;
}
/**
* Method isRelative() returns true if one of the persons is a
* relative of the other by equals sirname the family array list.
* @param get two person objects and family objects array
* @return boolean type, true or false.
* */
public static boolean isRelative(Person one, Person two, Family[] families){
if(one.getLastName() == two.getLastName())
return true;
for (int i = 0; i < families.length; i++) {
String comp = families[i].father.getLastName();
if(comp == one.getLastName() ||
comp == two.getLastName()){
return true;
}
}
return false;
}
//Gettters and setters methods.
public Person getFather() {
return father;
}
public void setFather(Person father) {
this.father = father;
}
public Person getMother() {
return mother;
}
public void setMother(Person mother) {
this.mother = mother;
}
public Person[] getChildren() {
return children;
}
public void setChildren(Person[] children) {
this.children = children;
}
public static int getNumberOfFamilies() {
return numberOfFamilies;
}
// Actually, Java take cares of all clean up resources,
// But we should decrease number of families in destructor,
protected void finalize(){
--numberOfFamilies;
}
}<|fim▁end|>
| |
<|file_name|>window_size.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicIsize;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::task::Poll;
use super::atomic_box_option::AtomicBoxOption;
use super::waiters::*;
use crate::debug_undebug::DebugUndebug;
use futures::future;
use futures::task::Context;
use std::fmt;
/// Connection-level flow-control state shared between the sender and
/// all of its streams.
#[derive(Debug)]
struct ConnOutWindowShared {
    /// Remaining connection window; may go negative.
    window_size: AtomicIsize,
    /// Set to true when the `ConnOutWindowSender` is dropped.
    closed: AtomicBool,
}
struct StreamWindowShared {
conn: Arc<ConnOutWindowShared>,
task: AtomicBoxOption<std::task::Waker>,
closed: AtomicBool,<|fim▁hole|>}
// Manual Debug impl: the `task` field holds a stored waker that is not
// Debug itself, so it is rendered through the DebugUndebug wrapper.
impl fmt::Debug for StreamWindowShared {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Destructure so the compiler flags any field added later but
        // forgotten here.
        let StreamWindowShared {
            conn,
            task,
            closed,
            window_size,
        } = self;
        f.debug_struct("StreamWindowShared")
            .field("conn", conn)
            .field("task", &DebugUndebug(task))
            .field("closed", closed)
            .field("window_size", window_size)
            .finish()
    }
}
/// Connection-level window updater; owns the waker used to park/unpark
/// streams waiting for connection window space.
pub struct ConnOutWindowSender {
    waker: Waker,
    shared: Arc<ConnOutWindowShared>,
}

impl Drop for ConnOutWindowSender {
    fn drop(&mut self) {
        // Mark the connection window closed and wake every parked stream
        // so they observe the closure instead of waiting forever.
        self.shared.closed.store(true, Ordering::SeqCst);
        self.waker.wake_all();
    }
}
/// Receiver-side handle to the connection window state.
/// NOTE(review): not referenced in the rest of this module as shown —
/// confirm it is still needed.
struct ConnOutWindowReceiver {
    shared: Arc<ConnOutWindowShared>,
}
/// Stream-level window updater.
pub struct StreamOutWindowSender {
    shared: Arc<StreamWindowShared>,
}

impl Drop for StreamOutWindowSender {
    fn drop(&mut self) {
        // Mark the stream closed, then wake its parked task (if any) so
        // the receiver can observe the closure.
        self.shared.closed.store(true, Ordering::SeqCst);
        if let Some(task) = self.shared.task.swap_null(Ordering::SeqCst) {
            task.wake();
        }
    }
}
/// Stream-side receiver: waits until both the stream window and the
/// connection window allow sending.
pub struct StreamOutWindowReceiver {
    conn_waiter: Waiter,
    shared: Arc<StreamWindowShared>,
}
// Manual Debug impl: `conn_waiter` is not Debug, render a placeholder.
impl fmt::Debug for StreamOutWindowReceiver {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("StreamOutWindowReceiver")
            .field("conn_waiter", &"...")
            .field("shared", &self.shared)
            .finish()
    }
}
impl ConnOutWindowSender {
    /// Create a connection window with `size` bytes available.
    pub fn new(size: u32) -> ConnOutWindowSender {
        ConnOutWindowSender {
            waker: Waker::new(),
            shared: Arc::new(ConnOutWindowShared {
                window_size: AtomicIsize::new(size as isize),
                closed: AtomicBool::new(false),
            }),
        }
    }

    /// Create the sender/receiver pair for a new stream with an initial
    /// stream window of `initial` bytes, linked to this connection window.
    pub fn new_stream(&self, initial: u32) -> (StreamOutWindowSender, StreamOutWindowReceiver) {
        let shared = Arc::new(StreamWindowShared {
            conn: self.shared.clone(),
            window_size: AtomicIsize::new(initial as isize),
            task: AtomicBoxOption::new(),
            closed: AtomicBool::new(false),
        });
        let sender = StreamOutWindowSender {
            shared: shared.clone(),
        };
        let receiver = StreamOutWindowReceiver {
            conn_waiter: self.waker.new_waiter(),
            shared: shared,
        };
        (sender, receiver)
    }

    /// Current connection window size (may be negative).
    pub fn get(&self) -> isize {
        self.shared.window_size.load(Ordering::SeqCst) as isize
    }

    /// Grow the connection window by `size` bytes and wake parked
    /// streams if the window became positive.
    pub fn increase(&self, size: usize) {
        // Guard against wrap-around when converting to isize below.
        assert!(size <= isize::max_value() as usize);
        let old_size = self
            .shared
            .window_size
            .fetch_add(size as isize, Ordering::SeqCst);
        let new_size = old_size + size as isize;
        if new_size > 0 {
            // There is room to send again: wake every waiting stream.
            self.waker.wake_all();
        }
    }
}
impl StreamOutWindowSender {
    /// `size` can be negative when INITIAL_WINDOW_SIZE
    /// setting changes to lower value.
    pub fn increase(&self, size: isize) {
        let old_size = self
            .shared
            .window_size
            .fetch_add(size as isize, Ordering::SeqCst);
        let new_size = old_size + size as isize;
        if new_size > 0 {
            // Window became positive: wake the stream's parked task, if any.
            if let Some(task) = self.shared.task.swap_null(Ordering::SeqCst) {
                task.wake();
            }
        }
    }

    /// Current stream window size (may be negative).
    pub fn get(&self) -> isize {
        self.shared.window_size.load(Ordering::SeqCst) as isize
    }
}
/// Marker: the connection-level window sender is gone.
struct ConnDead;

/// Which side died while a stream was waiting for window space.
#[derive(Eq, PartialEq, Debug)]
pub(crate) enum StreamDead {
    Stream,
    Conn,
}

impl From<ConnDead> for StreamDead {
    fn from(_: ConnDead) -> StreamDead {
        StreamDead::Conn
    }
}
impl StreamOutWindowReceiver {
    /// Consume `size` bytes from both the connection window and the
    /// stream window (both may go negative).
    pub fn decrease(&self, size: usize) {
        self.shared
            .conn
            .window_size
            .fetch_sub(size as isize, Ordering::SeqCst);
        self.shared
            .window_size
            .fetch_sub(size as isize, Ordering::SeqCst);
    }

    /// Error out if the connection-level sender has been dropped.
    /// NOTE(review): this load uses Relaxed while the sibling checks use
    /// SeqCst — confirm the weaker ordering is intentional.
    fn check_conn_closed(&self) -> crate::Result<()> {
        if self.shared.conn.closed.load(Ordering::Relaxed) {
            // TODO: better error
            Err(crate::Error::ClientControllerDied)
        } else {
            Ok(())
        }
    }

    /// Error out if either the connection or this stream is closed.
    fn check_stream_closed(&self) -> crate::Result<()> {
        self.check_conn_closed()?;
        if self.shared.closed.load(Ordering::SeqCst) {
            // TODO: better error
            Err(crate::Error::ClientControllerDied)
        } else {
            Ok(())
        }
    }

    /// Wait for the connection window to become positive.
    fn poll_conn(&self, cx: &mut Context<'_>) -> Poll<crate::Result<()>> {
        self.check_conn_closed()?;
        if self.shared.conn.window_size.load(Ordering::SeqCst) > 0 {
            return Poll::Ready(Ok(()));
        }
        // Park first, then re-check: avoids losing a wakeup that races
        // with the registration.
        self.conn_waiter.park(cx);
        self.check_conn_closed()?;
        if self.shared.conn.window_size.load(Ordering::SeqCst) > 0 {
            Poll::Ready(Ok(()))
        } else {
            Poll::Pending
        }
    }

    /// Wait until both the stream window and the connection window are
    /// positive (ready to send).
    pub fn poll(&self, cx: &mut Context<'_>) -> Poll<crate::Result<()>> {
        self.check_stream_closed()?;
        if self.shared.window_size.load(Ordering::SeqCst) <= 0 {
            // Store the waker first, then re-check, so a concurrent
            // increase() cannot slip between check and registration.
            self.shared
                .task
                .store_box(Box::new(cx.waker().clone()), Ordering::SeqCst);
            self.check_stream_closed()?;
            if self.shared.window_size.load(Ordering::SeqCst) <= 0 {
                return Poll::Pending;
            }
        }
        self.poll_conn(cx).map_err(|e| e.into())
    }

    /// Future-returning convenience wrapper around `poll`.
    pub async fn poll_f(&self) -> crate::Result<()> {
        future::poll_fn(|cx| self.poll(cx)).await
    }
}
}<|fim▁end|>
|
window_size: AtomicIsize,
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate curl;
extern crate tokio_core;<|fim▁hole|>
use std::io::{self, Write, BufWriter};
use curl::easy::Easy;
use futures::future::*;
use std::fs::File;
use futures_cpupool::CpuPool;
use std::sync::{Mutex, Arc};
use futures::{Future, Stream, Async};
use futures::stream::futures_unordered;
use tokio_core::reactor::Core;
use tokio_curl::{Session, Perform};
/// Write `data` to ./data/<x>.txt and return the number of bytes written.
/// NOTE(review): panics if the ./data directory does not exist.
fn make_file(x: i32, data: &mut Vec<u8>) -> usize {
    let f = File::create(format!("./data/{}.txt", x)).expect("Unable to open file");
    let mut writer = BufWriter::new(&f);
    writer.write_all(data.as_mut_slice()).unwrap();
    data.len()
}
/// Build a curl GET request and hand it to the tokio-curl session.
/// NOTE(review): the `url` parameter is unused — the request is
/// hard-coded to https://www.rust-lang.org, and `data` is always empty
/// when make_file writes it (the write_function discards the body).
fn collect_request(x: i32, url: &str, sess: &Session) -> FutureResult<Perform, ()> {
    let mut data = Vec::new();
    let mut easy = Easy::new();
    easy.get(true).unwrap();
    easy.url("https://www.rust-lang.org").unwrap();
    easy.write_function(|data| Ok(data.len())).unwrap();
    make_file(x, &mut data);
    ok(sess.perform(easy))
}
fn main() {
    // NOTE(review): this url is never used by collect_request as written.
    let url = "https://en.wikipedia.org/wiki/Immanuel_Kant";
    let mut core = Core::new().unwrap();
    let handle = core.handle();
    let pool = CpuPool::new_num_cpus();
    let session = Session::new(handle);
    // Spawn 20 request futures onto the CPU pool.
    let requests = (0..20).into_iter().map(|x| {
        pool.spawn(collect_request(x, url, &session))
    });
    // NOTE(review): `performed` is built but never driven — core.run()
    // is never called, so no request actually executes.
    let performed = futures_unordered(requests).into_future();
}
// let out = requests.into_stream().wait();<|fim▁end|>
|
extern crate tokio_curl;
extern crate fibers;
extern crate futures;
extern crate futures_cpupool;
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# nala - file/directory watcher
# Copyright (C) 2013 Eugenio "g7" Paolantonio <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from distutils.core import setup
# Reconstructed setup() call: the original block was scrambled by
# extraction (packages/author fields displaced below the call).
setup(name='nala',
      version='0.0.1',
      description='file/directory watcher libraries',
      author='Eugenio Paolantonio',
      author_email='[email protected]',
      url='https://github.com/g7/python-nala',
      packages=[
          "nala"
      ],
      requires=['gi.repository.GLib', 'gi.repository.GObject', 'gi.repository.Gio']
      )
<|file_name|>loyalty-outlined.js<|end_file_name|><|fim▁begin|>import { h } from 'omi';
import createSvgIcon from './utils/createSvgIcon';
// Reconstructed icon definition: the original block was scrambled by
// extraction (the circle element was displaced below the export).
export default createSvgIcon(h(h.f, null, h("path", {
  d: "M21.41 11.58l-9-9C12.05 2.22 11.55 2 11 2H4c-1.1 0-2 .9-2 2v7c0 .55.22 1.05.59 1.42l9 9c.36.36.86.58 1.41.58s1.05-.22 1.41-.59l7-7c.37-.36.59-.86.59-1.41s-.23-1.06-.59-1.42zM13 20.01L4 11V4h7v-.01l9 9-7 7.02z"
}), h("circle", {
  cx: "6.5",
  cy: "6.5",
  r: "1.5"
}), h("path", {
  d: "M8.9 12.55c0 .57.23 1.07.6 1.45l3.5 3.5 3.5-3.5c.37-.37.6-.89.6-1.45 0-1.13-.92-2.05-2.05-2.05-.57 0-1.08.23-1.45.6l-.6.6-.6-.59c-.37-.38-.89-.61-1.45-.61-1.13 0-2.05.92-2.05 2.05z"
})), 'LoyaltyOutlined');
<|file_name|>utils_libguestfs.py<|end_file_name|><|fim▁begin|>"""
libguestfs tools test utility functions.
"""
import logging
from autotest.client import os_dep, utils
from autotest.client.shared import error
import propcan
class LibguestfsCmdError(Exception):
    """Raised when a libguestfs-tool command fails."""

    def __init__(self, details=''):
        super(LibguestfsCmdError, self).__init__()
        # Human-readable failure description shown by str().
        self.details = details

    def __str__(self):
        return str(self.details)
def lgf_cmd_check(cmd):
    """
    To check whether the cmd is supported on this host.

    @param cmd: the cmd to use a libguest tool.
    @return: None if the cmd is known but not installed on this host,
             otherwise return its absolute path.
    @raise LibguestfsCmdError: if cmd is not a known libguestfs tool.
    """
    # Known libguestfs tool names; anything else is rejected outright.
    libguestfs_cmds = ['libguestfs_test_tool', 'guestfish', 'guestmount',
                       'virt-alignment-scan', 'virt-cat', 'virt-copy-in',
                       'virt-copy-out', 'virt-df', 'virt-edit',
                       'virt-filesystems', 'virt-format', 'virt-inspector',
                       'virt-list-filesystems', 'virt-list-partitions',
                       'virt-ls', 'virt-make-fs', 'virt-rescue',
                       'virt-resize', 'virt-sparsify', 'virt-sysprep',
                       'virt-tar', 'virt-tar-in', 'virt-tar-out',
                       'virt-win-reg']

    if not (cmd in libguestfs_cmds):
        raise LibguestfsCmdError("Command %s is not supported by libguestfs yet." % cmd)

    try:
        # os_dep.command() resolves the binary path or raises ValueError.
        return os_dep.command(cmd)
    except ValueError:
        logging.warning("You have not installed %s on this host.", cmd)
        return None
def lgf_command(cmd, **dargs):
    """
    Interface of libguestfs tools' commands.

    @param cmd: Command line to execute.
    @param dargs: standardized command keywords:
                  ignore_status (bool, default True),
                  debug (bool, default False),
                  timeout (seconds, default 60).
    @return: CmdResult object.
    @raise: LibguestfsCmdError if non-zero exit status
            and ignore_status=False
    """
    ignore_status = dargs.get('ignore_status', True)
    debug = dargs.get('debug', False)
    timeout = dargs.get('timeout', 60)
    if debug:
        logging.debug("Running command %s in debug mode.", cmd)

    # Raise exception if ignore_status == False
    try:
        ret = utils.run(cmd, ignore_status=ignore_status,
                        verbose=debug, timeout=timeout)
    except error.CmdError, detail:
        # Wrap autotest's CmdError in this module's exception type.
        raise LibguestfsCmdError(detail)

    if debug:
        logging.debug("status: %s", ret.exit_status)
        logging.debug("stdout: %s", ret.stdout.strip())
        logging.debug("stderr: %s", ret.stderr.strip())

    # Return CmdResult instance when ignore_status is True
    return ret
class LibguestfsBase(propcan.PropCanBase):
    """
    Base class of libguestfs tools.

    Holds the standardized execution options (ignore_status, debug,
    timeout) as propcan-managed properties.
    """

    __slots__ = ('ignore_status', 'debug', 'timeout')

    def __init__(self, *args, **dargs):
        # Fill in defaults for any option the caller did not supply.
        init_dict = dict(*args, **dargs)
        init_dict['ignore_status'] = init_dict.get('ignore_status', True)
        init_dict['debug'] = init_dict.get('debug', False)
        init_dict['timeout'] = init_dict.get('timeout', 60)
        super(LibguestfsBase, self).__init__(init_dict)

    def set_ignore_status(self, ignore_status):
        """
        Enforce setting ignore_status as a boolean.
        """
        if bool(ignore_status):
            self.dict_set('ignore_status', True)
        else:
            self.dict_set('ignore_status', False)

    def set_debug(self, debug):
        """
        Accessor method for 'debug' property that logs message on change
        """
        if not self.INITIALIZED:
            # During construction just record the value silently.
            self.dict_set('debug', debug)
        else:
            current_setting = self.dict_get('debug')
            desired_setting = bool(debug)
            if not current_setting and desired_setting:
                self.dict_set('debug', True)
                logging.debug("Libguestfs debugging enabled")
            # current and desired could both be True
            if current_setting and not desired_setting:
                self.dict_set('debug', False)
                logging.debug("Libguestfs debugging disabled")
def libguest_test_tool_cmd(qemuarg=None, qemudirarg=None,
                           timeoutarg=None, **dargs):
    """
    Execute libguestfs-test-tool command.

    @param qemuarg: the qemu option
    @param qemudirarg: the qemudir option
    @param timeoutarg: the timeout option
    @return: a CmdResult object
    @raise: raise LibguestfsCmdError
    """
    # BUG FIX: the binary shipped by libguestfs is named
    # "libguestfs-test-tool", not "libguest-test-tool".
    cmd = "libguestfs-test-tool"
    if qemuarg is not None:
        cmd += " --qemu '%s'" % qemuarg
    if qemudirarg is not None:
        cmd += " --qemudir '%s'" % qemudirarg
    if timeoutarg is not None:
        cmd += " --timeout %s" % timeoutarg

    # Allow to raise LibguestfsCmdError if ignore_status is False.
    return lgf_command(cmd, **dargs)
def virt_edit_cmd(disk_or_domain, file_path, options=None,
                  extra=None, expr=None, **dargs):
    """
    Execute virt-edit command to check whether it is ok.

    Since virt-edit will need uses' interact, maintain and return
    a session if there is no raise after command has been executed.

    @param disk_or_domain: a img path or a domain name.
    @param file_path: the file need to be edited in img file.
    @param options: the options of virt-edit.
    @param extra: additional suffix of command.
    @param expr: a non-interactive '-e' edit expression.
    @return: a session of executing virt-edit command.
    """
    # disk_or_domain and file_path are necessary parameters.
    pieces = ["virt-edit '%s' '%s'" % (disk_or_domain, file_path)]
    if options is not None:
        pieces.append("%s" % options)
    if extra is not None:
        pieces.append("%s" % extra)
    if expr is not None:
        pieces.append("-e '%s'" % expr)
    return lgf_command(" ".join(pieces), **dargs)
| |
<|file_name|>DocumentsViewController.js<|end_file_name|><|fim▁begin|>Ext.define('Hrproject.view.clouddrive.documentview.DocumentsViewController',{
extend : 'Ext.app.ViewController',
requires:['Hrproject.view.fw.PDF.panel.PDF'],
alias : 'controller.documentview',
clouddrive : null,
drivelist : null,
driveDetails : null,
driveDetailsController : null,
frame : null,
downloadFileform : null,
selectedFileId:null,
selectedFileNode:null,
baseFileId:null,
moveToTagId:null,
fileUploadWindow:null,
init : function()
{
this.clouddrive = this.getView().up().up();
/*create form and iframe used to download file*/
var body = Ext.getBody();
this.frame = body.createChild({
tag : 'iframe',
cls : 'x-hidden',
id : 'hiddenform-iframe' + this.getView().id,
name : 'iframe' + this.getView().id
});
this.downloadFileform = body.createChild({
tag : 'form',
cls : 'x-hidden',
id : 'hiddenform-form' + this.getView().id,
method : 'post',
ContentType : 'application/json;application/xml',
action : "",
target : 'iframe' + this.getView().id
});
},
initObject : function()
{
this.driveDetails = this.clouddrive.down("#drivedetails");
this.driveDetailsController = this.driveDetails.controller;
this.drivelist = this.driveDetails.down("#drivelist");
this.manageRevGridStore=this.getManageRevGridStore();
},
loadData : function(panel, eopts)
{
this.initObject();
},
/*
* this method call when user call on folder/file if user
* click on folder then folder will open if user clock on
* file the file will download
*/
itemdblclick : function(dataview, record)
{
if (record.data.hierarchy.length > 0) {
this.driveDetailsController.selectNode(record.data.id, true)
} else {
this.downloadFiles(record.data.ftId);
}
},
/** this method is used to download file*/
downloadFiles : function(fileId)
{
this.downloadFileform.dom.action = "secure/cloudDriveController/downloadFilePost?fileId="
+ fileId, this.downloadFileform.dom.submit();
},
/*
* this method call on right click of any file/folder
* this will show menu which contains - preview,download
*/
itemcontextmenu : function(dataview, record, item, index,e, eOpts)
{
debugger;
e.stopEvent();
this.selectedFileId = record.data.ftId;
this.selectedFileNode =record;
var menu = this.getRightClickMenu(record.data.hierarchy);
menu.showAt(e.getXY());
},
getRightClickMenu : function(hierarchy)
{
var nodesHieraObj = this.driveDetailsController.nodesHieraObj;
if (nodesHieraObj[nodesHieraObj.length - 1].data.ftId == -1&& hierarchy.length > 0) {
return;
}
return Ext.create('Ext.menu.Menu',{
items : [
hierarchy.length > 0 ? "": Ext.create('Ext.Action',
{
text : 'Preview',
icon : 'images/cloud/previewDoc.png',
listeners:{
click:'onPreviewClick',
scope:this
}
},this),
hierarchy.length > 0 ? "": Ext.create('Ext.Action',
{
text : 'Download',
icon : 'images/cloud/download.png',
listeners:{
click:'onDownLoadClick',
scope:this
}
}),
hierarchy.length > 0 ? "": '-',
nodesHieraObj[nodesHieraObj.length - 1].data.ftId == -2
&& hierarchy.length == 0 ? Ext.create('Ext.Action',
{
text : 'Move To',
icon : 'images/cloud/moveTo.png',
listeners:{
click:'onMoveToClick',
scope:this
}
})
: "",
nodesHieraObj[nodesHieraObj.length - 1].data.ftId == -2
&& hierarchy.length == 0 ? Ext.create('Ext.Action',
{
text : 'Share',
icon : 'images/cloud/ic_share.png',
listeners:{
click:'onShareClick',
scope:this
}
})
: "",
nodesHieraObj[nodesHieraObj.length - 1].data.ftId == -2
&& hierarchy.length == 0 ? Ext.create('Ext.Action',
{
text : 'Rename',
icon : 'images/cloud/rename.png',
listeners:{
click:'onRenameClick',
scope:this
}
})
: "",
nodesHieraObj[nodesHieraObj.length - 1].data.ftId == -2
&& hierarchy.length == 0 ? Ext.create('Ext.Action',
{
text : 'Add Star',
disabled:true,
icon : 'images/cloud/addStar.png',
handler : function() {
}
})
: "",
nodesHieraObj[nodesHieraObj.length - 1].data.ftId != -1
&& hierarchy.length == 0 ? Ext.create('Ext.Action',
{
text : 'Manage revisions',
icon : 'images/cloud/mr.png',
listeners:{
click:'onMakeRevisionClick',
scope:this
}
})
: "",
nodesHieraObj[nodesHieraObj.length - 1].data.ftId != -1
&& hierarchy.length == 0 ? Ext.create('Ext.Action',
{
text : 'Make a Copy',
icon : 'images/cloud/make-copy.png',
listeners:{
click:'onMakeCopyClick',
scope:this
}
}):"",
nodesHieraObj[nodesHieraObj.length - 1].data.ftId == -1 ? "": Ext.create('Ext.Action',
{
text : 'Remove',
icon : 'images/trash.png',
listeners:{
click:'deleteFunc1',
scope:this
}
}) ]
},this);
},//getRightClickMenu ends
onPreviewClick : function()
{
var appType = this.selectedFileNode.data.applicationType;
if (appType.search("pdf") != -1) {
this.openPDFViewer(this.selectedFileId,this.selectedFileNode.data.text);
} else if (appType.search("image") != -1) {
this.openImageViewer(this.selectedFileId,this.selectedFileNode.data.text);
} else {
this.downloadFiles(this.selectedFileId);
}
},
openPDFViewer:function(fileId, displayName)
{
debugger;;
Ext.create('Ext.window.Window', {
title : displayName,
height : 500,
width : 700,
maximizable : true,
layout : 'fit',
items : [{
xtype :'pdfpanel',
title : '',
width : 589,
height : 433,
pageScale : 0.75, // Initial scaling of the PDF. 1 = 100%
// src : '../buzzor/SpringByExample.pdf', // URL to the PDF -
// Same Domain or Server with CORS Support
src : 'secure/cloudDriveController/downloadFile?fileId=' + fileId
}]
}).show();
},
openImageViewer:function(fileId, displayName)
{
Ext.create('Ext.window.Window', {
title : displayName,
height : 500,
width : 700,
maximizable : true,
layout : 'fit',
items : {
xtype : 'image',
title : '',
width : 589,
height : 433,
pageScale : 0.75, // Initial scaling of the PDF. 1 = 100%
// src : '../buzzor/SpringByExample.pdf', // URL to the PDF -
// Same Domain or Server with CORS Support
src : 'secure/cloudDriveController/downloadFile?fileId=' + fileId
}
}).show();
},
onDownLoadClick:function()
{
this.downloadFiles(this.selectedFileId);
},
deleteFunc1:function()
{
debugger;
// File - 0 // Tag - 1
var isFileTag = -1;
var selId = this.selectedFileNode.data.id;
if (selId.search("f") == 0) {
isFileTag = 0;
} else if (selId.search("t") == 0) {
isFileTag = 1;
}
this.deleteFunc(isFileTag, this.selectedFileId);
},
deleteFunc:function(isFileTag, selectedFileId, rowIndex, grid)
{
Ext.Msg.confirm('Confirmation',"Are you sure you want to delete Folder/File", function(id, value) {
if (id == 'yes') {
/**Check if grid is defined then del function is called from grid or else called from right click menu*/
if(this.grid!=undefined){
this.grid.getStore().removeAt(this.rowIndex);
this.me.deleteOperation(isFileTag, selectedFileId);
}
else{
this.me.deleteOperation(isFileTag, selectedFileId);
}
}
}, {
me : this,
rowIndex:rowIndex,
grid:grid
});
},
deleteOperation:function(isFileTag, selectedFileId)
{
debugger;
Ext.Ajax.request({
url : 'secure/cloudDriveController/deleteTagsFile?fileId='+selectedFileId+"&isFileTag="+isFileTag,
headers : {
'Content-Type' : 'application/json;application/xml'
},
waitMsg : 'Loading...',
method : 'POST',
scope:this,
params : {
fileId : selectedFileId,
isFileTag : isFileTag
},
success : function(response,currentObject) {
debugger;
currentObject.scope.driveDetails.controller.refreshDriveTree();
Ext.Msg.alert('Success',"Files Deleted successfully");
},
failure : function(response) {
Ext.Msg.alert('Error', 'Cannot connect to server');
}
});
},
onMakeCopyClick:function()
{
debugger;
var selectedFileId= this.selectedFileId;
Ext.Ajax.request({
url : 'secure/cloudDriveController/copyFile?fileId='+selectedFileId,
headers : {'Content-Type' : 'application/json;application/xml'},
waitMsg : 'Loading...',
method : 'POST',
scope:this,
params : {
fileId : selectedFileId,
},
success : function(response,currentObject) {
currentObject.scope.driveDetails.controller.refreshDriveTree();
Ext.Msg.alert('Success',"Files Copied successfully");
},
failure : function(response) {
Ext.Msg.alert('Error', 'Cannot connect to server');
}
});
},
onRenameClick:function()
{
Ext.create('Ext.window.Window', {
title :'Rename File',
width:'25%',
maximizable : true,
resizable : true,
layout : 'vbox',
bodyPadding:'8',
items :[{
xtype:'textfield',
fieldLabel:'New Name',
value:this.selectedFileNode.data.text,
itemId:'renameFieldId',
width:'100%',
}],
buttons:[{
text:'Save',
icon:'images/greenFlopy_save.png',
style:'float:right',
listeners:{
click:'onRenameSaveBtnClick',
scope:this,
}
},{
text:'Cancel',
icon:'images/delete_icon.png',
style:'float:right',
handler:function(){
this.up().up().close();
}
}]
}).show();
},
onRenameSaveBtnClick:function(btn)
{
var selectedFileId=this.selectedFileId;
var newFileName=btn.up().up().down('#renameFieldId').getValue();
Ext.Ajax.request({
url : 'secure/cloudDriveController/renameFile?fileId='+selectedFileId+"&newFileName="+newFileName,
headers : {'Content-Type' : 'application/json;application/xml'},
waitMsg : 'Loading...',
method : 'POST',
scope:this,
btn:btn,
params : {
fileId : selectedFileId,
newFileName : newFileName
},
success : function(response,currentObject) {
currentObject.scope.driveDetails.controller.refreshDriveTree();
currentObject.btn.up().up().close();
Ext.Msg.alert('Success',"Files Renamed successfully");
},
failure : function(response) {
Ext.Msg.alert('Error', 'Cannot connect to server');
}
});
},
onMoveToClick:function()
{
Ext.create('Ext.window.Window', {
title :'Choose Destination Folder',
width:'20%',
height:300,
maximizable : true,
resizable : true,
tbar:['->',{
xtype : 'button',
icon : 'images/cloud/newFolder.png',
tooltip:'Add New Folder',
listeners:{
click:'onAddNewFolderClick',
scope:this<|fim▁hole|> itemId : 'folderlist',
rootVisible :false,
useArrows: true,
listeners : {
load : 'loadMoveToFolderList',
itemClick:'onClickFolderList',
scope:this
}
}],
buttons:[{
text:'Move',
icon:'images/cloud/moveRight.png',
style:'float:right',
listeners:{
click:'onMoveBtnClick',
scope:this,
}
},{
text:'Cancel',
icon:'images/delete_icon.png',
style:'float:right',
handler:function(){
this.up().up().close();
}
}]
}).show();
},
loadMoveToFolderList:function(treepanel)
{
debugger;
var data=this.driveDetailsController.drivelistTreeData;
var rootNode = treepanel.getRootNode();
rootNode.removeAll();
for (var x1 = 0; x1 < data.length; x1++) {
this.addChild(rootNode, data[x1]);
}
},
addChild : function(parentNode, node)
{
if (node.hasOwnProperty("children")&& node.children != null)
{
if(node.hierarchy!=3)
{
var child = {
text : node.text,
icon : "images/folder_crisp.png",
image : node.image,
hierarchy : node.hierarchy,
id : node.id,
files : node.files,
displayText : node.displayText,
ftId : node.ftId,
mimeType : node.mimeType,
}
if(node.children.length==0){
child["leaf"]=true;
}
var newNode = parentNode.appendChild(child);
}
for (var x = 0; x < node.children.length; x++) {
this.addChild(newNode, node.children[x]);
}
} else {
node["leaf"]=true;
parentNode.appendChild(node);
}
},
onClickFolderList : function(view, record, item, index, e)
{
debugger;
this.driveDetailsController.moveToSelectedNode=record.data;
this.moveToTagId=record.data.ftId
},
onMoveBtnClick:function(btn)
{
debugger;
var selectedFileId=this.selectedFileId;
var tagId=this.moveToTagId;
/*grid=btn.up().up().down('#folderListGrid');
var tagId=grid.getSelectionModel().getSelection()[0].data.tagId;*/
Ext.Ajax.request({
url : 'secure/cloudDriveController/moveFile?fileId='+selectedFileId+"&tagId="+tagId,
headers : {'Content-Type' : 'application/json;application/xml'},
waitMsg : 'Loading...',
method : 'POST',
scope:this,
btn:btn,
params : {
fileId : selectedFileId,
tagId : tagId
},
success : function(response,currentObject) {
debugger;
currentObject.scope.driveDetails.controller.refreshDriveTree();
currentObject.btn.up().up().close();
Ext.Msg.alert('Success',"Files Moved successfully");
},
failure : function(response) {
Ext.Msg.alert('Error', 'Cannot connect to server');
}
});
},
onAddNewFolderClick:function(btn)
{
debugger;
this.driveDetailsController.createFolderFlag="gridView";
this.driveDetailsController.moveToTreePanel=btn.up().up().down("#folderlist");
this.driveDetailsController.onCreateFolderBtnClick();
},
onShareClick : function()
{
    // Open the "Share With" dialog: a checkbox-selection grid of users plus
    // Share / Cancel buttons.  The grid is populated asynchronously by
    // afterShareUserGridRender once it has rendered; sharing itself happens
    // in onShareBtnClick.
    Ext.create('Ext.window.Window', {
        title : 'Share With',
        width : '25%',
        maximizable : true,
        resizable : true,
        layout : 'vbox',
        //bodyPadding:'8',
        items : [{
            xtype : 'gridpanel',
            itemId : 'shareUserGrid',
            selType : 'checkboxmodel',   // allow ticking several users at once
            multiSelect : true,
            border : true,
            store : new Ext.create('Ext.data.Store', {
                fields : ['loginId', 'firstName', 'userId', 'lastName'],
                data : [],
                sorters : ['firstName']
            }),
            columns : [{
                text : "User Name",
                sortable : true,
                resizable : true,
                menuDisabled : true,
                dataIndex : 'loginId',
                flex : 0.333333333,
            }, {
                text : "First Name",
                sortable : true,
                resizable : true,
                menuDisabled : true,
                dataIndex : 'firstName',
                flex : 0.333333333,
            }, {
                text : "Last Name",
                sortable : true,
                resizable : true,
                menuDisabled : true,
                dataIndex : 'lastName',
                flex : 0.333333333,
            }, {
                // Hidden column: userId is needed by onShareBtnClick when
                // building the share payload.
                dataIndex : "userId",
                hidden : true,
            }],
            title : '',
            width : '100%',
            //height : 380,
            listeners : {
                // Fill the grid with users once it exists.
                afterrender : 'afterShareUserGridRender',
                scope : this
            }
        }],
        buttons : [{
            text : 'Share',
            icon : 'images/cloud/share.png',
            style : 'float:right',
            listeners : {
                click : 'onShareBtnClick',
                scope : this,
            }
        }, {
            text : 'Cancel',
            icon : 'images/delete_icon.png',
            style : 'float:right',
            handler : function() {
                // Close the enclosing window.
                this.up().up().close();
            }
        }]
    }).show();
},
afterShareUserGridRender:function(grid)
{
Ext.Ajax.request({
url : "secure/cloudDriveController/getUsers",
method : 'POST',
scope : this,
grid:grid,
waitMsg : 'Loading...',
jsonData : {},
success : function(response, currentObject,options)
{
debugger;
var jsonResponse = Ext.JSON.decode(response.responseText);
var data = jsonResponse.response.data;
var tempData=[];
for(var i=0;i<data.length;i++)
{
var obj={
loginId:data[i].loginId,
firstName:data[i].firstName,
lastName:data[i].lastName,
userId:data[i].userId
}
tempData.push(obj);
}
currentObject.grid.getStore().loadData(tempData);
},
failure : function() {
Ext.Msg.alert('Error','Cannot connect to server');
}
});
},
onShareBtnClick:function(btn)
{
debugger;
var grid=btn.up().up().down("#shareUserGrid");
var jsonData=[];
if (grid.getSelectionModel().hasSelection())
{
var rows = grid.getSelectionModel().getSelection();
for (var x = 0; x < rows.length; x++)
{
var obj={
sharedUserId:rows[x].data.userId,
fileId:this.selectedFileId
}
jsonData.push(obj);
}
//console.log(jsonData);
Ext.Ajax.request({
url : "secure/cloudDriveController/saveFileSharedToUser",
method : 'POST',
headers : {'Content-Type' : 'application/json;application/xml'},
jsonData:jsonData,
btn:btn,
success : function(response, currentObject,options)
{
debugger;
var jsonResponse = Ext.JSON.decode(response.responseText);
if(jsonResponse.response.success==true){
Ext.Msg.alert('Success',"File Shared Successfully");
currentObject.btn.up().up().close();
}
else{
Ext.Msg.alert('Error', 'Sharing of File Failed!');
}
},
failure : function() {
Ext.Msg.alert('Error', 'Cannot connect to server!');
}
});
}
},
onMakeRevisionClick : function()
{
    // Open the "Manage Revision" dialog: an upload button for adding a new
    // revision of the selected file, plus a grid of existing revisions with
    // per-row delete.  The grid is filled by afterManageRevGridRender.
    Ext.create('Ext.window.Window', {
        title : 'Manage Revision',
        width : '30%',
        height : 300,
        maximizable : true,
        resizable : true,
        autoScroll : true,
        layout : 'fit',
        items : [{
            xtype : 'form',
            layout : 'vbox',
            items : [{
                // File picker rendered as a button only; choosing a file
                // fires 'change' -> onUpldNewRev, which submits the form.
                xtype : 'filefield',
                margin : '5 1 5 322',
                msgTarget : 'side',
                allowBlank : false,
                buttonOnly : true,
                name : 'uploadFile',
                buttonConfig : {
                    text : 'Upload New Revision',
                    icon : 'images/cloud/uploadRevision.png',
                    tooltip : 'Upload New Revision',
                },
                listeners : {
                    change : 'onUpldNewRev',
                    scope : this
                }
            },
            {
                xtype : 'gridpanel',
                width : '100%',
                border : true,
                margin : '2',
                selType : 'cellmodel',
                // Stash the controller on the grid so the action-column
                // handler below can reach it via this.up().up().scope.
                scope : this,
                // NOTE(review): assumes this.manageRevGridStore was assigned
                // earlier (presumably from getManageRevGridStore) -- confirm.
                store : this.manageRevGridStore,
                columns : [
                {
                    xtype : 'actioncolumn',
                    menuDisabled : true,
                    text : 'Delete',
                    align : 'center',
                    flex : 0.15,
                    items : [{
                        //icon : 'images/delete.gif',/**No need to give icon or tooltip if they are mentioned in getClass & if hidden-class is returned then icon attr req*/
                        tooltip : 'Delete Record',
                        // Row 0 is the latest revision: show it disabled.
                        getClass : function(v, metadata, r, rowIndex, colIndex, store) {
                            if (rowIndex == 0) {
                                return "icon-disable"; //return "x-hidden-display";
                            } else {
                                return "icon-enable";
                            }
                        },
                        handler : function(grid, rowIndex, colIndex) {
                            if (rowIndex != 0) {
                                // NOTE(review): `rec` is an implicit global
                                // (missing `var`).
                                rec = grid.getStore().getAt(rowIndex);
                                // Recover the controller stashed as `scope`
                                // on the grid config above.
                                var me = this.up().up().scope;
                                me.deleteFunc(0, rec.data.fileId, rowIndex, grid);
                            } else {
                                Ext.Msg.alert('Info', "Cannot delete latest version of the file");
                            }
                        }
                    }]
                }, {
                    text : 'Current Revision Date',
                    dataIndex : 'createdDate',
                    flex : 0.3
                }, {
                    text : 'Current Revision By',
                    dataIndex : 'updatedBy',
                    flex : 0.3
                }, {
                    text : 'Storage Used',
                    dataIndex : 'fileSize',
                    flex : 0.2
                }],
                listeners : {
                    // Load the revision list once the grid exists; clicking
                    // the date cell downloads that revision.
                    afterrender : 'afterManageRevGridRender',
                    cellclick : 'revGridCellClick',
                    scope : this
                }
            }],
        }],
        buttons : [{
            text : 'Cancel',
            icon : 'images/delete_icon.png',
            style : 'float:right',
            handler : function() {
                this.up().up().close();
            }
        }]
    }).show();
},
getManageRevGridStore:function()
{
return new Ext.create('Ext.data.Store', {
fields : ['baseFileId','fileId','updatedBy','createdDate','fileSize'],
data : []
});
},
afterManageRevGridRender:function(grid)
{
var selectedFileId=this.selectedFileId;
Ext.Ajax.request({
url : "secure/cloudDriveController/getFileRevision?fileId="+selectedFileId,
method : 'POST',
scope : this,
waitMsg : 'Loading...',
jsonData : {},
/*params:{
fileId:selectedFileId
},*/
success : function(response, params,options)
{ debugger;
var responseJson = Ext.JSON.decode(response.responseText).response;
if (responseJson.success == 'true')
{
var data = responseJson.data;
params.scope.baseFileId=data[0].baseFileId;
params.scope.manageRevGridStore.loadData(data);
} else {
Ext.Msg.alert('Error',"loading of manage revision grid failed");
}
},
failure : function() {
Ext.Msg.alert('Error','Cannot connect to server');
}
});
},
onUpldNewRev:function(fileupload)
{
debugger;
this.fileUploadWindow=fileupload.up().up();
var entMask = new Ext.LoadMask({
msg : 'Uploading...',
target : fileupload.up().up()
}).show();
var form = fileupload.up('form').getForm();
if (form.isValid()) {
form.submit({url : 'secure/cloudDriveController/uploadRevisionFile?parentId='
+ this.driveDetailsController.selNode.ftId
+ "&parentHierachy="
+ this.driveDetailsController.selNode.hierarchy
+ "&oldFileId="
+ this.selectedFileId
+ "&baseFileId="
+ this.baseFileId,
scope : this,
entMask:entMask,
success : function(fp, currentObject) {
debugger;
currentObject.entMask.hide();
currentObject.scope.driveDetails.controller.refreshDriveTree();
currentObject.scope.afterManageRevGridRender();
currentObject.scope.fileUploadWindow.close();
Ext.Msg.alert('Success',"Files With New Revision Uploaded successfully");
}
});
}
},
revGridCellClick:function(iView, iCellEl,iColIdx, iStore, iRowEl, iRowIdx,iEvent)
{
/** If 2nd Column's cell is clicked then Download File*/
if (iColIdx == 1) {
this.downloadFiles(iStore.data.fileId);
}
}
});<|fim▁end|>
|
}
}],
items :[{
xtype : 'treepanel',
|
<|file_name|>bar-chart.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { BarChartComponent } from './bar-chart.component';
@NgModule({
imports: [
CommonModule,
FormsModule,<|fim▁hole|> ],
declarations: [
BarChartComponent
],
providers: [
],
exports : [
BarChartComponent
]
})
export class BarChartModule {}<|fim▁end|>
| |
<|file_name|>prefix_test.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import api_select
prefixed_jobs = """
serial flow: [
job: 'top_quick1'
serial flow: [
job: 'top_x_quick2-1'
]
serial flow: [
job: 'top_x_quick2-2'
]
serial flow: [
job: 'top_x_quick2-3'
]
job: 'top_quick3'
parallel flow: (
serial flow: [
job: 'top_y_z_quick4a'
]
serial flow: [
job: 'quick4b'
]
job: 'top_y_quick5'
)
]
"""
def test_prefix(api_type, capsys):
with api_select.api(__file__, api_type) as api:
def job(name):
api.job(name, exec_time=0.5, max_fails=0, expect_invocations=0, expect_order=None, params=None)
api.flow_job()
job('quick1')
index = 0
for index in 1, 2, 3:
job('x_quick2-' + str(index))
job('quick3')
job('y_z_quick4')
job('y_quick5')
with serial(api, timeout=70, report_interval=3, job_name_prefix='top_', just_dump=True) as ctrl1:
ctrl1.invoke('quick1')
for index in 1, 2, 3:
with ctrl1.serial(timeout=20, report_interval=3, job_name_prefix='x_') as ctrl2:
ctrl2.invoke('quick2-' + str(index))
ctrl1.invoke('quick3')
with ctrl1.parallel(timeout=40, report_interval=3, job_name_prefix='y_') as ctrl2:
with ctrl2.serial(timeout=40, report_interval=3, job_name_prefix='z_') as ctrl3a:
ctrl3a.invoke('quick4a')
# Reset prefix
with ctrl2.serial(timeout=40, report_interval=3, job_name_prefix=None) as ctrl3b:
ctrl3b.invoke('quick4b')
ctrl2.invoke('quick5')
sout, _ = capsys.readouterr()<|fim▁hole|><|fim▁end|>
|
assert prefixed_jobs.strip() in sout
|
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
{
'name': "BestJa: Application Moderation",
'summary': "Two stage recruitment process",
'description': """
BestJa Application Moderation
=============================
Split recruitment process into two separate stages.
The first ("preliminary") stage is handled by offer moderators.
The second stage is handled by the recruiting organization itself.""",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'depends': [<|fim▁hole|> 'bestja_offers',
'bestja_offers_moderation',
],
'data': [
'views/offer.xml',
'menu.xml',
'messages.xml',
'security/security.xml',
],
}<|fim▁end|>
|
'base',
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>import collections
from django import forms
from django.forms.util import ErrorDict
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
from amo import helpers
from applications.models import AppVersion
sort_by = (
('', _lazy(u'Keyword Match')),
('updated', _lazy(u'Updated', 'advanced_search_form_updated')),
('newest', _lazy(u'Created', 'advanced_search_form_newest')),
('weeklydownloads', _lazy(u'Downloads')),
('users', _lazy(u'Users')),
('averagerating', _lazy(u'Rating', 'advanced_search_form_rating')),
)
collection_sort_by = (
('weekly', _lazy(u'Most popular this week')),
('monthly', _lazy(u'Most popular this month')),
('all', _lazy(u'Most popular all time')),
('rating', _lazy(u'Highest Rated')),
('newest', _lazy(u'Newest')),
)
per_page = (20, 50, )
tuplize = lambda x: divmod(int(x * 10), 10)
# These releases were so minor that we don't want to search for them.
skip_versions = collections.defaultdict(list)
skip_versions[amo.FIREFOX] = [tuplize(v) for v in amo.FIREFOX.exclude_versions]
min_version = collections.defaultdict(lambda: (0, 0))
min_version.update({<|fim▁hole|> amo.SUNBIRD: tuplize(amo.SUNBIRD.min_display_version),
})
def get_app_versions(app):
    """Return (value, label) version choices for *app*, newest first.

    The list starts with an ``('any', 'Any')`` entry; versions below the
    app's minimum display version or in its skip list are excluded.
    """
    qs = AppVersion.objects.filter(application=app.id)
    minimum = min_version[app]
    skipped = skip_versions[app]
    # Deduplicate to (major, minor1) pairs before sorting.
    seen = set((av.major, av.minor1) for av in qs)
    strings = []
    for pair in sorted(seen, reverse=True):
        if pair >= minimum and pair not in skipped:
            strings.append('%s.%s' % pair)
    return [('any', _('Any'))] + zip(strings, strings)
# Fake categories to slip some add-on types into the search groups.
_Cat = collections.namedtuple('Cat', 'id name weight type')
def get_search_groups(app):
    """Return ``(all_choices, top_level_choices)`` for the search 'cat' field.

    ``all_choices`` interleaves a few add-on *types* (dictionaries, search
    tools, themes) with the app's real categories, sandwiched between the
    top-level "all add-ons" / "all collections" (/ "all personas") entries.
    Each sub-choice value is the string ``'<type>,<category_id>'``.
    """
    sub = []
    # Only include the fake type entries this app actually supports.
    types_ = [t for t in (amo.ADDON_DICT, amo.ADDON_SEARCH, amo.ADDON_THEME)
              if t in app.types]
    for type_ in types_:
        # category_id 0 marks "whole type" rather than a real category.
        sub.append(_Cat(0, amo.ADDON_TYPES[type_], 0, type_))
    sub.extend(helpers.sidebar(app)[0])
    sub = [('%s,%s' % (a.type, a.id), a.name) for a in
           sorted(sub, key=lambda x: (x.weight, x.name))]
    top_level = [('all', _('all add-ons')),
                 ('collections', _('all collections')), ]
    if amo.ADDON_PERSONA in app.types:
        top_level += (('personas', _('all personas')),)
    # "all add-ons" first, then the type/category entries, then the rest.
    return top_level[:1] + sub + top_level[1:], top_level
SEARCH_CHOICES = (
('all', _lazy('search for add-ons')),
('collections', _lazy('search for collections')),
('personas', _lazy('search for personas')),
('apps', _lazy('search for apps')))
class SimpleSearchForm(forms.Form):
    """Powers the search box on every page."""
    q = forms.CharField(required=False)
    cat = forms.CharField(required=False, widget=forms.HiddenInput)
    appver = forms.CharField(required=False, widget=forms.HiddenInput)
    platform = forms.CharField(required=False, widget=forms.HiddenInput)
    # cat value -> human-readable placeholder label.
    choices = dict(SEARCH_CHOICES)

    def clean_cat(self):
        # Request QueryDicts are immutable; replace self.data with a plain
        # dict so a default 'cat' can be injected when none was submitted.
        self.data = dict(self.data.items())
        return self.data.setdefault('cat', 'all')

    def placeholder(self):
        """Return the placeholder text for the search input box."""
        val = self.clean_cat()
        return self.choices.get(val, self.choices['all'])
def SearchForm(request):
    """Build and return the advanced-search form, bound to request.GET.

    Implemented as a factory because several field choices (categories,
    app versions) depend on the requesting app.
    """
    current_app = request.APP or amo.FIREFOX

    search_groups, top_level = get_search_groups(current_app)

    class _SearchForm(SimpleSearchForm):

        cat = forms.ChoiceField(choices=search_groups, required=False)

        # This gets replaced by a <select> with js.
        lver = forms.ChoiceField(
                label=_(u'{0} Version').format(unicode(current_app.pretty)),
                choices=get_app_versions(current_app), required=False)
        appver = forms.CharField(required=False)

        atype = forms.TypedChoiceField(label=_('Type'),
            choices=[(t, amo.ADDON_TYPE[t]) for t in amo.ADDON_SEARCH_TYPES],
            required=False, coerce=int, empty_value=amo.ADDON_ANY)

        pid = forms.TypedChoiceField(label=_('Platform'),
                choices=[(p[0], p[1].name) for p in amo.PLATFORMS.iteritems()
                         if p[1] != amo.PLATFORM_ANY], required=False,
                coerce=int, empty_value=amo.PLATFORM_ANY.id)
        platform = forms.ChoiceField(required=False,
                choices=[[p.shortname, p.id] for p in amo.PLATFORMS.values()])

        sort = forms.ChoiceField(label=_('Sort By'), choices=sort_by,
                                 required=False)

        pp = forms.TypedChoiceField(label=_('Per Page'),
               choices=zip(per_page, per_page), required=False, coerce=int,
               empty_value=per_page[0])

        advanced = forms.BooleanField(widget=forms.HiddenInput, required=False)
        tag = forms.CharField(widget=forms.HiddenInput, required=False)
        page = forms.IntegerField(widget=forms.HiddenInput, required=False)

        # Attach these to the form for usage in the template.
        top_level_cat = dict(top_level)

        def clean_platform(self):
            # Map the submitted shortname to the platform id (or None).
            p = self.cleaned_data.get('platform')
            choices = dict(self.fields['platform'].choices)
            return choices.get(p)

        # TODO(jbalogh): when we start using this form for zamboni search, it
        # should check that the appid and lver match up using app_versions.
        def clean(self):
            d = self.cleaned_data
            raw = self.data

            # Set some defaults
            # NOTE(review): there is no 'appid' form field; this default is
            # presumably consumed downstream from cleaned_data -- confirm.
            if not d.get('appid'):
                d['appid'] = request.APP.id

            # Since not all categories are listed in this form, we use the raw
            # data.  'cat' is either 'all', an id, or an '<atype>,<cat>' pair.
            if 'cat' in raw:
                if ',' in raw['cat']:
                    try:
                        d['atype'], d['cat'] = map(int, raw['cat'].split(','))
                    except ValueError:
                        d['cat'] = None
                elif raw['cat'] == 'all':
                    d['cat'] = None

            if 'page' not in d or not d['page'] or d['page'] < 1:
                d['page'] = 1
            return d

        def full_clean(self):
            """
            Cleans all of self.data and populates self._errors and
            self.cleaned_data.

            Does not remove cleaned_data if there are errors.
            """
            self._errors = ErrorDict()
            if not self.is_bound:  # Stop further processing.
                return
            self.cleaned_data = {}
            # If the form is permitted to be empty, and none of the form data
            # has changed from the initial data, short circuit any validation.
            if self.empty_permitted and not self.has_changed():
                return
            self._clean_fields()
            self._clean_form()

    d = request.GET.copy()

    return _SearchForm(d)
class SecondarySearchForm(forms.Form):
    """Search form for the secondary (collections/personas) listings."""
    q = forms.CharField(widget=forms.HiddenInput, required=False)
    cat = forms.CharField(widget=forms.HiddenInput)
    pp = forms.CharField(widget=forms.HiddenInput, required=False)
    sortby = forms.ChoiceField(label=_lazy(u'Sort By'),
                               choices=collection_sort_by,
                               initial='weekly', required=False)
    page = forms.IntegerField(widget=forms.HiddenInput, required=False)

    def clean_pp(self):
        """Coerce the per-page value to int, defaulting on bad input."""
        d = self.cleaned_data['pp']
        try:
            return int(d)
        except (TypeError, ValueError):
            # Bug fix: this was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.  Bad input just means "use the
            # default page size".
            return per_page[0]

    def clean(self):
        # clean_pp returns 0-ish values unchanged; normalize them here.
        d = self.cleaned_data
        if not d.get('pp'):
            d['pp'] = per_page[0]
        return d

    def full_clean(self):
        """
        Cleans all of self.data and populates self._errors and
        self.cleaned_data.

        Does not remove cleaned_data if there are errors.
        """
        self._errors = ErrorDict()
        if not self.is_bound:  # Stop further processing.
            return
        self.cleaned_data = {}
        # If the form is permitted to be empty, and none of the form data
        # has changed from the initial data, short circuit any validation.
        if self.empty_permitted and not self.has_changed():
            return
        self._clean_fields()
        self._clean_form()
SORT_CHOICES = (
(None, _lazy(u'Relevance')),
('users', _lazy(u'Most Users')),
('rating', _lazy(u'Top Rated')),
('created', _lazy(u'Newest')),
# --
('name', _lazy(u'Name')),
('downloads', _lazy(u'Weekly Downloads')),
('updated', _lazy(u'Recently Updated')),
('hotness', _lazy(u'Up & Coming')),
)
APP_SORT_CHOICES = (
(None, _lazy(u'Relevance')),
('downloads', _lazy(u'Weekly Downloads')),
('rating', _lazy(u'Top Rated')),
('created', _lazy(u'Newest')),
# --
('name', _lazy(u'Name')),
('hotness', _lazy(u'Up & Coming')),
)
class ESSearchForm(forms.Form):
    """Elasticsearch-backed search form for add-ons and (web)apps."""
    q = forms.CharField(required=False)
    tag = forms.CharField(required=False)
    platform = forms.ChoiceField(required=False,
        choices=[(p.shortname, p.id) for p in amo.PLATFORMS.values()])
    appver = forms.CharField(required=False)
    atype = forms.TypedChoiceField(required=False, coerce=int,
        choices=[(t, amo.ADDON_TYPE[t]) for t in amo.ADDON_SEARCH_TYPES])
    cat = forms.CharField(required=False)
    sort = forms.ChoiceField(required=False, choices=SORT_CHOICES)

    def __init__(self, *args, **kw):
        # Apps get a reduced set of sort options.
        addon_type = kw.pop('type', None)
        super(ESSearchForm, self).__init__(*args, **kw)
        if addon_type == amo.ADDON_WEBAPP:
            self.fields['sort'].choices = APP_SORT_CHOICES

    def clean_appver(self):
        """Normalize an app version like '3.6.12' down to '3.0'."""
        appver = self.cleaned_data.get('appver')
        if appver:
            major = appver.split('.')[0]
            if major.isdigit():
                appver = major + '.0'
        return appver

    def clean_sort(self):
        # Silently drop unknown sort keys instead of raising.
        sort = self.cleaned_data.get('sort')
        return sort if sort in dict(SORT_CHOICES) else None

    def clean_cat(self):
        """Parse 'cat': either a bare category id or '<atype>,<cat>'."""
        cat = self.cleaned_data.get('cat')
        if ',' in cat:
            try:
                self.cleaned_data['atype'], cat = map(int, cat.split(','))
            except ValueError:
                return None
            # Bug fix: the parsed category id was previously discarded (the
            # method fell off the end here, returning None).
            return cat
        else:
            try:
                return int(cat)
            except ValueError:
                return None

    def full_clean(self):
        """
        Cleans self.data and populates self._errors and self.cleaned_data.

        Does not remove cleaned_data if there are errors.
        """
        self._errors = ErrorDict()
        if not self.is_bound:  # Stop further processing.
            return
        self.cleaned_data = {}
        # If the form is permitted to be empty, and none of the form data
        # has changed from the initial data, short circuit any validation.
        if self.empty_permitted and not self.has_changed():
            return
        self._clean_fields()
        self._clean_form()
|
amo.FIREFOX: tuplize(amo.FIREFOX.min_display_version),
amo.THUNDERBIRD: tuplize(amo.THUNDERBIRD.min_display_version),
amo.SEAMONKEY: tuplize(amo.SEAMONKEY.min_display_version),
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.