// SignalCellular2BarTwoTone.js
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(
  <React.Fragment><path fill="none" d="M0 0h24v24H0V0z" /><g><path fillOpacity=".3" d="M2 22h20V2L2 22z" /><path d="M14 10L2 22h12V10z" /></g></React.Fragment>
, 'SignalCellular2BarTwoTone');
// vm-to-zones.test.js
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* Copyright (c) 2015, Joyent, Inc.
*/
var test = require('./test-namer')('vm-to-zones');
var util = require('util');
var bunyan = require('bunyan');
var utils = require('../../lib/utils');
var buildZonesFromVm = require('../../lib/vm-to-zones');
var log = bunyan.createLogger({name: 'cns'});
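// Shape exercised by the tests below: buildZonesFromVm(vm, config, log)
// returns an object keyed by zone name, where each zone maps record names to
// lists of {constructor, args} record specs (A/TXT records for forward zones,
// PTR records for reverse zones; service records additionally carry a src).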
test('basic single container', function (t) {
var config = {
forward_zones: {
'foo': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
services: [],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foo']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones), ['foo', '3.2.1.in-addr.arpa']);
t.deepEqual(Object.keys(zones['foo']), ['abc123.inst.def432']);
t.deepEqual(Object.keys(zones['3.2.1.in-addr.arpa']), ['4']);
var fwd = zones['foo']['abc123.inst.def432'];
t.deepEqual(fwd, [
{constructor: 'A', args: ['1.2.3.4']},
{constructor: 'TXT', args: ['abc123']}
]);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['abc123.inst.def432.foo']}
]);
t.end();
});
test('cloudapi instance', function (t) {
var config = {
forward_zones: {
'foo': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
services: [ { name: 'cloudapi', ports: [] } ],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432',
login: 'admin'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foo']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones), ['foo', '3.2.1.in-addr.arpa']);
t.deepEqual(Object.keys(zones['foo']), [
'abc123.inst.def432', 'cloudapi.svc.def432', 'cloudapi']);
t.deepEqual(Object.keys(zones['3.2.1.in-addr.arpa']), ['4']);
var fwd = zones['foo']['cloudapi'];
t.deepEqual(fwd, [
{constructor: 'A', args: ['1.2.3.4'], src: 'abc123'},
{constructor: 'TXT', args: ['abc123'], src: 'abc123'}
]);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['abc123.inst.def432.foo']}
]);
t.end();
});
test('with use_alias', function (t) {
var config = {
use_alias: true,
forward_zones: {
'foo': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
alias: 'test',
services: [],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foo']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones), ['foo', '3.2.1.in-addr.arpa']);
t.deepEqual(Object.keys(zones['foo']),
['abc123.inst.def432', 'test.inst.def432']);
t.deepEqual(Object.keys(zones['3.2.1.in-addr.arpa']), ['4']);
var fwd = zones['foo']['test.inst.def432'];
t.deepEqual(fwd, [
{constructor: 'A', args: ['1.2.3.4']},
{constructor: 'TXT', args: ['abc123']}
]);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['test.inst.def432.foo']}
]);
t.end();
});
test('with use_login', function (t) {
var config = {
use_login: true,
forward_zones: {
'foo': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
alias: 'test',
services: [],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432',
login: 'bar'
        },
        nics: [
            {
ip: '1.2.3.4',
zones: ['foo']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones), ['foo', '3.2.1.in-addr.arpa']);
t.deepEqual(Object.keys(zones['foo']),
['abc123.inst.def432', 'abc123.inst.bar']);
t.deepEqual(Object.keys(zones['3.2.1.in-addr.arpa']), ['4']);
var fwd = zones['foo']['abc123.inst.bar'];
t.deepEqual(fwd, [
{constructor: 'A', args: ['1.2.3.4']},
{constructor: 'TXT', args: ['abc123']}
]);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['abc123.inst.bar.foo']}
]);
t.end();
});
test('with use_alias and use_login', function (t) {
var config = {
use_alias: true,
use_login: true,
forward_zones: {
'foo': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
alias: 'test',
services: [],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432',
login: 'bar'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foo']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones), ['foo', '3.2.1.in-addr.arpa']);
t.deepEqual(Object.keys(zones['foo']),
['abc123.inst.def432', 'abc123.inst.bar', 'test.inst.def432',
'test.inst.bar']);
t.deepEqual(Object.keys(zones['3.2.1.in-addr.arpa']), ['4']);
var fwd = zones['foo']['test.inst.bar'];
t.deepEqual(fwd, [
{constructor: 'A', args: ['1.2.3.4']},
{constructor: 'TXT', args: ['abc123']}
]);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['test.inst.bar.foo']}
]);
t.end();
});
test('using a PTR name', function (t) {
var config = {
use_alias: true,
use_login: true,
forward_zones: {
'foo': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
alias: 'test',
services: [],
ptrname: 'test.something.com',
listInstance: true,
listServices: true,
owner: {
uuid: 'def432',
login: 'bar'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foo']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones), ['foo', '3.2.1.in-addr.arpa']);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['test.something.com']}
]);
t.end();
});
test('multi-zone', function (t) {
var config = {
use_alias: true,
use_login: true,
forward_zones: {
'foo': {},
'bar': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
alias: 'test',
services: [],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432',
login: 'bar'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foo']
},
{
ip: '3.2.1.4',
zones: ['bar']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones).sort(),
['1.2.3.in-addr.arpa', '3.2.1.in-addr.arpa', 'bar', 'foo']);
t.deepEqual(Object.keys(zones['foo']).sort(),
['abc123.inst.bar', 'abc123.inst.def432', 'test.inst.bar',
'test.inst.def432']);
t.deepEqual(Object.keys(zones['bar']).sort(),
Object.keys(zones['foo']).sort());
t.deepEqual(Object.keys(zones['3.2.1.in-addr.arpa']), ['4']);
t.deepEqual(Object.keys(zones['1.2.3.in-addr.arpa']), ['4']);
var fwd = zones['foo']['test.inst.bar'];
t.deepEqual(fwd, [
{constructor: 'A', args: ['1.2.3.4']},
{constructor: 'TXT', args: ['abc123']}
]);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['test.inst.bar.foo']}
]);
var rev2 = zones['1.2.3.in-addr.arpa']['4'];
t.deepEqual(rev2, [
{constructor: 'PTR', args: ['test.inst.bar.bar']}
]);
t.end();
});
test('multi-zone, single PTRs', function (t) {
var config = {
use_alias: true,
use_login: true,
forward_zones: {
'foo': {},
'bar': {},
'baz': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
alias: 'test',
services: [],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432',
login: 'bar'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foo', 'bar']
},
{
ip: '3.2.1.4',
zones: ['baz']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones).sort(),
['1.2.3.in-addr.arpa', '3.2.1.in-addr.arpa', 'bar', 'baz', 'foo']);
t.deepEqual(Object.keys(zones['foo']).sort(),
['abc123.inst.bar', 'abc123.inst.def432', 'test.inst.bar',
'test.inst.def432']);
t.deepEqual(Object.keys(zones['bar']).sort(),
Object.keys(zones['foo']).sort());
t.deepEqual(Object.keys(zones['3.2.1.in-addr.arpa']), ['4']);
t.deepEqual(Object.keys(zones['1.2.3.in-addr.arpa']), ['4']);
var fwd = zones['foo']['test.inst.bar'];
t.deepEqual(fwd, [
{constructor: 'A', args: ['1.2.3.4']},
{constructor: 'TXT', args: ['abc123']}
]);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['test.inst.bar.foo']}
]);
var rev2 = zones['1.2.3.in-addr.arpa']['4'];
t.deepEqual(rev2, [
{constructor: 'PTR', args: ['test.inst.bar.baz']}
]);
t.end();
});
test('multi-zone, shortest zone priority PTR', function (t) {
var config = {
use_alias: true,
use_login: true,
forward_zones: {
'foobarbaz': {},
'foobar': {},
'baz': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
alias: 'test',
services: [],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432',
login: 'bar'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foobar', 'foobarbaz', 'baz']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
var rev = zones['3.2.1.in-addr.arpa']['4'];
t.deepEqual(rev, [
{constructor: 'PTR', args: ['test.inst.bar.baz']}
]);
t.end();
});
test('service with srvs', function (t) {
var config = {
use_alias: true,
forward_zones: {
'foo': {}
},
reverse_zones: {}
};
var vm = {
uuid: 'abc123',
alias: 'test',
services: [
{ name: 'svc1', ports: [1234, 1235] }
],
listInstance: true,
listServices: true,
owner: {
uuid: 'def432'
},
nics: [
{
ip: '1.2.3.4',
zones: ['foo']
}
]
};
var zones = buildZonesFromVm(vm, config, log);
t.deepEqual(Object.keys(zones), ['foo', '3.2.1.in-addr.arpa']);
t.deepEqual(Object.keys(zones['foo']),
['abc123.inst.def432', 'test.inst.def432', 'svc1.svc.def432']);
var fwd = zones['foo']['test.inst.def432'];
t.deepEqual(fwd, [
{constructor: 'A', args: ['1.2.3.4']},
{constructor: 'TXT', args: ['abc123']}
]);
var svc = zones['foo']['svc1.svc.def432'];
t.deepEqual(svc, [
{constructor: 'A', args: ['1.2.3.4'], src: 'abc123'},
{constructor: 'TXT', args: ['abc123'], src: 'abc123'},
{constructor: 'SRV', args: ['test.inst.def432.foo', 1234],
src: 'abc123'},
{constructor: 'SRV', args: ['test.inst.def432.foo', 1235],
src: 'abc123'}
]);
t.end();
});
# miscdefs.py
# # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## #
#~ This file is part of NZBmegasearch by pillone.
#~
#~ NZBmegasearch is free software: you can redistribute it and/or modify
#~ it under the terms of the GNU General Public License as published by
#~ the Free Software Foundation, either version 3 of the License, or
#~ (at your option) any later version.
#~
#~ NZBmegasearch is distributed in the hope that it will be useful,
#~ but WITHOUT ANY WARRANTY; without even the implied warranty of
#~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#~ GNU General Public License for more details.
#~
#~ You should have received a copy of the GNU General Public License
#~ along with NZBmegasearch. If not, see <http://www.gnu.org/licenses/>.
# # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## # ## #
import requests
import sys
import base64
import DeepsearchModule
from functools import wraps
from flask import Response,request
import config_settings
from flask import render_template
import os
import subprocess
import datetime
import time
import logging
import SearchModule
import urlparse
import urllib
import json
from operator import itemgetter
#~ max visualized
LOG_MAXLINES = 500
log = logging.getLogger(__name__)
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
def logviewer(logsdir):
filename=logsdir+'nzbmegasearch.log'
array1 = []
count = 0
for line in reversed(open(filename).readlines()):
if(count > LOG_MAXLINES):
break
array1.append(line.decode('utf-8').rstrip())
count = count + 1
return(render_template('loginfo.html', loginfo =array1 ))
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
def daemonize(logsdir):
#~ full credits to SICKBEARD
# Make a non-session-leader child process
try:
pid = os.fork() # @UndefinedVariable - only available in UNIX
if pid != 0:
sys.exit(0)
except OSError, e:
raise RuntimeError("1st fork failed: %s [%d]" % (e.strerror, e.errno))
os.setsid() # @UndefinedVariable - only available in UNIX
# Make sure I can read my own files and shut out others
prev = os.umask(0)
os.umask(prev and int('077', 8))
# Make the child a session-leader by detaching from the terminal
try:
pid = os.fork() # @UndefinedVariable - only available in UNIX
if pid != 0:
sys.exit(0)
except OSError, e:
raise RuntimeError("2nd fork failed: %s [%d]" % (e.strerror, e.errno))
dev_null = file('/dev/null', 'r')
os.dup2(dev_null.fileno(), sys.stdin.fileno())
log.info("Daemonized using PID " + str(pid))
#~ LEGACY DAEMON LOGGING
#~ silences console output
#~ sys.stdout = open('tmpdl', 'wt')
#~ logging.basicConfig(
#~ level=logging.DEBUG,
#~ format='%(asctime)s:%(levelname)s:%(name)s:%(message)s',
#~ filename=logsdir+'nzbmegasearch_daemon.log',
#~ filemode='a')
#~ stdout_logger = logging.getLogger('STDOUT')
#~ sl = StreamToLogger(stdout_logger, logging.INFO)
#~ sys.stdout = sl
#~ stderr_logger = logging.getLogger('STDERR')
#~ sl = StreamToLogger(stderr_logger, logging.ERROR)
#~ sys.stderr = sl
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
class StreamToLogger(object):
"""
Fake file-like stream object that redirects writes to a logger instance.
"""
def __init__(self, logger, log_level=logging.INFO):
self.logger = logger
self.log_level = log_level
self.linebuf = ''
def write(self, buf):
for line in buf.rstrip().splitlines():
self.logger.log(self.log_level, line.rstrip())
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
def connectinfo():
return render_template('connectinfo.html')
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
class Auth:
def __init__(self, cfgsetsp):
#~ another instance to not use ptrs
self.cfgsets = config_settings.CfgSettings()
def check_auth(self, username, password, mode):
if(mode == 0):
if(username == self.cfgsets.cgen['general_usr'] and password == self.cfgsets.cgen['general_pwd']):
return True
if(mode == 1):
if(len(self.cfgsets.cgen['config_user']) != 0):
if(username == self.cfgsets.cgen['config_user'] and password == self.cfgsets.cgen['config_pwd']):
return True
else:
if(username == self.cfgsets.cgen['general_usr'] and password == self.cfgsets.cgen['general_pwd']):
return True
return False
def authenticate(self):
"""Sends a 401 response that enables basic auth"""
retres = Response(
'Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
return retres
def requires_auth(self, f):
@wraps(f)
def decorated(*args, **kwargs):
self.cfgsets.refresh()
if(len(self.cfgsets.cgen['general_usr']) != 0):
auth = request.authorization
if not auth or not self.check_auth(auth.username, auth.password,0):
sret = self.authenticate()
return sret
return f(*args, **kwargs)
else:
return f(*args, **kwargs)
return decorated
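    #~ usage sketch (illustrative only): decorate a Flask view with
    #~ @auth.requires_auth so requests without valid credentials receive
    #~ the 401 response built by authenticate()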
def requires_conf(self, f):
@wraps(f)
def decorated(*args, **kwargs):
if(len(self.cfgsets.cgen['config_user']) != 0 or len(self.cfgsets.cgen['general_usr']) != 0):
auth = request.authorization
if not auth or not self.check_auth(auth.username, auth.password,1):
return self.authenticate()
return f(*args, **kwargs)
else:
return f(*args, **kwargs)
        return decorated
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
class DownloadedStats:
#~ megatransfer
def __init__(self):
import urlparse
cfgsets = config_settings.CfgSettings()
self.cgen = cfgsets.cgen
self.logsdir = SearchModule.resource_path('logs/nzbmegasearch.log')
self.scriptsdir = SearchModule.resource_path('get_stats.sh')
self.cfg_urlidx = []
self.excludeurls= ['http://ftdworld.net', 'https://nzbx.co']
if(cfgsets.cfg is not None):
self.config = cfgsets.cfg
for i in xrange(len(self.config)):
if(self.config[i]['builtin'] == 0):
self.cfg_urlidx.append(i)
def get_generalstats(self,args):
log.info('Stats general have been requested')
savedurl = []
errstr = "WRONG KEY"
if('key' not in args):
return errstr
else:
if(args['key'] != self.cgen['stats_key']):
return errstr
daytochk = datetime.datetime.now().strftime("%Y-%m-%d")
if('d' in args):
daytochk=args['d']
subprocess.call([self.scriptsdir + ' '+self.logsdir + ' ' + daytochk ], shell=True, executable="/bin/bash")
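        #~ get_stats.sh is expected to leave one "<name> <seconds>" pair per
        #~ line in /tmp/logstats_gen; the timings are grouped by name below
        #~ before computing the per-handler summary statistics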
stat_info = {}
with open("/tmp/logstats_gen") as infile:
for line in infile:
value = line.split()
#~ print value
#~ print line
if(value[0] not in stat_info):
stat_info[value[0]] = []
stat_info[value[0]].append( float(value[1]) )
#~ print stat_info
stat_info_curated = []
uidx = 0
for key in stat_info.keys():
meant = float(sum(stat_info[key]))/len(stat_info[key]) if len(stat_info[key]) > 0 else float('nan')
mediant = sorted(stat_info[key])[len(stat_info[key])/2]
stat_info_curated_t = {}
stat_info_curated_t['succ_call'] = len(stat_info[key])
stat_info_curated_t['name'] = key
stat_info_curated_t['mean'] = meant
stat_info_curated_t['median'] = mediant
stat_info_curated_t['min'] = min(stat_info[key])
stat_info_curated_t['max'] = max(stat_info[key])
stat_info_curated.append(stat_info_curated_t)
uidx += 1
stat_info_curated = sorted(stat_info_curated, key=itemgetter('median'))
return render_template('stats_gen.html',stat_cur=stat_info_curated)
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
class ChkServer:
def __init__(self, cgen):
self.cgen = cgen
self.agent_headers = { 'User-Agent': 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1' }
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
def check(self, args):
ret = 0
if(('hostname' in args) and ('type' in args)):
# Perform the search using every module
global globalResults
if 'loadedModules' not in globals():
SearchModule.loadSearchModules()
#~ specials
if(args['type'] == 'OMG'):
ret = 1
cfg_tmp = {'valid': 1,
'type': 'OMG',
'speed_class': 2,
'extra_class': 0,
'login': args['user'],
'pwd': args['pwd'],
'timeout': self.cgen['timeout_class'][2],
'builtin': 1}
for module in SearchModule.loadedModules:
if( module.typesrch == 'OMG'):
module.search('Ubuntu', cfg_tmp)
print cfg_tmp['retcode']
if(cfg_tmp['retcode'][0] != 200):
ret = 0
#~ server based API
if(args['type'] == 'NAB'):
ret = 1
cfg_tmp = {'url': args['hostname'],
'type': 'NAB',
'api': args['api'],
'speed_class': 2,
'extra_class': 0,
'valid': 1,
'timeout': self.cgen['timeout_class'][2],
'builtin': 0 }
for module in SearchModule.loadedModules:
if( module.typesrch == 'NAB'):
module.search('Ubuntu', cfg_tmp)
print cfg_tmp['retcode']
if(cfg_tmp['retcode'][0] != 200):
ret = 0
#~ server based WEB
if(args['type'] == 'DSN' or args['type'] == 'DS_GNG'):
cfg_deep_tmp = [{'url': args['hostname'],
'user':args['user'],
'pwd': args['pwd'],
'type': args['type'],
'speed_class': 2,
'extra_class': 0,
'valid': 1,
}]
ds_tmp = DeepsearchModule.DeepSearch(cfg_deep_tmp, self.cgen)
ret_bool = ds_tmp.ds[0].search('Ubuntu')
if(ret_bool):
ret = 1
else:
ret = 0
return ret
#~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
class ChkVersion:
def __init__(self, debugflag=False):
self.dirconf= os.getenv('OPENSHIFT_DATA_DIR', '')
self.dirconf_local = os.path.dirname(os.path.realpath(__file__))+'/'
if getattr(sys, 'frozen', False):
self.dirconf_local = os.path.dirname(sys.executable)+'/'
self.ver_notify = { 'chk':-1,
'curver': -1,
'os':-1}
self.chk_update_ts = 0
self.chk_update_refreshrate = 3600 * 4
if(debugflag == False):
self.chk_update()
def chk_update(self):
dt1 = (datetime.datetime.now() - datetime.datetime.fromtimestamp(self.chk_update_ts))
dl = (dt1.days+1) * dt1.seconds
if(dl > self.chk_update_refreshrate):
if (sys.platform.startswith('linux') and len(self.dirconf)==0):
self.ver_notify['os'] = 'linux'
else:
self.ver_notify['os'] = 'other'
if (len(self.dirconf)):
self.ver_notify['os'] = 'openshift'
print '>> Checking for updates...'
self.chk_local_ver()
self.ver_notify['chk'] = self.chk_repos_ver()
self.chk_update_ts = time.time()
def chk_local_ver(self):
verify_str = '80801102808011028080110280801102'
usedir = self.dirconf_local
if (len(self.dirconf)):
usedir = self.dirconf
with open(usedir+'vernum.num') as f:
content = f.readlines()
vals = content[0].split(' ')
if(vals[0] == verify_str):
self.ver_notify['curver'] = float(vals[1])
def autoupdate(self):
#~ linux only, sorry win users
if (sys.platform.startswith('linux') and len(self.dirconf)==0):
#~ print 'MISCDEFS: THIS LINE HAS TO BE REMOVED BEFORE DEPLOYMENT'
mssg = '>> Running autoupdate on Linux platform'
print mssg
log.info(mssg)
subprocess.call(["git", "fetch"])
subprocess.call(["git", "reset", "--hard", "origin/master"])
pythonscr = sys.executable
os.execl(pythonscr, pythonscr, * sys.argv)
def chk_repos_ver(self):
verify_str = '80801102808011028080110280801102'
url_versioning = 'https://raw.github.com/pillone/usntssearch/master/NZBmegasearch/vernum.num'
#~ print 'MISCDEFS: TO REMOVE LINE IN AUTOUPD BEFORE DEPLOYMENT'
try:
http_result = requests.get(url=url_versioning, verify=False)
#~ print http_result.text
vals = http_result.text.split(' ')
cur_ver = float(vals[1])
if(vals[0] != verify_str):
return -1
if(self.ver_notify['curver'] < cur_ver):
print '>> A newer version is available. User notification on.'
#~ in case of supported platforms this is never executed, but autoupdated
self.autoupdate()
return 1
else:
if(self.ver_notify['curver'] == cur_ver):
print '>> This is the newest version available'
return 0
except Exception as e:
mssg = str(e)
print mssg
log.critical(mssg)
            return -1
#!/usr/bin/python
# gen_parser.py
import datetime
import sys
import textwrap
import common
from xml.dom import pulldom
PARSER = """\
/**
* Copyright 2009 Joe LaPenna
*/
package com.joelapenna.foursquare.parsers;
import com.joelapenna.foursquare.Foursquare;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareParseException;
import com.joelapenna.foursquare.types.%(type_name)s;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Auto-generated: %(timestamp)s
*
 * @author Joe LaPenna ([email protected])
 * @param <T>
 */
public class %(type_name)sParser extends AbstractParser<%(type_name)s> {
private static final Logger LOG = Logger.getLogger(%(type_name)sParser.class.getCanonicalName());
private static final boolean DEBUG = Foursquare.PARSER_DEBUG;
@Override
public %(type_name)s parseInner(XmlPullParser parser) throws XmlPullParserException, IOException,
FoursquareError, FoursquareParseException {
parser.require(XmlPullParser.START_TAG, null, null);
%(type_name)s %(top_node_name)s = new %(type_name)s();
while (parser.nextTag() == XmlPullParser.START_TAG) {
String name = parser.getName();
%(stanzas)s
} else {
// Consume something we don't understand.
if (DEBUG) LOG.log(Level.FINE, "Found tag that we don't recognize: " + name);
skipSubTree(parser);
}
}
return %(top_node_name)s;
}
}"""
BOOLEAN_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(Boolean.valueOf(parser.nextText()));
"""
GROUP_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new GroupParser(new %(sub_parser_camel_case)s()).parse(parser));
"""
COMPLEX_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new %(parser_name)s().parse(parser));
"""
STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(parser.nextText());
"""
def main():
type_name, top_node_name, attributes = common.WalkNodesForAttributes(
sys.argv[1])
GenerateClass(type_name, top_node_name, attributes)
def GenerateClass(type_name, top_node_name, attributes):
"""generate it.
type_name: the type of object the parser returns
top_node_name: the name of the object the parser returns.
per common.WalkNodsForAttributes
"""
stanzas = []
for name in sorted(attributes):
typ, children = attributes[name]
replacements = Replacements(top_node_name, name, typ, children)
if typ == common.BOOLEAN:
stanzas.append(BOOLEAN_STANZA % replacements)
elif typ == common.GROUP:
stanzas.append(GROUP_STANZA % replacements)
elif typ in common.COMPLEX:
stanzas.append(COMPLEX_STANZA % replacements)
else:
stanzas.append(STANZA % replacements)
if stanzas:
# pop off the extranious } else for the first conditional stanza.
stanzas[0] = stanzas[0].replace('} else ', '', 1)
replacements = Replacements(top_node_name, name, typ, [None])
replacements['stanzas'] = '\n'.join(stanzas).strip()
print PARSER % replacements
def Replacements(top_node_name, name, typ, children):
    # CamelCaseClassName
type_name = ''.join([word.capitalize() for word in top_node_name.split('_')])
# CamelCaseClassName
camel_name = ''.join([word.capitalize() for word in name.split('_')])
# camelCaseLocalName
    attribute_name = camel_name[0].lower() + camel_name[1:]
# mFieldName
field_name = 'm' + camel_name
if children[0]:
sub_parser_camel_case = children[0] + 'Parser'
else:
sub_parser_camel_case = (camel_name[:-1] + 'Parser')
return {
'type_name': type_name,
'name': name,
'top_node_name': top_node_name,
'camel_name': camel_name,
'parser_name': typ + 'Parser',
'attribute_name': attribute_name,
'field_name': field_name,
'typ': typ,
'timestamp': datetime.datetime.now(),
'sub_parser_camel_case': sub_parser_camel_case,
'sub_type': children[0]
}
if __name__ == '__main__':
    main()
// bigrig_tests.js
'use strict';
/* global describe, it */
var fs = require('fs');
var expect = require('chai').expect;
var bigrig = require('../');
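// As exercised below, bigrig.analyze(traceContents, opts) takes raw DevTools
// trace JSON and returns an array of region objects ({title, type, start,
// end, extendedInfo, ...}), inferring the RAIL category when no timed range
// is present in the trace.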
describe('Big Rig', function () {
it ('throws if no processes are found', function () {
expect(function () {
bigrig.analyze(null);
}).to.throw('Zero processes (tabs) found.');
});
it ('throws if invalid input data is given', function () {
expect(function () {
bigrig.analyze('wobble');
}).to.throw('Invalid trace contents; not JSON');
});
it ('throws if given a trace with extensions and strict mode is enabled',
function (done) {
fs.readFile('./test/data/load-extensions.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var error = 'Extensions running during capture; ' +
'see http://bit.ly/bigrig-extensions';
expect(function () {
bigrig.analyze(data, {
strict: true
});
}).to.throw(error);
done();
});
});
// TODO(paullewis) Add multiprocess test.
it ('returns JSON for a file with a single process', function (done) {
fs.readFile('./test/data/load.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
expect(jsonData).to.be.an('array');
expect(jsonData[0]).to.be.an('object');
done();
});
});
it ('generates valid JSON', function (done) {
fs.readFile('./test/data/load.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
jsonData = JSON.parse(JSON.stringify(jsonData));
expect(jsonData).to.be.an('array');
done();
});
});
it ('supports timed ranges', function (done) {
fs.readFile('./test/data/animation.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
expect(jsonData[0]).to.be.an('object');
expect(jsonData[0].title).to.equal('sideNavAnimation');
expect(jsonData[0].start).to.be.above(0);
expect(jsonData[0].end).to.be.within(1179, 1180);
done();
});
});
it ('correctly applies RAIL type when time range is specified',
function (done) {
fs.readFile('./test/data/animation.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data, {
types: {
'sideNavAnimation': bigrig.ANIMATION
}
});
expect(jsonData[0].type).to.equal(bigrig.ANIMATION);
done();
});
});
it ('correctly infers RAIL Load when time range not specified',
function (done) {
fs.readFile('./test/data/load.json', 'utf8',
function (err, data) {
if (err) {
throw err;
        }
        var jsonData = bigrig.analyze(data);
        expect(jsonData[0].type).to.equal(bigrig.LOAD);
expect(jsonData[0].title).to.equal('Load');
done();
});
});
it ('correctly infers RAIL Response when time range not specified',
function (done) {
fs.readFile('./test/data/response.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
expect(jsonData[0].type).to.equal(bigrig.RESPONSE);
expect(jsonData[0].title).to.equal('sideNavResponse');
done();
});
});
it ('correctly infers RAIL Animation when time range not specified',
function (done) {
fs.readFile('./test/data/animation.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
expect(jsonData[0].type).to.equal(bigrig.ANIMATION);
expect(jsonData[0].title).to.equal('sideNavAnimation');
done();
});
});
it ('correctly infers multiple RAIL regions', function (done) {
fs.readFile('./test/data/response-animation.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
expect(jsonData.length).to.equal(2);
expect(jsonData[0].type).to.equal(bigrig.RESPONSE);
expect(jsonData[0].title).to.equal('sideNavResponse');
expect(jsonData[1].type).to.equal(bigrig.ANIMATION);
expect(jsonData[1].title).to.equal('sideNavAnimation');
done();
});
});
it ('returns the correct fps for animations', function (done) {
fs.readFile('./test/data/animation.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
expect(jsonData[0].fps).to.be.within(59, 61);
done();
});
});
it ('returns the correct JS breakdown', function (done) {
fs.readFile('./test/data/load.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
expect(
jsonData[0].extendedInfo.javaScript['localhost:11080']
).to.be.within(245, 246);
expect(
jsonData[0].extendedInfo.javaScript['www.google-analytics.com']
).to.be.within(59, 60);
done();
});
});
it ('correctly captures forced layouts and recalcs', function (done) {
fs.readFile('./test/data/forced-recalc-layout.json', 'utf8',
function (err, data) {
if (err) {
throw err;
}
var jsonData = bigrig.analyze(data);
expect(
jsonData[0].extendedInfo.forcedRecalcs
).to.equal(1);
expect(
jsonData[0].extendedInfo.forcedLayouts
).to.equal(1);
done();
});
});
});
// Body.cpp
#include "Body.h"
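// Note: the centre of mass computed below is an approximation. A polygon's
// is taken as the plain average of its vertices; a rectangle's is derived
// directly from its extents.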
vec_f CalculateCenterOfMass(nz::BodyPart* polygon)
{
switch (polygon->Type)
{
case nz::ShapeType::Polygon:
{
vec_f com;
auto poly = polygon->Polygon;
int count = poly->Count();
for (int i = 0; i < count; ++i)
{
vec_f vertex = poly->At(i);
com.X += vertex.X;
com.Y += vertex.Y;
}
return com / count;
}
case nz::ShapeType::Rectangle:
{
return vec_f(polygon->Rectangle.X / 2, polygon->Rectangle.Y);
}
default: break;
}
return vec_f::Zero();
}
nz::Body::Body() :
Rotation(0),
Velocity(vec_f::Zero()),
CollisionHandler(nullptr),
_position(vec_f::Zero()) {}
nz::Body::Body(vec_f position, number_f rotation) :
Rotation(rotation),
Velocity(vec_f::Zero()),
CollisionHandler(nullptr),
_position(position) {}
nz::Body::Body(vec_f position, number_f rotation, Thunk1<Body*>* const collisionHandler) :
Rotation(rotation),
Velocity(vec_f::Zero()),
CollisionHandler(collisionHandler),
_position(position) {}
nz::Aabb nz::Body::GetBoundingBox() const
{
return _boundingBox;
}
nz::Aabb nz::Body::CalculateBoundingBox() const
{
auto returnable = _boundingBox;
returnable.Min += CalculateWorldTranslation();
returnable.Max += CalculateWorldTranslation();
return returnable;
}
vec_f nz::Body::GetCenterOfMass() const
{
return _centerOfMass;
}
vec_f nz::Body::CalculateWorldTranslation() const
{
return _position + _centerOfMass;
}
void nz::Body::SetWorldTranslation(vec_f translation)
{
_position = translation - _centerOfMass;
}
void nz::Body::AttachBodyPart(std::shared_ptr<BodyPart> part)
{
Part = part;
_centerOfMass = CalculateCenterOfMass(part.get());
// Realign the position.
_position -= _centerOfMass;
Aabb aabb;
switch (Part->Type)
    {
    case ShapeType::Polygon:
{
auto poly = Part->Polygon;
int count = poly->Count();
for (int i = 0; i < count; ++i)
{
auto calcPos = poly->At(i) + _centerOfMass;
if (calcPos.X < aabb.Min.X)
{
aabb.Min.X = calcPos.X;
}
if (calcPos.Y < aabb.Min.Y)
{
aabb.Min.Y = calcPos.Y;
}
if (calcPos.X > aabb.Max.X)
{
aabb.Max.X = calcPos.X;
}
if (calcPos.Y > aabb.Max.Y)
{
aabb.Max.Y = calcPos.Y;
}
}
break;
}
case ShapeType::Rectangle:
{
auto rect = Part->Rectangle;
aabb.Min = _centerOfMass;
aabb.Max = rect + _centerOfMass;
break;
}
default:
break;
}
_boundingBox = aabb;
}
# 0002_dispensed.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('stats', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Dispensed',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('total_approved', models.IntegerField(help_text='Number of projects approved in any instance.')),
('total_dispensed', models.IntegerField(help_text='Number of projects that did not go to 2nd round of votes.')),
('dispensed_by_plenary', models.IntegerField(help_text='Those projects dispensed due to `acuerdo del pleno`.')),
('dispensed_by_spokesmen', models.IntegerField(help_text='Those projects dispensed due to `junta de portavoces`.')),
('dispensed_others', models.IntegerField(help_text='All other projects dispensed, and those with no specific reason.')),
],
            options={
            },
            bases=(models.Model,),
        ),
    ]
#!/bin/env python
# create_nested_task.py
import libsedml
def create_nested_task(file_name):
doc = libsedml.SedDocument(1, 4)
# create simulation
sim = doc.createSteadyState()
sim.setId("steady1")
# need to set the correct KISAO Term
alg = sim.createAlgorithm()
alg.setKisaoID("KISAO:0000282")
# create model
model = doc.createModel()
model.setId("model1")
model.setLanguage("urn:sedml:language:sbml")
model.setSource("oscli.xml")
# create tasks
task = doc.createTask()
task.setId("task0")
task.setModelReference("model1")
task.setSimulationReference("steady1")
task = doc.createRepeatedTask()
assert(isinstance(task, libsedml.SedRepeatedTask))
task.setId("task1")
task.setResetModel(True)
task.setRangeId("current")
    range = task.createUniformRange()
assert(isinstance(range, libsedml.SedUniformRange))
range.setId("current")
range.setStart(0)
range.setEnd(0)
range.setNumberOfSteps(100)
range.setType("linear")
change = task.createTaskChange()
assert(isinstance(change, libsedml.SedSetValue))
change.setModelReference("model1")
    change.setTarget("/sbml:sbml/sbml:model/sbml:listOfParameters/sbml:parameter[@id='J0_v0']")
change.setRange("current")
change.setMath(libsedml.parseL3Formula("current"))
subtask = task.createSubTask()
subtask.setOrder(1)
subtask.setTask("task0")
# write doc
libsedml.writeSedML(doc, file_name)
if __name__ == "__main__":
    create_nested_task('nested_task.xml')
# ofp_flow_action_pop_mpls.py
# -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
#   http://www.apache.org/licenses/LICENSE-2.0                              #
#                                                                           #
# Unless required by applicable law or agreed to in writing, software       #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from org.o3project.odenos.core.component.network.flow.basic.flow_action import (
FlowAction
)
class OFPFlowActionPopMpls(FlowAction):
MPLS_UNICAST = 0x8847
MPLS_MULTICAST = 0x8848
# property key
ETH_TYPE = "eth_type"
def __init__(self, type_, eth_type):
super(OFPFlowActionPopMpls, self).__init__(type_)
self._body[self.ETH_TYPE] = eth_type
@property
def eth_type(self):
return self._body[self.ETH_TYPE]
@classmethod
def create_from_packed(cls, packed):
return cls(packed[cls.TYPE], packed[cls.ETH_TYPE])
def packed_object(self):
        return self._body
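# Usage sketch (illustrative only; assumes the FlowAction base class records
# type_ under its TYPE key, so packed_object() round-trips):
#   act = OFPFlowActionPopMpls("OFPFlowActionPopMpls",
#                              OFPFlowActionPopMpls.MPLS_UNICAST)
#   act.eth_type                                          # -> 0x8847
#   OFPFlowActionPopMpls.create_from_packed(act.packed_object())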
// fxaa.rs
#[macro_use]
extern crate glium;
#[allow(unused_imports)]
use glium::{glutin, Surface};
mod support;
mod fxaa {
use glium::{self, Surface};
use glium::backend::Facade;
use glium::backend::Context;
use glium::framebuffer::SimpleFrameBuffer;
use std::cell::RefCell;
use std::rc::Rc;
pub struct FxaaSystem {
context: Rc<Context>,
vertex_buffer: glium::VertexBuffer<SpriteVertex>,
index_buffer: glium::IndexBuffer<u16>,
program: glium::Program,
target_color: RefCell<Option<glium::texture::Texture2d>>,
target_depth: RefCell<Option<glium::framebuffer::DepthRenderBuffer>>,
}
#[derive(Copy, Clone)]
struct SpriteVertex {
position: [f32; 2],
i_tex_coords: [f32; 2],
}
implement_vertex!(SpriteVertex, position, i_tex_coords);
impl FxaaSystem {
pub fn new<F: ?Sized>(facade: &F) -> FxaaSystem where F: Facade + Clone {
FxaaSystem {
context: facade.get_context().clone(),
vertex_buffer: glium::VertexBuffer::new(facade,
&[
SpriteVertex { position: [-1.0, -1.0], i_tex_coords: [0.0, 0.0] },
SpriteVertex { position: [-1.0, 1.0], i_tex_coords: [0.0, 1.0] },
SpriteVertex { position: [ 1.0, 1.0], i_tex_coords: [1.0, 1.0] },
SpriteVertex { position: [ 1.0, -1.0], i_tex_coords: [1.0, 0.0] }
]
).unwrap(),
index_buffer: glium::index::IndexBuffer::new(facade,
glium::index::PrimitiveType::TriangleStrip, &[1 as u16, 2, 0, 3]).unwrap(),
program: program!(facade,
100 => {
vertex: r"
#version 100
attribute vec2 position;
attribute vec2 i_tex_coords;
varying vec2 v_tex_coords;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
v_tex_coords = i_tex_coords;
}
",
fragment: r"
#version 100
precision mediump float;
uniform vec2 resolution;
uniform sampler2D tex;
uniform int enabled;
varying vec2 v_tex_coords;
#define FXAA_REDUCE_MIN (1.0/ 128.0)
#define FXAA_REDUCE_MUL (1.0 / 8.0)
#define FXAA_SPAN_MAX 8.0
vec4 fxaa(sampler2D tex, vec2 fragCoord, vec2 resolution,
vec2 v_rgbNW, vec2 v_rgbNE,
vec2 v_rgbSW, vec2 v_rgbSE,
vec2 v_rgbM) {
vec4 color;
mediump vec2 inverseVP = vec2(1.0 / resolution.x, 1.0 / resolution.y);
vec3 rgbNW = texture2D(tex, v_rgbNW).xyz;
vec3 rgbNE = texture2D(tex, v_rgbNE).xyz;
vec3 rgbSW = texture2D(tex, v_rgbSW).xyz;
vec3 rgbSE = texture2D(tex, v_rgbSE).xyz;
vec4 texColor = texture2D(tex, v_rgbM);
vec3 rgbM = texColor.xyz;
vec3 luma = vec3(0.299, 0.587, 0.114);
float lumaNW = dot(rgbNW, luma);
float lumaNE = dot(rgbNE, luma);
float lumaSW = dot(rgbSW, luma);
float lumaSE = dot(rgbSE, luma);
float lumaM = dot(rgbM, luma);
float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE)));
float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE)));
mediump vec2 dir;
dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE));
dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE));
float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) *
(0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN);
float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce);
dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX),
max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX),
dir * rcpDirMin)) * inverseVP;
vec3 rgbA = 0.5 * (
texture2D(tex, fragCoord * inverseVP + dir * (1.0 / 3.0 - 0.5)).xyz +
texture2D(tex, fragCoord * inverseVP + dir * (2.0 / 3.0 - 0.5)).xyz);
vec3 rgbB = rgbA * 0.5 + 0.25 * (
texture2D(tex, fragCoord * inverseVP + dir * -0.5).xyz +
texture2D(tex, fragCoord * inverseVP + dir * 0.5).xyz);
float lumaB = dot(rgbB, luma);
if ((lumaB < lumaMin) || (lumaB > lumaMax))
color = vec4(rgbA, texColor.a);
else
color = vec4(rgbB, texColor.a);
return color;
}
void main() {
vec2 fragCoord = v_tex_coords * resolution;
vec4 color;
if (enabled != 0) {
vec2 inverseVP = 1.0 / resolution.xy;
mediump vec2 v_rgbNW = (fragCoord + vec2(-1.0, -1.0)) * inverseVP;
mediump vec2 v_rgbNE = (fragCoord + vec2(1.0, -1.0)) * inverseVP;
mediump vec2 v_rgbSW = (fragCoord + vec2(-1.0, 1.0)) * inverseVP;
mediump vec2 v_rgbSE = (fragCoord + vec2(1.0, 1.0)) * inverseVP;
mediump vec2 v_rgbM = vec2(fragCoord * inverseVP);
color = fxaa(tex, fragCoord, resolution, v_rgbNW, v_rgbNE, v_rgbSW,
v_rgbSE, v_rgbM);
} else {
color = texture2D(tex, v_tex_coords);
}
gl_FragColor = color;
}
"
}
).unwrap(),
target_color: RefCell::new(None),
target_depth: RefCell::new(None),
}
}
}
pub fn draw<T, F, R>(system: &FxaaSystem, target: &mut T, enabled: bool, mut draw: F)
-> R where T: Surface, F: FnMut(&mut SimpleFrameBuffer) -> R
{
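        // Render the caller's scene into an offscreen color/depth target,
        // then draw a fullscreen quad that samples it through the FXAA
        // fragment shader. The offscreen targets are lazily (re)created
        // whenever the surface dimensions change.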
let target_dimensions = target.get_dimensions();
let mut target_color = system.target_color.borrow_mut();
let mut target_depth = system.target_depth.borrow_mut();
{
let clear = if let &Some(ref tex) = &*target_color {
tex.get_width() != target_dimensions.0 ||
tex.get_height().unwrap() != target_dimensions.1
} else {
false
};
if clear { *target_color = None; }
}
{
let clear = if let &Some(ref tex) = &*target_depth {
tex.get_dimensions() != target_dimensions
} else {
false
};
if clear { *target_depth = None; }
}
if target_color.is_none() {
let texture = glium::texture::Texture2d::empty(&system.context,
target_dimensions.0 as u32,
target_dimensions.1 as u32).unwrap();
*target_color = Some(texture);
}
let target_color = target_color.as_ref().unwrap();
if target_depth.is_none() {
let texture = glium::framebuffer::DepthRenderBuffer::new(&system.context,
glium::texture::DepthFormat::I24,
target_dimensions.0 as u32,
target_dimensions.1 as u32).unwrap();
*target_depth = Some(texture);
}
let target_depth = target_depth.as_ref().unwrap();
let output = draw(&mut SimpleFrameBuffer::with_depth_buffer(&system.context, target_color,
target_depth).unwrap());
let uniforms = uniform! {
tex: &*target_color,
enabled: if enabled { 1i32 } else { 0i32 },
resolution: (target_dimensions.0 as f32, target_dimensions.1 as f32)
};
target.draw(&system.vertex_buffer, &system.index_buffer, &system.program, &uniforms,
&Default::default()).unwrap();
output
}
}
fn main() {
println!("This example demonstrates FXAA. Is is an anti-aliasing technique done at the \
post-processing stage. This example draws the teapot to a framebuffer and then \
copies from the texture to the main framebuffer by applying a filter to it.\n\
You can use the space bar to switch fxaa on and off.");
// building the display, ie. the main object
let event_loop = glutin::event_loop::EventLoop::new();
let wb = glutin::window::WindowBuilder::new();
let cb = glutin::ContextBuilder::new()
.with_depth_buffer(24)
.with_vsync(true);
let display = glium::Display::new(wb, cb, &event_loop).unwrap();
// building the vertex and index buffers
let vertex_buffer = support::load_wavefront(&display, include_bytes!("support/teapot.obj"));
// the program
let program = program!(&display,
140 => {
vertex: "
#version 140
uniform mat4 persp_matrix;
uniform mat4 view_matrix;
in vec3 position;
in vec3 normal;
out vec3 v_position;
out vec3 v_normal;
void main() {
v_position = position;
v_normal = normal;
gl_Position = persp_matrix * view_matrix * vec4(v_position * 0.005, 1.0);
}
",
fragment: "
#version 140
in vec3 v_normal;
out vec4 f_color;
const vec3 LIGHT = vec3(-0.2, 0.8, 0.1);
void main() {
float lum = max(dot(normalize(v_normal), normalize(LIGHT)), 0.0);
vec3 color = (0.3 + 0.7 * lum) * vec3(1.0, 1.0, 1.0);
f_color = vec4(color, 1.0);
}
",
},
110 => {
vertex: "
#version 110
uniform mat4 persp_matrix;
uniform mat4 view_matrix;
attribute vec3 position;
attribute vec3 normal;
varying vec3 v_position;
varying vec3 v_normal;
void main() {
v_position = position;
v_normal = normal;
gl_Position = persp_matrix * view_matrix * vec4(v_position * 0.005, 1.0);
}
",
fragment: "
#version 110
varying vec3 v_normal;
const vec3 LIGHT = vec3(-0.2, 0.8, 0.1);
void main() {
float lum = max(dot(normalize(v_normal), normalize(LIGHT)), 0.0);
vec3 color = (0.3 + 0.7 * lum) * vec3(1.0, 1.0, 1.0);
gl_FragColor = vec4(color, 1.0);
}
",
},
100 => {
vertex: "
#version 100
uniform lowp mat4 persp_matrix;
uniform lowp mat4 view_matrix;
attribute lowp vec3 position;
attribute lowp vec3 normal;
varying lowp vec3 v_position;
varying lowp vec3 v_normal;
void main() {
v_position = position;
v_normal = normal;
gl_Position = persp_matrix * view_matrix * vec4(v_position * 0.005, 1.0);
}
",
fragment: "
#version 100
varying lowp vec3 v_normal;
const lowp vec3 LIGHT = vec3(-0.2, 0.8, 0.1);
void main() {
lowp float lum = max(dot(normalize(v_normal), normalize(LIGHT)), 0.0);
lowp vec3 color = (0.3 + 0.7 * lum) * vec3(1.0, 1.0, 1.0);
gl_FragColor = vec4(color, 1.0);
}
",
},
).unwrap();
//
let mut camera = support::camera::CameraState::new();
let fxaa = fxaa::FxaaSystem::new(&display);
let mut fxaa_enabled = true;
// the main loop
support::start_loop(event_loop, move |events| {
camera.update();
// building the uniforms
let uniforms = uniform! {
persp_matrix: camera.get_perspective(),
view_matrix: camera.get_view(),
};
// draw parameters
let params = glium::DrawParameters {
depth: glium::Depth {
test: glium::DepthTest::IfLess,
write: true,
.. Default::default()
},
.. Default::default()
};
        // drawing a frame
        let mut target = display.draw();
        fxaa::draw(&fxaa, &mut target, fxaa_enabled, |target| {
target.clear_color_and_depth((0.0, 0.0, 0.0, 0.0), 1.0);
target.draw(&vertex_buffer,
&glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),
&program, &uniforms, ¶ms).unwrap();
});
target.finish().unwrap();
let mut action = support::Action::Continue;
// polling and handling the events received by the window
for event in events {
match event {
glutin::event::Event::WindowEvent { event, .. } => {
camera.process_input(&event);
match event {
glutin::event::WindowEvent::CloseRequested => action = support::Action::Stop,
glutin::event::WindowEvent::KeyboardInput { input, .. } => match input.state {
glutin::event::ElementState::Pressed => match input.virtual_keycode {
Some(glutin::event::VirtualKeyCode::Escape) => action = support::Action::Stop,
Some(glutin::event::VirtualKeyCode::Space) => {
fxaa_enabled = !fxaa_enabled;
println!("FXAA is now {}", if fxaa_enabled { "enabled" } else { "disabled" });
},
_ => (),
},
_ => (),
},
_ => (),
}
},
_ => (),
}
};
action
});
}
// prePublishThaliCordovaPlugin.js
'use strict';

// prePublish gets run on 'npm install' (e.g. even if you aren't actually publishing)
// so we have to check to make sure that we are in our own directory and this isn't
// some poor user trying to install our package.
var path = require('path');
var fs = require('fs');
var rootDirectory = path.join(__dirname, "../../");
if (path.basename(rootDirectory) != "Thali_CordovaPlugin") {
process.exit(0);
}
var readMeFileName = "readme.md";
var parentReadMe = path.join(__dirname, "../../", readMeFileName);
var localReadMe = path.join(__dirname, "../", readMeFileName);
fs.writeFileSync(localReadMe, fs.readFileSync(parentReadMe));
process.exit(0);
# mpi_esm.py
# -*- coding: utf-8 -*-
"""
This file is part of pyCMBS.
(c) 2012- Alexander Loew
For COPYING and LICENSE details, please refer to the LICENSE file
"""
from cdo import Cdo
from pycmbs.data import Data
import tempfile as tempfile
import copy
import glob
import os
import sys
import numpy as np
from pycmbs.benchmarking import preprocessor
from pycmbs.benchmarking.utils import get_T63_landseamask, get_temporary_directory
from pycmbs.benchmarking.models.model_basic import *
class JSBACH_BOT(Model):
def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, **kwargs):
super(JSBACH_BOT, self).__init__(filename, dic_variables, name=name, **kwargs)
self.experiment = experiment
self.shift_lon = shift_lon
self.type = 'JSBACH_BOT'
self._unique_name = self._get_unique_name()
def _get_unique_name(self):
"""
get unique name from model and experiment
@return: string with unique combination of models and experiment
"""
return self.name.replace(' ', '') + '-' + self.experiment.replace(' ', '')
def get_albedo_data(self, interval='season'):
"""
get albedo data for JSBACH
returns Data object
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
v = 'var176'
filename = self.data_dir + 'data/model1/' + self.experiment + '_echam6_BOT_mm_1979-2006_albedo_yseasmean.nc'
ls_mask = get_T63_landseamask(self.shift_lon)
albedo = Data(filename, v, read=True,
label='MPI-ESM albedo ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data)
return albedo
def get_tree_fraction(self, interval='season'):
"""
todo implement this for data from a real run !!!
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
ls_mask = get_T63_landseamask(self.shift_lon)
filename = '/home/m300028/shared/dev/svn/trstools-0.0.1/lib/python/pyCMBS/framework/external/vegetation_benchmarking/VEGETATION_COVER_BENCHMARKING/example/historical_r1i1p1-LR_1850-2005_forest_shrub.nc'
v = 'var12'
tree = Data(filename, v, read=True,
label='MPI-ESM tree fraction ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data, start_time=pl.num2date(pl.datestr2num('2001-01-01')), stop_time=pl.num2date(pl.datestr2num('2001-12-31')))
return tree
def get_grass_fraction(self, interval='season'):
"""
todo implement this for data from a real run !!!
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
ls_mask = get_T63_landseamask(self.shift_lon)
filename = '/home/m300028/shared/dev/svn/trstools-0.0.1/lib/python/pyCMBS/framework/external/vegetation_benchmarking/VEGETATION_COVER_BENCHMARKING/example/historical_r1i1p1-LR_1850-2005_grass_crop_pasture_2001.nc'
v = 'var12'
grass = Data(filename, v, read=True,
label='MPI-ESM tree fraction ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
#shift_lon=shift_lon,
mask=ls_mask.data.data, start_time=pl.num2date(pl.datestr2num('2001-01-01')), stop_time=pl.num2date(pl.datestr2num('2001-12-31')), squeeze=True)
return grass
def get_surface_shortwave_radiation_down(self, interval='season'):
"""
get surface shortwave incoming radiation data for JSBACH
returns Data object
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
v = 'var176'
y1 = '1979-01-01'
y2 = '2006-12-31'
rawfilename = self.data_dir + 'data/model/' + self.experiment + '_echam6_BOT_mm_1979-2006_srads.nc'
if not os.path.exists(rawfilename):
return None
#--- read data
cdo = pyCDO(rawfilename, y1, y2)
if interval == 'season':
seasfile = cdo.seasmean()
del cdo
print 'seasfile: ', seasfile
cdo = pyCDO(seasfile, y1, y2)
filename = cdo.yseasmean()
else:
raise ValueError('Invalid interval option %s ' % interval)
#--- read land-sea mask
ls_mask = get_T63_landseamask(self.shift_lon)
#--- read SIS data
sis = Data(filename, v, read=True,
label='MPI-ESM SIS ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
#shift_lon=shift_lon,
mask=ls_mask.data.data)
return sis
def get_rainfall_data(self, interval='season'):
"""
get rainfall data for JSBACH
returns Data object
"""
if interval == 'season':
pass
else:
raise ValueError('Invalid value for interval: %s' % interval)
#/// PREPROCESSING: seasonal means ///
s_start_time = str(self.start_time)[0:10]
s_stop_time = str(self.stop_time)[0:10]
filename1 = self.data_dir + self.experiment + '_echam6_BOT_mm_1980_sel.nc'
tmp = pyCDO(filename1, s_start_time, s_stop_time).seldate()
tmp1 = pyCDO(tmp, s_start_time, s_stop_time).seasmean()
filename = pyCDO(tmp1, s_start_time, s_stop_time).yseasmean()
#/// READ DATA ///
#1) land / sea mask
ls_mask = get_T63_landseamask(self.shift_lon)
#2) precipitation data
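        #   (the precipitation variable code differs between runs, hence the
        #   var4/var142 fallback in the try/except below)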
try:
v = 'var4'
rain = Data(filename, v, read=True, scale_factor=86400.,
label='MPI-ESM ' + self.experiment, unit='mm/day', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data)
except:
v = 'var142'
rain = Data(filename, v, read=True, scale_factor=86400.,
label='MPI-ESM ' + self.experiment, unit='mm/day', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data)
return rain
class JSBACH_RAW2(Model):
"""
Class for RAW JSBACH model output
works on the real raw output
"""
#def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, model_dict=None, input_format='grb', raw_outdata='outdata/jsbach/', **kwargs):
def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, input_format='grb', raw_outdata='outdata/jsbach/', **kwargs):
"""
The assignment of certain variables to different input streams is done in the routine
get_jsbach_data_generic()
Parameters
----------
input_format : str
specifies file format of input data
['nc','grb']
"""
super(JSBACH_RAW2, self).__init__(filename, dic_variables, name=name, **kwargs)
self.experiment = experiment
self.shift_lon = shift_lon
#self.get_data()
self.type = 'JSBACH_RAW2'
self.input_format = input_format
assert self.input_format in ['nc', 'grb']
self.raw_outdata = raw_outdata
self._unique_name = self._get_unique_name()
# do preprocessing of streams (only needed once!) ---
self.files = {}
self._preproc_streams()
#~ self.model_dict = copy.deepcopy(model_dict)
self.model = 'JSBACH'
def _get_filenames_jsbach_stream(self):
return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_main_mm_*.' + self.input_format
def _get_filenames_veg_stream(self):
return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_veg_mm_*.' + self.input_format
def _get_filenames_land_stream(self):
return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_land_mm_*.' + self.input_format
def _get_filenames_surf_stream(self):
return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_surf_mm_*.' + self.input_format
def _get_filenames_albedo_VIS(self):
return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_mm_*_VIS_albedo.' + self.input_format
def _get_filenames_albedo_NIR(self):
return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_mm_*_NIR_albedo.' + self.input_format
def _get_filenames_echam_BOT(self):
return self.data_dir + self.raw_outdata + '../echam6/' + self.experiment + '_echam6_BOT_mm_*.sz'
def _preproc_streams(self):
"""
It is assumed that the standard JSBACH postprocessing scripts have been applied.
Thus monthly mean data is available for each stream and code tables still need to be applied.
This routine does the following:
1) merge all times from individual (monthly mean) output files
2) assign codetables to work with proper variable names
3) aggregate data from tiles to gridbox values
"""
print 'Preprocessing JSBACH raw data streams (may take a while) ...'
cdo = Cdo()
# jsbach stream
print ' JSBACH stream ...'
outfile = get_temporary_directory() + self.experiment + '_jsbach_mm_full.nc'
if os.path.exists(outfile):
pass
else:
codetable = self.data_dir + 'log/' + self.experiment + '_jsbach.codes'
tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_jsbach_', dir=get_temporary_directory()) # temporary file
#~ print self.data_dir
#~ print self.raw_outdata
#~ print 'Files: ', self._get_filenames_jsbach_stream()
#~ stop
if len(glob.glob(self._get_filenames_jsbach_stream())) > 0: # check if input files existing at all
print 'Mering the following files:', self._get_filenames_jsbach_stream()
cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_jsbach_stream())
if os.path.exists(codetable):
cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp) # monmean needed here, as otherwise interface does not work
else:
cdo.monmean(options='-f nc', output=outfile, input=tmp) # monmean needed here, as otherwise interface does not work
print 'Outfile: ', outfile
#~ os.remove(tmp)
print 'Temporary name: ', tmp
self.files.update({'jsbach': outfile})
# veg stream
print ' VEG stream ...'
outfile = get_temporary_directory() + self.experiment + '_jsbach_veg_mm_full.nc'
if os.path.exists(outfile):
pass
else:
codetable = self.data_dir + 'log/' + self.experiment + '_jsbach_veg.codes'
tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_jsbach_veg_', dir=get_temporary_directory()) # temporary file
if len(glob.glob(self._get_filenames_veg_stream())) > 0: # check if input files existing at all
cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_veg_stream())
if os.path.exists(codetable):
cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp) # monmean needed here, as otherwise interface does not work
else:
cdo.monmean(options='-f nc', output=outfile, input=tmp) # monmean needed here, as otherwise interface does not work
os.remove(tmp)
self.files.update({'veg': outfile})
# veg land
print ' LAND stream ...'
outfile = get_temporary_directory() + self.experiment + '_jsbach_land_mm_full.nc'
if os.path.exists(outfile):
pass
else:
codetable = self.data_dir + 'log/' + self.experiment + '_jsbach_land.codes'
tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_jsbach_land_', dir=get_temporary_directory()) # temporary file
if len(glob.glob(self._get_filenames_land_stream())) > 0: # check if input files existing at all
cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_land_stream())
if os.path.exists(codetable):
cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp) # monmean needed here, as otherwise interface does not work
else:
cdo.monmean(options='-f nc', output=outfile, input=tmp) # monmean needed here, as otherwise interface does not work
os.remove(tmp)
self.files.update({'land': outfile})
# surf stream
print ' SURF stream ...'
outfile = get_temporary_directory() + self.experiment + '_jsbach_surf_mm_full.nc'
if os.path.exists(outfile):
pass
else:
codetable = self.data_dir + 'log/' + self.experiment + '_jsbach_surf.codes'
tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_jsbach_surf_', dir=get_temporary_directory()) # temporary file
if len(glob.glob(self._get_filenames_surf_stream())) > 0: # check if input files existing at all
print glob.glob(self._get_filenames_surf_stream())
cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_surf_stream())
if os.path.exists(codetable):
cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp) # monmean needed here, as otherwise interface does not work
else:<|fim▁hole|>
# ECHAM BOT stream
print ' BOT stream ...'
outfile = get_temporary_directory() + self.experiment + '_echam6_echam_mm_full.nc'
if os.path.exists(outfile):
pass
else:
codetable = self.data_dir + 'log/' + self.experiment + '_echam6_echam.codes'
tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_echam6_echam_', dir=get_temporary_directory()) # temporary file
if len(glob.glob(self._get_filenames_echam_BOT())) > 0: # check if input files existing at all
cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_echam_BOT())
if os.path.exists(codetable):
cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp) # monmean needed here, as otherwise interface does not work
else:
cdo.monmean(options='-f nc', output=outfile, input=tmp) # monmean needed here, as otherwise interface does not work
os.remove(tmp)
self.files.update({'echam': outfile})
# ALBEDO file
# albedo files as preprocessed by a script of Thomas
print ' ALBEDO VIS stream ...'
outfile = get_temporary_directory() + self.experiment + '_jsbach_VIS_albedo_mm_full.nc'
if os.path.exists(outfile):
pass
else:
if len(glob.glob(self._get_filenames_albedo_VIS())) > 0: # check if input files existing at all
cdo.mergetime(options='-f nc', output=outfile, input=self._get_filenames_albedo_VIS())
self.files.update({'albedo_vis': outfile})
print ' ALBEDO NIR stream ...'
outfile = get_temporary_directory() + self.experiment + '_jsbach_NIR_albedo_mm_full.nc'
if os.path.exists(outfile):
pass
else:
if len(glob.glob(self._get_filenames_albedo_NIR())) > 0: # check if input files existing at all
cdo.mergetime(options='-f nc', output=outfile, input=self._get_filenames_albedo_NIR())
self.files.update({'albedo_nir': outfile})
def _get_unique_name(self):
"""
get unique name from model and experiment
@return: string with unique combination of models and experiment
"""
return self.name.replace(' ', '') + '-' + self.experiment.replace(' ', '')
def get_albedo_data(self, interval='season'):
"""
calculate albedo as the ratio of upward and downwelling fluxes.
First the monthly mean fluxes are used to calculate the albedo.
This routine relies on the model-specific routines for reading
the upward and downward fluxes.
"""
if self.start_time is None:
raise ValueError('Start time needs to be specified')
if self.stop_time is None:
raise ValueError('Stop time needs to be specified')
#~ tmpdict = copy.deepcopy(kwargs)
#~ print self.dic_vars
routine_up = self.dic_vars['surface_upward_flux']
routine_down = self.dic_vars['sis']
#sw_down = self.get_surface_shortwave_radiation_down(interval=interval, **kwargs)
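# dynamic dispatch (sketch): routine_up/routine_down hold call strings
# such as 'get_surface_shortwave_radiation_down(interval=interval)'
# taken from the dic_vars mapping; exec() runs them on self below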
cmd = 'sw_down = self.' + routine_down
exec(cmd)
#sw_up = self.get_surface_shortwave_radiation_up(interval=interval, **kwargs)
cmd = 'sw_up = self.' + routine_up
exec(cmd)
# climatological mean
alb = sw_up[0].div(sw_down[0])
alb.label = self.experiment + ' albedo'
alb.unit = '-'
# original data
alb_org = sw_up[1][2].div(sw_down[1][2])
alb_org.label = self.experiment + ' albedo'
alb_org.unit = '-'
retval = (alb_org.time, alb_org.fldmean(), alb_org)
return alb, retval
def get_albedo_data_vis(self, interval='season', **kwargs):
"""
This routine retrieves the JSBACH albedo information for VIS
it requires a preprocessing with a script that aggregates from tile
to box values!
Parameters
----------
interval : str
['season','monthly']
"""
#~ tmpdict = copy.deepcopy(self.model_dict['albedo_vis'])
return self.get_jsbach_data_generic(interval=interval, **kwargs)
def get_albedo_data_nir(self, interval='season', **kwargs):
"""
This routine retrieves the JSBACH albedo information for NIR
it requires a preprocessing with a script that aggregates from tile
to box values!
Parameters
----------
interval : str
['season','monthly']
"""
#~ tmpdict = copy.deepcopy(self.model_dict['albedo_nir'])
return self.get_jsbach_data_generic(interval=interval, **kwargs)
def get_surface_shortwave_radiation_up(self, interval='season', **kwargs):
return self.get_jsbach_data_generic(interval=interval, **kwargs)
def get_surface_shortwave_radiation_down(self, interval='season', **kwargs):
return self.get_jsbach_data_generic(interval=interval, **kwargs)
def get_rainfall_data(self, interval='season', **kwargs):
return self.get_jsbach_data_generic(interval=interval, **kwargs)
def get_temperature_2m(self, interval='season', **kwargs):
return self.get_jsbach_data_generic(interval=interval, **kwargs)
def get_jsbach_data_generic(self, interval='season', **kwargs):
"""
unique parameters are:
filename - file basename
variable - name of the variable as the short_name in the netcdf file
kwargs is a dictionary with keys for each model. Then a dictionary with properties follows
"""
if not self.type in kwargs.keys():
print 'WARNING: it is not possible to get data using the generic function, as the settings for this model type are missing: ', self.type, kwargs.keys()
return None
print self.type
print kwargs
locdict = kwargs[self.type]
# read settings and details from the keyword arguments
# no defaults; everything should be explicitly specified in either the config file or the dictionaries
varname = locdict.pop('variable')
units = locdict.pop('unit', 'Unit not specified')
lat_name = locdict.pop('lat_name', 'lat')
lon_name = locdict.pop('lon_name', 'lon')
#model_suffix = locdict.pop('model_suffix')
#model_prefix = locdict.pop('model_prefix')
file_format = locdict.pop('file_format')
scf = locdict.pop('scale_factor')
valid_mask = locdict.pop('valid_mask')
custom_path = locdict.pop('custom_path', None)
thelevel = locdict.pop('level', None)
target_grid = self._actplot_options['targetgrid']
interpolation = self._actplot_options['interpolation']
if self.type != 'JSBACH_RAW2':
print self.type
raise ValueError('Invalid model type for JSBACH_RAW2 processing!')
# define from which stream of JSBACH data needs to be taken for specific variables
if varname in ['swdown_acc', 'swdown_reflect_acc']:
filename1 = self.files['jsbach']
elif varname in ['precip_acc']:
filename1 = self.files['land']
elif varname in ['temp2']:
filename1 = self.files['echam']
elif varname in ['var14']: # albedo vis
filename1 = self.files['albedo_vis']
elif varname in ['var15']: # albedo NIR
filename1 = self.files['albedo_nir']
else:
print varname
raise ValueError('Unknown variable type for JSBACH_RAW2 processing!')
force_calc = False
if self.start_time is None:
raise ValueError('Start time needs to be specified')
if self.stop_time is None:
raise ValueError('Stop time needs to be specified')
#/// PREPROCESSING ///
cdo = Cdo()
s_start_time = str(self.start_time)[0:10]
s_stop_time = str(self.stop_time)[0:10]
#1) select timeperiod and generate monthly mean file
if target_grid == 't63grid':
gridtok = 'T63'
else:
gridtok = 'SPECIAL_GRID'
file_monthly = filename1[:-3] + '_' + s_start_time + '_' + s_stop_time + '_' + gridtok + '_monmean.nc' # target filename
file_monthly = get_temporary_directory() + os.path.basename(file_monthly)
sys.stdout.write('\n *** Model file monthly: %s\n' % file_monthly)
if not os.path.exists(filename1):
print 'WARNING: file does not exist: ' + filename1
return None
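# equivalent CDO shell pipeline (sketch; the interpolation operator,
# e.g. remapbil, comes from the plot options):
#   cdo -f nc monmean -remapbil,t63grid -seldate,<start>,<stop> in.nc out.nc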
cdo.monmean(options='-f nc', output=file_monthly, input='-' + interpolation + ',' + target_grid + ' -seldate,' + s_start_time + ',' + s_stop_time + ' ' + filename1, force=force_calc)
sys.stdout.write('\n *** Reading model data... \n')
sys.stdout.write(' Interval: ' + interval + '\n')
#2) calculate monthly or seasonal climatology
if interval == 'monthly':
mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc) # number of samples
elif interval == 'season':
mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc) # number of samples
else:
raise ValueError('Unknown temporal interval. Cannot perform preprocessing!')
if not os.path.exists(mdata_clim_file):
return None
#3) read data
if interval == 'monthly':
thetime_cycle = 12
elif interval == 'season':
thetime_cycle = 4
else:
print interval
raise ValueError('Unsupported interval!')
mdata = Data(mdata_clim_file, varname, read=True, label=self.model, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel, time_cycle=thetime_cycle)
mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self.model + ' std', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, level=thelevel, time_cycle=thetime_cycle)
mdata.std = mdata_std.data.copy()
del mdata_std
mdata_N = Data(mdata_N_file, varname, read=True, label=self.model + ' std', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel)
mdata.n = mdata_N.data.copy()
del mdata_N
# ensure that the climatology always starts with January, therefore set date and then sort
mdata.adjust_time(year=1700, day=15) # set arbitrary time for climatology
mdata.timsort()
#4) read monthly data
mdata_all = Data(file_monthly, varname, read=True, label=self.model, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, time_cycle=12, scale_factor=scf, level=thelevel)
mdata_all.adjust_time(day=15)
if target_grid == 't63grid':
mdata._apply_mask(get_T63_landseamask(False, area=valid_mask))
mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask))
else:
tmpmsk = get_generic_landseamask(False, area=valid_mask, target_grid=target_grid)
mdata._apply_mask(tmpmsk)
mdata_all._apply_mask(tmpmsk)
del tmpmsk
mdata_mean = mdata_all.fldmean()
# return data as a tuple list
retval = (mdata_all.time, mdata_mean, mdata_all)
del mdata_all
return mdata, retval
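# Minimal sketch of the per-model-type keyword structure expected by
# get_jsbach_data_generic(); every value below is an illustrative
# assumption, not a setting from any real configuration file.
EXAMPLE_JSBACH_RAW2_KWARGS = {
    'JSBACH_RAW2': {
        'variable': 'temp2',  # netCDF short name
        'unit': 'K',
        'file_format': 'nc',
        'scale_factor': 1.,
        'valid_mask': 'land',
    }
}
# usage sketch: model.get_temperature_2m(interval='monthly', **EXAMPLE_JSBACH_RAW2_KWARGS)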
class JSBACH_SPECIAL(JSBACH_RAW2):
"""
special class for more flexible reading of JSBACH input data
it allows to specify the input format and the directory of the input data
in case that you use a different setup, it is probably easiest to
just copy this class and make the required adaptations.
"""
def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, model_dict=None, input_format='nc', raw_outdata='', **kwargs):
super(JSBACH_SPECIAL, self).__init__(filename, dic_variables, experiment, name=name, shift_lon=shift_lon, model_dict=model_dict, input_format=input_format, raw_outdata=raw_outdata, **kwargs)
class JSBACH_RAW(Model):
"""
Class for RAW JSBACH model output
works on manually preprocessed already concatenated data
"""
def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, intervals='monthly', **kwargs):
super(JSBACH_RAW, self).__init__(filename, dic_variables, name=name, intervals=intervals, **kwargs)
print('WARNING: This model class should be deprecated as it contains a lot of hardcoded dependencies and is only intermediate')
# TODO: deprecate this class
raise NotImplementedError('JSBACH_RAW is deprecated and must not be used')
self.experiment = experiment
self.shift_lon = shift_lon
self.type = 'JSBACH_RAW'
self._unique_name = self._get_unique_name()
def _get_unique_name(self):
"""
get unique name from model and experiment
"""
return self.name.replace(' ', '') + '-' + self.experiment.replace(' ', '')
def get_temperature_2m(self, interval='monthly', **kwargs):
"""
get surface temperature (2m) from JSBACH model results
Parameters
----------
interval : str
specifies the aggregation interval. Possible options: ['season','monthly']
"""
locdict = kwargs[self.type]
y1 = '1980-01-01' # TODO move this to the JSON dictionary or some parameter file
y2 = '2010-12-31'
variable = 'temp2'
rawfile = self.data_dir + self.experiment + '_echam6_echam_' + variable + '_ALL.nc'
files = glob.glob(rawfile)
if len(files) != 1:
print 'Inputfiles: ', files
raise ValueError('Something went wrong: Invalid number of input files!')
else:
rawfile = files[0]
mdata, retval = self._do_preprocessing(rawfile, variable, y1, y2, interval=interval, valid_mask=locdict['valid_mask'])
return mdata, retval
def get_albedo_data(self, interval='monthly', **kwargs):
"""
calculate albedo as the ratio of upward and downwelling fluxes.
First the monthly mean fluxes are used to calculate the albedo.
"""
# read land-sea mask
ls_mask = get_T63_landseamask(self.shift_lon) # TODO make this more flexible
if self.start_time is None:
raise ValueError('Start time needs to be specified')
if self.stop_time is None:
raise ValueError('Stop time needs to be specified')
Fd = self.get_surface_shortwave_radiation_down(**kwargs)
Fu = self.get_surface_shortwave_radiation_up(**kwargs)
if Fu is None:
print 'File does not exist for UPWARD flux!: ', self.name
return None
else:
Fu_i = Fu[0]
if Fu_i is None:
return None
if Fd is None:
print 'File does not exist for DOWNWARD flux!: ', self.name
return None
else:
Fd_i = Fd[0]
if Fd_i is None:
return None
lab = Fu_i.label
# albedo for the chosen interval is calculated as the ratio of the mean fluxes in that interval (e.g. season, months)
Fu_i.div(Fd_i, copy=False)
del Fd_i # Fu contains now the albedo
Fu_i._apply_mask(ls_mask.data)
# albedo for monthly data (needed for global mean plots)
Fu_m = Fu[1][2]
del Fu
Fd_m = Fd[1][2]
del Fd
Fu_m.div(Fd_m, copy=False)
del Fd_m
Fu_m._apply_mask(ls_mask.data)
Fu_m._set_valid_range(0., 1.)
Fu_m.label = lab + ' albedo'
Fu_i.label = lab + ' albedo'
Fu_m.unit = '-'
Fu_i.unit = '-'
# center dates of months
Fu_m.adjust_time(day=15)
Fu_i.adjust_time(day=15)
# return data as a tuple list
retval = (Fu_m.time, Fu_m.fldmean(), Fu_m)
return Fu_i, retval
#-----------------------------------------------------------------------
def _do_preprocessing(self, rawfile, varname, s_start_time, s_stop_time, interval='monthly', force_calc=False, valid_mask='global', target_grid='t63grid'):
"""
perform preprocessing
* selection of variable
* temporal subsetting
"""
cdo = Cdo()
if not os.path.exists(rawfile):
print('File not existing! %s ' % rawfile)
return None, None
# calculate monthly means
file_monthly = get_temporary_directory() + os.sep + os.path.basename(rawfile[:-3]) + '_' + varname + '_' + s_start_time + '_' + s_stop_time + '_mm.nc'
if (force_calc) or (not os.path.exists(file_monthly)):
cdo.monmean(options='-f nc', output=file_monthly, input='-seldate,' + s_start_time + ',' + s_stop_time + ' ' + '-selvar,' + varname + ' ' + rawfile, force=force_calc)
else:
pass
if not os.path.exists(file_monthly):
raise ValueError('Monthly preprocessing did not work! %s ' % file_monthly)
# calculate monthly or seasonal climatology
if interval == 'monthly':
mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc) # number of samples
elif interval == 'season':
mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc) # number of samples
else:
raise ValueError('Unknown temporal interval. Cannot perform preprocessing!')
if not os.path.exists(mdata_clim_file):
return None
# read data
if interval == 'monthly':
thetime_cycle = 12
elif interval == 'season':
thetime_cycle = 4
else:
print interval
raise ValueError('Unsupported interval!')
mdata = Data(mdata_clim_file, varname, read=True, label=self.name, shift_lon=False, time_cycle=thetime_cycle, lat_name='lat', lon_name='lon')
mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self.name + ' std', unit='-', shift_lon=False, time_cycle=thetime_cycle, lat_name='lat', lon_name='lon')
mdata.std = mdata_std.data.copy()
del mdata_std
mdata_N = Data(mdata_N_file, varname, read=True, label=self.name + ' std', shift_lon=False, lat_name='lat', lon_name='lon')
mdata.n = mdata_N.data.copy()
del mdata_N
# ensure that climatology always starts with January, therefore set date and then sort
mdata.adjust_time(year=1700, day=15) # set arbitrary time for climatology
mdata.timsort()
#4) read monthly data
mdata_all = Data(file_monthly, varname, read=True, label=self.name, shift_lon=False, time_cycle=12, lat_name='lat', lon_name='lon')
mdata_all.adjust_time(day=15)
# mask_antarctica masks everything below 60 degrees S.
# here we only mask Antarctica if only LAND points shall be used
if valid_mask == 'land':
mask_antarctica = True
elif valid_mask == 'ocean':
mask_antarctica = False
else:
mask_antarctica = False
if target_grid == 't63grid':
mdata._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
else:
tmpmsk = get_generic_landseamask(False, area=valid_mask, target_grid=target_grid, mask_antarctica=mask_antarctica)
mdata._apply_mask(tmpmsk)
mdata_all._apply_mask(tmpmsk)
del tmpmsk
mdata_mean = mdata_all.fldmean()
# return data as a tuple list
retval = (mdata_all.time, mdata_mean, mdata_all)
del mdata_all
return mdata, retval
def get_surface_shortwave_radiation_down(self, interval='monthly', **kwargs):
"""
get surface shortwave incoming radiation data for JSBACH
Parameters
----------
interval : str
specifies the aggregation interval. Possible options: ['season','monthly']
"""
locdict = kwargs[self.type]
y1 = '1980-01-01' # TODO move this to the JSON dictionary or some parameter file
y2 = '2010-12-31'
rawfile = self.data_dir + self.experiment + '_jsbach_' + y1[0: 4] + '_' + y2[0: 4] + '.nc'
mdata, retval = self._do_preprocessing(rawfile, 'swdown_acc', y1, y2, interval=interval, valid_mask=locdict['valid_mask'])
return mdata, retval
#-----------------------------------------------------------------------
def get_surface_shortwave_radiation_up(self, interval='monthly', **kwargs):
"""
get surface shortwave upward radiation data for JSBACH
Parameters
----------
interval : str
specifies the aggregation interval. Possible options: ['season','monthly']
"""
locdict = kwargs[self.type]
y1 = '1980-01-01' # TODO: move this to the JSON dictionary or some parameter file
y2 = '2010-12-31'
rawfile = self.data_dir + self.experiment + '_jsbach_' + y1[0: 4] + '_' + y2[0: 4] + '.nc'
mdata, retval = self._do_preprocessing(rawfile, 'swdown_reflect_acc', y1, y2, interval=interval, valid_mask=locdict['valid_mask'])
return mdata, retval
#-----------------------------------------------------------------------
def get_model_data_generic(self, interval='monthly', **kwargs):
"""
This is only a wrapper to redirect to individual functions
for the JSBACH_RAW class
Currently only the usage for rainfall is supported!
"""
# HACK: only a wrapper, should be deprecated
raise ValueError('Rainfall analysis not working yet!')
# unreachable until the rainfall analysis works:
# self.get_rainfall_data(interval=interval, **kwargs)
def get_rainfall_data(self, interval='monthly', **kwargs):
"""
get surface rainfall data for JSBACH
uses already preprocessed data where the convective and
advective rainfall has been merged
Parameters
----------
interval : str
specifies the aggregation interval. Possible options: ['season','monthly']
"""
locdict = kwargs[self.type]
y1 = '1980-01-01'  # TODO: move this to the JSON dictionary or some parameter file
y2 = '2010-12-31'
variable = 'aprc'
rawfile = self.data_dir + self.experiment + '_echam6_echam_*_precipitation.nc'
files = glob.glob(rawfile)
if len(files) != 1:
print 'Inputfiles: ', files
raise ValueError('Something went wrong: Invalid number of input files!')
else:
rawfile = files[0]
mdata, retval = self._do_preprocessing(rawfile, variable, y1, y2, interval=interval, valid_mask=locdict['valid_mask'])
return mdata, retval
#-----------------------------------------------------------------------
def get_gpp_data(self, interval='season'):
"""
get surface GPP data for JSBACH
TODO: temporal aggregation of data --> or leave it to the user!
"""
cdo = Cdo()
v = 'var167'
y1 = str(self.start_time)[0:10]
y2 = str(self.stop_time)[0:10]
rawfilename = self.data_dir + 'data/model/' + self.experiment + '_' + y1[0:4] + '-' + y2[0:4] + '.nc'
times_in_file = int(''.join(cdo.ntime(input=rawfilename)))
if interval == 'season':
if times_in_file != 4:
tmp_file = get_temporary_directory() + os.path.basename(rawfilename)
cdo.yseasmean(options='-f nc -b 32 -r ', input='-selvar,' + v + ' ' + rawfilename, output=tmp_file[:-3] + '_yseasmean.nc')
rawfilename = tmp_file[:-3] + '_yseasmean.nc'
if interval == 'monthly':
if times_in_file != 12:
tmp_file = get_temporary_directory() + os.path.basename(rawfilename)
cdo.ymonmean(options='-f nc -b 32 -r ', input='-selvar,' + v + ' ' + rawfilename, output=tmp_file[:-3] + '_ymonmean.nc')
rawfilename = tmp_file[:-3] + '_ymonmean.nc'
if not os.path.exists(rawfilename):
return None
filename = rawfilename
#--- read land-sea mask
ls_mask = get_T63_landseamask(self.shift_lon)
#--- read GPP data
gpp = Data4D(filename, v, read=True,
label=self.experiment + ' ' + v, unit='gC m-2 a-1', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data, scale_factor=3600. * 24. * 30. / 0.083
)
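# scale_factor assumption: 3600 * 24 * 30 converts a per-second flux to a
# monthly total, and the division by 0.083 (~1/12) scales the monthly value
# to a year, matching the gC m-2 a-1 unit above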
return gpp.sum_data4D()
#-----------------------------------------------------------------------<|fim▁end|> | cdo.monmean(options='-f nc', output=outfile, input=tmp) # monmean needed here, as otherwise interface does not work
os.remove(tmp)
self.files.update({'surf': outfile}) |
<|file_name|>io-hook.ts<|end_file_name|><|fim▁begin|>export abstract class IoHook {
abstract enableClickPropagation();<|fim▁hole|><|fim▁end|> |
abstract disableClickPropagation();
} |
<|file_name|>test_job_posting.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
import unittest
import mock
from ..models import JobPosting
class JobPostingTestCase(unittest.TestCase):<|fim▁hole|> model.position_name = 'Position Name'
# action
email = unicode(model)
# assert
self.assertEqual(model.position_name, email)<|fim▁end|> | def test_unicode_should_return_position_name(self):
# setup
model = JobPosting() |
<|file_name|>checked_neg.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::basic::signeds::PrimitiveSigned;
fn checked_neg_helper<T: PrimitiveSigned>() {
let test = |n: T, out| {
assert_eq!(n.checked_neg(), out);
};
test(T::ZERO, Some(T::ZERO));
test(T::ONE, Some(T::NEGATIVE_ONE));
test(T::exact_from(100), Some(T::exact_from(-100)));
test(T::NEGATIVE_ONE, Some(T::ONE));
test(T::exact_from(-100), Some(T::exact_from(100)));
test(T::MIN, None);
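// T::MIN is the only value whose negation is unrepresentable in two's
// complement, so checked_neg returns None for it.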
}
<|fim▁hole|>#[test]
fn test_checked_neg() {
apply_fn_to_signeds!(checked_neg_helper);
}<|fim▁end|> | |
<|file_name|>sum.py<|end_file_name|><|fim▁begin|>from chainer import cuda
from chainer import function
from chainer.utils import type_check
class Sum(function.Function):
"""Sum of array elements over a given axis."""
keepdims = False
def __init__(self, axis=None, keepdims=False):
if axis is None:
self.axis = None
elif isinstance(axis, int):
self.axis = (axis,)
elif isinstance(axis, tuple) and all(isinstance(a, int) for a in axis):
if len(set(axis)) != len(axis):
raise ValueError('duplicate value in axis: ({})'.format(
', '.join(map(str, axis))))
self.axis = axis
else:
raise TypeError('None, int or tuple of int are required')
self.keepdims = keepdims
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
in_types[0].dtype.kind == 'f',
)
if self.axis is not None:
for axis in self.axis:
if axis >= 0:
type_check.expect(
axis < in_types[0].ndim,
)
else:
type_check.expect(
-axis - 1 < in_types[0].ndim,
)
def forward(self, x):
self.retain_inputs(())
self._in_shape = x[0].shape
self._in_dtype = x[0].dtype
self._xp = cuda.get_array_module(*x)
return self._xp.asarray(
x[0].sum(axis=self.axis, keepdims=self.keepdims)),
def backward(self, x, gy):
xp = self._xp
gy = gy[0]
if not (len(self._in_shape) == 0 or
self.axis is None or self.keepdims):
actual_axis = []
for axis in self.axis:
if axis < 0:
axis += len(self._in_shape)
actual_axis.append(axis)
for axis in sorted(actual_axis):
gy = xp.expand_dims(gy, axis=axis)
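# re-inserting singleton dimensions at each reduced axis lets gy be
# broadcast back to the original input shape below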
if hasattr(xp, 'broadcast_to'):
gx = xp.broadcast_to(gy, self._in_shape)
else:<|fim▁hole|> return gx,
def sum(x, axis=None, keepdims=False):
"""Sum of array elements over a given axis.
Args:
x (~chainer.Variable): Elements to sum.
axis (None, int, or tuple of int): Axis along which the sum is performed.
The default (axis = None) is to perform a sum over all the dimensions
of the input array.
keepdims (bool): If ``True``, the specified axes are retained as axes
of length one.
Returns:
~chainer.Variable: Output variable.
"""
return Sum(axis, keepdims)(x)<|fim▁end|> | # NumPy 1.9 does not support broadcast_to.
dummy_x = xp.empty(self._in_shape, 'b')
gx, _ = xp.broadcast_arrays(gy, dummy_x)
|
<|file_name|>changes.go<|end_file_name|><|fim▁begin|>package tmdb
import (
"fmt"
)
// Changes struct
type Changes struct {
Results []struct {
ID int
Adult bool
}
}
var changeOptions = map[string]struct{}{
"page": {},
"start_date": {},
"end_date": {}}
// GetChangesMovie gets a list of movie ids that have been edited
// https://developers.themoviedb.org/3/changes/get-movie-change-list
func (tmdb *TMDb) GetChangesMovie(options map[string]string) (*Changes, error) {
var movieChanges Changes
optionsString := getOptionsString(options, changeOptions)
uri := fmt.Sprintf("%s/movie/changes?api_key=%s%s", baseURL, tmdb.apiKey, optionsString)
result, err := getTmdb(uri, &movieChanges)
return result.(*Changes), err
}
// GetChangesPerson gets a list of people ids that have been edited
// https://developers.themoviedb.org/3/changes/get-person-change-list
func (tmdb *TMDb) GetChangesPerson(options map[string]string) (*Changes, error) {
var personChanges Changes
optionsString := getOptionsString(options, changeOptions)
uri := fmt.Sprintf("%s/person/changes?api_key=%s%s", baseURL, tmdb.apiKey, optionsString)
result, err := getTmdb(uri, &personChanges)
return result.(*Changes), err
}
// GetChangesTv gets a list of tv show ids that have been edited
// https://developers.themoviedb.org/3/changes/get-tv-change-list
func (tmdb *TMDb) GetChangesTv(options map[string]string) (*Changes, error) {
var tvChanges Changes<|fim▁hole|>}<|fim▁end|> | optionsString := getOptionsString(options, changeOptions)
uri := fmt.Sprintf("%s/tv/changes?api_key=%s%s", baseURL, tmdb.apiKey, optionsString)
result, err := getTmdb(uri, &tvChanges)
return result.(*Changes), err |
<|file_name|>utils_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2014, OneLogin, Inc.
# All rights reserved.
from base64 import b64decode
import json
from lxml import etree
from os.path import dirname, join, exists
import unittest
from xml.dom.minidom import parseString
from onelogin.saml2 import compat
from onelogin.saml2.constants import OneLogin_Saml2_Constants
from onelogin.saml2.settings import OneLogin_Saml2_Settings
from onelogin.saml2.utils import OneLogin_Saml2_Utils
class OneLogin_Saml2_Utils_Test(unittest.TestCase):
data_path = join(dirname(__file__), '..', '..', '..', 'data')
def loadSettingsJSON(self, filename=None):
if filename:
filename = join(dirname(__file__), '..', '..', '..', 'settings', filename)
else:
filename = join(dirname(__file__), '..', '..', '..', 'settings', 'settings1.json')
if exists(filename):
stream = open(filename, 'r')
settings = json.load(stream)
stream.close()
return settings
else:
raise Exception('Settings json file does not exist')
def file_contents(self, filename):
f = open(filename, 'r')
content = f.read()
f.close()
return content
def testFormatCert(self):
"""
Tests the format_cert method of the OneLogin_Saml2_Utils
"""
settings_info = self.loadSettingsJSON()
cert = settings_info['idp']['x509cert']
self.assertNotIn('-----BEGIN CERTIFICATE-----', cert)
self.assertNotIn('-----END CERTIFICATE-----', cert)
self.assertEqual(len(cert), 860)
formated_cert1 = OneLogin_Saml2_Utils.format_cert(cert)
self.assertIn('-----BEGIN CERTIFICATE-----', formated_cert1)
self.assertIn('-----END CERTIFICATE-----', formated_cert1)
formated_cert2 = OneLogin_Saml2_Utils.format_cert(cert, True)
self.assertEqual(formated_cert1, formated_cert2)
formated_cert3 = OneLogin_Saml2_Utils.format_cert(cert, False)
self.assertNotIn('-----BEGIN CERTIFICATE-----', formated_cert3)
self.assertNotIn('-----END CERTIFICATE-----', formated_cert3)
self.assertEqual(len(formated_cert3), 860)
def testFormatPrivateKey(self):
"""
Tests the format_private_key method of the OneLogin_Saml2_Utils
"""
key = "-----BEGIN RSA PRIVATE KEY-----\nMIICXgIBAAKBgQDivbhR7P516x/S3BqKxupQe0LONoliupiBOesCO3SHbDrl3+q9\nIbfnfmE04rNuMcPsIxB161TdDpIesLCn7c8aPHISKOtPlAeTZSnb8QAu7aRjZq3+\nPbrP5uW3TcfCGPtKTytHOge/OlJbo078dVhXQ14d1EDwXJW1rRXuUt4C8QIDAQAB\nAoGAD4/Z4LWVWV6D1qMIp1Gzr0ZmdWTE1SPdZ7Ej8glGnCzPdguCPuzbhGXmIg0V\nJ5D+02wsqws1zd48JSMXXM8zkYZVwQYIPUsNn5FetQpwxDIMPmhHg+QNBgwOnk8J\nK2sIjjLPL7qY7Itv7LT7Gvm5qSOkZ33RCgXcgz+okEIQMYkCQQDzbTOyDL0c5WQV\n6A2k06T/azdhUdGXF9C0+WkWSfNaovmTgRXh1G+jMlr82Snz4p4/STt7P/XtyWzF\n3pkVgZr3AkEA7nPjXwHlttNEMo6AtxHd47nizK2NUN803ElIUT8P9KSCoERmSXq6\n6PDekGNic4ldpsSvOeYCk8MAYoDBy9kvVwJBAMLgX4xg6lzhv7hR5+pWjTb1rIY6\nrCHbrPfU264+UZXz9v2BT/VUznLF81WMvStD9xAPHpFS6R0OLghSZhdzhI0CQQDL\n8Duvfxzrn4b9QlmduV8wLERoT6rEVxKLsPVz316TGrxJvBZLk/cV0SRZE1cZf4uk\nXSWMfEcJ/0Zt+LdG1CqjAkEAqwLSglJ9Dy3HpgMz4vAAyZWzAxvyA1zW0no9GOLc\nPQnYaNUN/Fy2SYtETXTb0CQ9X1rt8ffkFP7ya+5TC83aMg==\n-----END RSA PRIVATE KEY-----\n"
formated_key = OneLogin_Saml2_Utils.format_private_key(key, True)
self.assertIn('-----BEGIN RSA PRIVATE KEY-----', formated_key)
self.assertIn('-----END RSA PRIVATE KEY-----', formated_key)
self.assertEqual(len(formated_key), 891)
formated_key = OneLogin_Saml2_Utils.format_private_key(key, False)
self.assertNotIn('-----BEGIN RSA PRIVATE KEY-----', formated_key)
self.assertNotIn('-----END RSA PRIVATE KEY-----', formated_key)
self.assertEqual(len(formated_key), 816)
key_2 = "-----BEGIN PRIVATE KEY-----\nMIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAM62buSW9Zgh7CmZ\nouJekK0ac9sgEZkspemjv7SyE6Hbdz+KmUr3C7MI6JuPfVyJbxvMDf3FbgBBK7r5\nyfGgehXwplLMZj8glvV3NkdLMLPWmaw9U5sOzRoym46pVvsEo1PUL2qDK5Wrsm1g\nuY1KIDSHL59NQ7PzDKgm1dxioeXFAgMBAAECgYA/fvRzTReloo3rfWD2Tfv84EpE\nPgaJ2ZghO4Zwl97F8icgIo/R4i760Lq6xgnI+gJiNHz7vcB7XYl0RrRMf3HgbA7z\npJxREmOVltESDHy6lH0TmCdv9xMmHltB+pbGOhqBvuGgFbEOR73lDDV0ln2rEITJ\nA2zjYF+hWe8b0JFeQQJBAOsIIIlHAMngjhCQDD6kla/vce972gCFU7ZeFw16ZMmb\n8W4rGRfQoQWYxSLAFIFsYewSBTccanyYbBNe3njki3ECQQDhJ4cgV6VpTwez4dkp\nU/xCHKoReedAEJhXucTNGpiIqu+TDgIz9aRbrgnUKkS1s06UJhcDRTl/+pCSRRt/\nCA2VAkBkPw4pn1hNwvK1S8t9OJQD+5xcKjZcvIFtKoqonAi7GUGL3OQSDVFw4q1K\n2iSk40aM+06wJ/WfeR+3z2ISrGBxAkAJ20YiF1QpcQlASbHNCl0vs7uKOlDyUAer\nR3mjFPf6e6kzQdi815MTZGIPxK3vWmMlPymgvgYPYTO1A4t5myulAkEA1QioAWcJ\noO26qhUlFRBCR8BMJoVPImV7ndVHE7usHdJvP7V2P9RyuRcMCTVul8RRmyoh/+yG\n4ghMaHo/v0YY5Q==\n-----END PRIVATE KEY-----\n"
formated_key_2 = OneLogin_Saml2_Utils.format_private_key(key_2, True)
self.assertIn('-----BEGIN PRIVATE KEY-----', formated_key_2)
self.assertIn('-----END PRIVATE KEY-----', formated_key_2)
self.assertEqual(len(formated_key_2), 916)
formated_key_2 = OneLogin_Saml2_Utils.format_private_key(key_2, False)
self.assertNotIn('-----BEGIN PRIVATE KEY-----', formated_key_2)
self.assertNotIn('-----END PRIVATE KEY-----', formated_key_2)
self.assertEqual(len(formated_key_2), 848)
key_3 = 'MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAM62buSW9Zgh7CmZouJekK0ac9sgEZkspemjv7SyE6Hbdz+KmUr3C7MI6JuPfVyJbxvMDf3FbgBBK7r5yfGgehXwplLMZj8glvV3NkdLMLPWmaw9U5sOzRoym46pVvsEo1PUL2qDK5Wrsm1guY1KIDSHL59NQ7PzDKgm1dxioeXFAgMBAAECgYA/fvRzTReloo3rfWD2Tfv84EpEPgaJ2ZghO4Zwl97F8icgIo/R4i760Lq6xgnI+gJiNHz7vcB7XYl0RrRMf3HgbA7zpJxREmOVltESDHy6lH0TmCdv9xMmHltB+pbGOhqBvuGgFbEOR73lDDV0ln2rEITJA2zjYF+hWe8b0JFeQQJBAOsIIIlHAMngjhCQDD6kla/vce972gCFU7ZeFw16ZMmb8W4rGRfQoQWYxSLAFIFsYewSBTccanyYbBNe3njki3ECQQDhJ4cgV6VpTwez4dkpU/xCHKoReedAEJhXucTNGpiIqu+TDgIz9aRbrgnUKkS1s06UJhcDRTl/+pCSRRt/CA2VAkBkPw4pn1hNwvK1S8t9OJQD+5xcKjZcvIFtKoqonAi7GUGL3OQSDVFw4q1K2iSk40aM+06wJ/WfeR+3z2ISrGBxAkAJ20YiF1QpcQlASbHNCl0vs7uKOlDyUAerR3mjFPf6e6kzQdi815MTZGIPxK3vWmMlPymgvgYPYTO1A4t5myulAkEA1QioAWcJoO26qhUlFRBCR8BMJoVPImV7ndVHE7usHdJvP7V2P9RyuRcMCTVul8RRmyoh/+yG4ghMaHo/v0YY5Q=='
formated_key_3 = OneLogin_Saml2_Utils.format_private_key(key_3, True)
self.assertIn('-----BEGIN RSA PRIVATE KEY-----', formated_key_3)
self.assertIn('-----END RSA PRIVATE KEY-----', formated_key_3)
self.assertEqual(len(formated_key_3), 924)
formated_key_3 = OneLogin_Saml2_Utils.format_private_key(key_3, False)
self.assertNotIn('-----BEGIN PRIVATE KEY-----', formated_key_3)
self.assertNotIn('-----END PRIVATE KEY-----', formated_key_3)
self.assertNotIn('-----BEGIN RSA PRIVATE KEY-----', formated_key_3)
self.assertNotIn('-----END RSA PRIVATE KEY-----', formated_key_3)
self.assertEqual(len(formated_key_3), 848)
def testRedirect(self):
"""
Tests the redirect method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com'
}
# Check relative and absolute
hostname = OneLogin_Saml2_Utils.get_self_host(request_data)
url = 'http://%s/example' % hostname
url2 = '/example'
target_url = OneLogin_Saml2_Utils.redirect(url, {}, request_data)
target_url2 = OneLogin_Saml2_Utils.redirect(url2, {}, request_data)
self.assertEqual(target_url, target_url2)
# Check that http and https are accepted and that other protocols are rejected
url3 = 'https://%s/example?test=true' % hostname
url4 = 'ftp://%s/example' % hostname
target_url3 = OneLogin_Saml2_Utils.redirect(url3, {}, request_data)
self.assertIn('test=true', target_url3)
self.assertRaisesRegexp(Exception, 'Redirect to invalid URL',
OneLogin_Saml2_Utils.redirect, url4, {}, request_data)
# Review parameter prefix
parameters1 = {
'value1': 'a'
}
target_url5 = OneLogin_Saml2_Utils.redirect(url, parameters1, request_data)
self.assertEqual('http://%s/example?value1=a' % hostname, target_url5)
target_url6 = OneLogin_Saml2_Utils.redirect(url3, parameters1, request_data)
self.assertEqual('https://%s/example?test=true&value1=a' % hostname, target_url6)
# Review parameters
parameters2 = {
'alphavalue': 'a',
'numvaluelist': ['1', '2'],
'testing': None
}
target_url7 = OneLogin_Saml2_Utils.redirect(url, parameters2, request_data)
parameters2_decoded = {"alphavalue": "alphavalue=a", "numvaluelist": "numvaluelist[]=1&numvaluelist[]=2", "testing": "testing"}
parameters2_str = "&".join(parameters2_decoded[x] for x in parameters2)
self.assertEqual('http://%s/example?%s' % (hostname, parameters2_str), target_url7)
parameters3 = {
'alphavalue': 'a',
'emptynumvaluelist': [],
'numvaluelist': [''],
}
parameters3_decoded = {"alphavalue": "alphavalue=a", "numvaluelist": "numvaluelist[]="}
parameters3_str = "&".join((parameters3_decoded[x] for x in parameters3.keys() if x in parameters3_decoded))
target_url8 = OneLogin_Saml2_Utils.redirect(url, parameters3, request_data)
self.assertEqual('http://%s/example?%s' % (hostname, parameters3_str), target_url8)
def testGetselfhost(self):
"""
Tests the get_self_host method of the OneLogin_Saml2_Utils
"""
request_data = {}
self.assertRaisesRegexp(Exception, 'No hostname defined',
OneLogin_Saml2_Utils.get_self_host, request_data)
request_data = {
'server_name': 'example.com'
}
self.assertEqual('example.com', OneLogin_Saml2_Utils.get_self_host(request_data))
request_data = {
'http_host': 'example.com'
}
self.assertEqual('example.com', OneLogin_Saml2_Utils.get_self_host(request_data))
request_data = {
'http_host': 'example.com:443'
}
self.assertEqual('example.com', OneLogin_Saml2_Utils.get_self_host(request_data))
request_data = {
'http_host': 'example.com:ok'
}
self.assertEqual('example.com:ok', OneLogin_Saml2_Utils.get_self_host(request_data))
def testisHTTPS(self):
"""
Tests the is_https method of the OneLogin_Saml2_Utils
"""
request_data = {
'https': 'off'
}
self.assertFalse(OneLogin_Saml2_Utils.is_https(request_data))
request_data = {
'https': 'on'
}
self.assertTrue(OneLogin_Saml2_Utils.is_https(request_data))
request_data = {
'server_port': '80'
}
self.assertFalse(OneLogin_Saml2_Utils.is_https(request_data))
request_data = {
'server_port': '443'
}
self.assertTrue(OneLogin_Saml2_Utils.is_https(request_data))
def testGetSelfURLhost(self):
"""
Tests the get_self_url_host method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com'
}
self.assertEqual('http://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '80'
self.assertEqual('http://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '81'
self.assertEqual('http://example.com:81', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '443'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
del request_data['server_port']
request_data['https'] = 'on'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '444'
self.assertEqual('https://example.com:444', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '443'
request_data['request_uri'] = ''
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['request_uri'] = '/'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['request_uri'] = 'onelogin/'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['request_uri'] = '/onelogin'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['request_uri'] = 'https://example.com/onelogin/sso'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data2 = {
'request_uri': 'example.com/onelogin/sso'
}
self.assertRaisesRegexp(Exception, 'No hostname defined',
OneLogin_Saml2_Utils.get_self_url_host, request_data2)
def testGetSelfURL(self):
"""
Tests the get_self_url method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com'
}
url = OneLogin_Saml2_Utils.get_self_url_host(request_data)
self.assertEqual(url, OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = ''
self.assertEqual(url, OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '/'
self.assertEqual(url + '/', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = 'index.html'
self.assertEqual(url + 'index.html', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '?index.html'
self.assertEqual(url + '?index.html', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '/index.html'
self.assertEqual(url + '/index.html', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '/index.html?testing'
self.assertEqual(url + '/index.html?testing', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '/test/index.html?testing'
self.assertEqual(url + '/test/index.html?testing', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = 'https://example.com/testing'
self.assertEqual(url + '/testing', OneLogin_Saml2_Utils.get_self_url(request_data))
def testGetSelfURLNoQuery(self):
"""
Tests the get_self_url_no_query method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com',
'script_name': '/index.html'
}
url = OneLogin_Saml2_Utils.get_self_url_host(request_data) + request_data['script_name']
self.assertEqual(url, OneLogin_Saml2_Utils.get_self_url_no_query(request_data))
request_data['path_info'] = '/test'
self.assertEqual(url + '/test', OneLogin_Saml2_Utils.get_self_url_no_query(request_data))
def testGetSelfRoutedURLNoQuery(self):
"""
Tests the get_self_routed_url_no_query method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com',
'request_uri': '/example1/route?x=test',
'query_string': '?x=test'
}
url = OneLogin_Saml2_Utils.get_self_url_host(request_data) + '/example1/route'
self.assertEqual(url, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data))
request_data_2 = {
'http_host': 'example.com',
'request_uri': '',
}
url_2 = OneLogin_Saml2_Utils.get_self_url_host(request_data_2)
self.assertEqual(url_2, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_2))
request_data_3 = {
'http_host': 'example.com',
}
url_3 = OneLogin_Saml2_Utils.get_self_url_host(request_data_3)
self.assertEqual(url_3, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_3))
request_data_4 = {
'http_host': 'example.com',
'request_uri': '/example1/route/test/',
'query_string': '?invalid=1'
}
url_4 = OneLogin_Saml2_Utils.get_self_url_host(request_data_4) + '/example1/route/test/'
self.assertEqual(url_4, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_4))
request_data_5 = {
'http_host': 'example.com',
'request_uri': '/example1/route/test/',
'query_string': ''
}
url_5 = OneLogin_Saml2_Utils.get_self_url_host(request_data_5) + '/example1/route/test/'
self.assertEqual(url_5, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_5))
request_data_6 = {
'http_host': 'example.com',
'request_uri': '/example1/route/test/',
}
url_6 = OneLogin_Saml2_Utils.get_self_url_host(request_data_6) + '/example1/route/test/'
self.assertEqual(url_6, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_6))
def testGetStatus(self):
"""
Gets the status of a message
"""
xml = self.file_contents(join(self.data_path, 'responses', 'response1.xml.base64'))
xml = b64decode(xml)
dom = etree.fromstring(xml)<|fim▁hole|>
status = OneLogin_Saml2_Utils.get_status(dom)
self.assertEqual(OneLogin_Saml2_Constants.STATUS_SUCCESS, status['code'])
xml2 = self.file_contents(join(self.data_path, 'responses', 'invalids', 'status_code_responder.xml.base64'))
xml2 = b64decode(xml2)
dom2 = etree.fromstring(xml2)
status2 = OneLogin_Saml2_Utils.get_status(dom2)
self.assertEqual(OneLogin_Saml2_Constants.STATUS_RESPONDER, status2['code'])
self.assertEqual('', status2['msg'])
xml3 = self.file_contents(join(self.data_path, 'responses', 'invalids', 'status_code_responer_and_msg.xml.base64'))
xml3 = b64decode(xml3)
dom3 = etree.fromstring(xml3)
status3 = OneLogin_Saml2_Utils.get_status(dom3)
self.assertEqual(OneLogin_Saml2_Constants.STATUS_RESPONDER, status3['code'])
self.assertEqual('something_is_wrong', status3['msg'])
xml_inv = self.file_contents(join(self.data_path, 'responses', 'invalids', 'no_status.xml.base64'))
xml_inv = b64decode(xml_inv)
dom_inv = etree.fromstring(xml_inv)
self.assertRaisesRegexp(Exception, 'Missing Status on response',
OneLogin_Saml2_Utils.get_status, dom_inv)
xml_inv2 = self.file_contents(join(self.data_path, 'responses', 'invalids', 'no_status_code.xml.base64'))
xml_inv2 = b64decode(xml_inv2)
dom_inv2 = etree.fromstring(xml_inv2)
self.assertRaisesRegexp(Exception, 'Missing Status Code on response',
OneLogin_Saml2_Utils.get_status, dom_inv2)
def testParseDuration(self):
"""
Tests the parse_duration method of the OneLogin_Saml2_Utils
"""
duration = 'PT1393462294S'
timestamp = 1393876825
parsed_duration = OneLogin_Saml2_Utils.parse_duration(duration, timestamp)
self.assertEqual(2787339119, parsed_duration)
parsed_duration_2 = OneLogin_Saml2_Utils.parse_duration(duration)
self.assertTrue(parsed_duration_2 > parsed_duration)
invalid_duration = 'PT1Y'
self.assertRaisesRegexp(Exception, 'Unrecognised ISO 8601 date format',
OneLogin_Saml2_Utils.parse_duration, invalid_duration)
new_duration = 'P1Y1M'
parsed_duration_4 = OneLogin_Saml2_Utils.parse_duration(new_duration, timestamp)
self.assertEqual(1428091225, parsed_duration_4)
neg_duration = '-P14M'
parsed_duration_5 = OneLogin_Saml2_Utils.parse_duration(neg_duration, timestamp)
self.assertEqual(1357243225, parsed_duration_5)
def testParseSAML2Time(self):
"""
Tests the parse_SAML_to_time method of the OneLogin_Saml2_Utils
"""
time = 1386650371
saml_time = '2013-12-10T04:39:31Z'
self.assertEqual(time, OneLogin_Saml2_Utils.parse_SAML_to_time(saml_time))
self.assertRaisesRegexp(Exception, 'does not match format',
OneLogin_Saml2_Utils.parse_SAML_to_time, 'invalidSAMLTime')
# Now test if the toolkit supports milliseconds
saml_time2 = '2013-12-10T04:39:31.120Z'
self.assertEqual(time, OneLogin_Saml2_Utils.parse_SAML_to_time(saml_time2))
def testParseTime2SAML(self):
"""
Tests the parse_time_to_SAML method of the OneLogin_Saml2_Utils
"""
time = 1386650371
saml_time = '2013-12-10T04:39:31Z'
self.assertEqual(saml_time, OneLogin_Saml2_Utils.parse_time_to_SAML(time))
self.assertRaisesRegexp(Exception, 'could not convert string to float',
OneLogin_Saml2_Utils.parse_time_to_SAML, 'invalidtime')
def testGetExpireTime(self):
"""
Tests the get_expire_time method of the OneLogin_Saml2_Utils
"""
self.assertEqual(None, OneLogin_Saml2_Utils.get_expire_time())
self.assertNotEqual(None, OneLogin_Saml2_Utils.get_expire_time('PT360000S'))
self.assertEqual('1291955971', OneLogin_Saml2_Utils.get_expire_time('PT360000S', '2010-12-10T04:39:31Z'))
self.assertEqual('1291955971', OneLogin_Saml2_Utils.get_expire_time('PT360000S', 1291955971))
self.assertNotEqual('3311642371', OneLogin_Saml2_Utils.get_expire_time('PT360000S', '2074-12-10T04:39:31Z'))
self.assertNotEqual('3311642371', OneLogin_Saml2_Utils.get_expire_time('PT360000S', 1418186371))
def testGenerateNameIdWithSPNameQualifier(self):
"""
Tests the generateNameId method of the OneLogin_Saml2_Utils
"""
name_id_value = 'ONELOGIN_ce998811003f4e60f8b07a311dc641621379cfde'
entity_id = 'http://stuff.com/endpoints/metadata.php'
name_id_format = 'urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified'
name_id = OneLogin_Saml2_Utils.generate_name_id(name_id_value, entity_id, name_id_format)
expected_name_id = '<saml:NameID SPNameQualifier="http://stuff.com/endpoints/metadata.php" Format="urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified">ONELOGIN_ce998811003f4e60f8b07a311dc641621379cfde</saml:NameID>'
self.assertEqual(expected_name_id, name_id)
settings_info = self.loadSettingsJSON()
x509cert = settings_info['idp']['x509cert']
key = OneLogin_Saml2_Utils.format_cert(x509cert)
name_id_enc = OneLogin_Saml2_Utils.generate_name_id(name_id_value, entity_id, name_id_format, key)
expected_name_id_enc = '<saml:EncryptedID><xenc:EncryptedData xmlns:xenc="http://www.w3.org/2001/04/xmlenc#" xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" Type="http://www.w3.org/2001/04/xmlenc#Element">\n<xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>\n<dsig:KeyInfo xmlns:dsig="http://www.w3.org/2000/09/xmldsig#">\n<xenc:EncryptedKey>\n<xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>\n<xenc:CipherData>\n<xenc:CipherValue>'
self.assertIn(expected_name_id_enc, name_id_enc)
def testGenerateNameIdWithoutSPNameQualifier(self):
"""
Tests the generateNameId method of the OneLogin_Saml2_Utils
"""
name_id_value = 'ONELOGIN_ce998811003f4e60f8b07a311dc641621379cfde'
name_id_format = 'urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified'
name_id = OneLogin_Saml2_Utils.generate_name_id(name_id_value, None, name_id_format)
expected_name_id = '<saml:NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified">ONELOGIN_ce998811003f4e60f8b07a311dc641621379cfde</saml:NameID>'
self.assertEqual(expected_name_id, name_id)
settings_info = self.loadSettingsJSON()
x509cert = settings_info['idp']['x509cert']
key = OneLogin_Saml2_Utils.format_cert(x509cert)
name_id_enc = OneLogin_Saml2_Utils.generate_name_id(name_id_value, None, name_id_format, key)
expected_name_id_enc = '<saml:EncryptedID><xenc:EncryptedData xmlns:xenc="http://www.w3.org/2001/04/xmlenc#" xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" Type="http://www.w3.org/2001/04/xmlenc#Element">\n<xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>\n<dsig:KeyInfo xmlns:dsig="http://www.w3.org/2000/09/xmldsig#">\n<xenc:EncryptedKey>\n<xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>\n<xenc:CipherData>\n<xenc:CipherValue>'
self.assertIn(expected_name_id_enc, name_id_enc)
def testCalculateX509Fingerprint(self):
"""
Tests the calculateX509Fingerprint method of the OneLogin_Saml2_Utils
"""
settings = OneLogin_Saml2_Settings(self.loadSettingsJSON())
cert_path = settings.get_cert_path()
key = self.file_contents(cert_path + 'sp.key')
cert = self.file_contents(cert_path + 'sp.crt')
self.assertEqual(None, OneLogin_Saml2_Utils.calculate_x509_fingerprint(key))
self.assertEqual('afe71c28ef740bc87425be13a2263d37971da1f9', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert))
self.assertEqual('afe71c28ef740bc87425be13a2263d37971da1f9', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert, 'sha1'))
self.assertEqual('c51cfa06c7a49767f6eab18238eae1c56708e29264da3d11f538a12cd2c357ba', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert, 'sha256'))
self.assertEqual('bc5826e6f9429247254bae5e3c650e6968a36a62d23075eb168134978d88600559c10830c28711b2c29c7947c0c2eb1d', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert, 'sha384'))
self.assertEqual('3db29251b97559c67988ea0754cb0573fc409b6f75d89282d57cfb75089539b0bbdb2dcd9ec6e032549ecbc466439d5992e18db2cf5494ca2fe1b2e16f348dff', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert, 'sha512'))
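# (sketch; assumption about the implementation) the fingerprint is the hex
# digest of the DER-decoded certificate body, roughly:
#   hashlib.sha1(b64decode(pem_body)).hexdigest()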
def testDeleteLocalSession(self):
"""
Tests the delete_local_session method of the OneLogin_Saml2_Utils
"""
global local_session_test
local_session_test = 1
OneLogin_Saml2_Utils.delete_local_session()
self.assertEqual(1, local_session_test)
dscb = lambda: self.session_clear()
OneLogin_Saml2_Utils.delete_local_session(dscb)
self.assertEqual(0, local_session_test)
def session_clear(self):
"""
Auxiliary method to test the delete_local_session method of the OneLogin_Saml2_Utils
"""
global local_session_test
local_session_test = 0
def testFormatFingerPrint(self):
"""
Tests the format_finger_print method of the OneLogin_Saml2_Utils
"""
finger_print_1 = 'AF:E7:1C:28:EF:74:0B:C8:74:25:BE:13:A2:26:3D:37:97:1D:A1:F9'
self.assertEqual('afe71c28ef740bc87425be13a2263d37971da1f9', OneLogin_Saml2_Utils.format_finger_print(finger_print_1))
finger_print_2 = 'afe71c28ef740bc87425be13a2263d37971da1f9'
self.assertEqual('afe71c28ef740bc87425be13a2263d37971da1f9', OneLogin_Saml2_Utils.format_finger_print(finger_print_2))
def testDecryptElement(self):
"""
Tests the decrypt_element method of the OneLogin_Saml2_Utils
"""
settings = OneLogin_Saml2_Settings(self.loadSettingsJSON())
key = settings.get_sp_key()
xml_nameid_enc = b64decode(self.file_contents(join(self.data_path, 'responses', 'response_encrypted_nameid.xml.base64')))
dom_nameid_enc = etree.fromstring(xml_nameid_enc)
encrypted_nameid_nodes = dom_nameid_enc.find('.//saml:EncryptedID', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data = encrypted_nameid_nodes[0]
decrypted_nameid = OneLogin_Saml2_Utils.decrypt_element(encrypted_data, key)
self.assertEqual('{%s}NameID' % OneLogin_Saml2_Constants.NS_SAML, decrypted_nameid.tag)
self.assertEqual('2de11defd199f8d5bb63f9b7deb265ba5c675c10', decrypted_nameid.text)
xml_assertion_enc = b64decode(self.file_contents(join(self.data_path, 'responses', 'valid_encrypted_assertion_encrypted_nameid.xml.base64')))
dom_assertion_enc = etree.fromstring(xml_assertion_enc)
encrypted_assertion_enc_nodes = dom_assertion_enc.find('.//saml:EncryptedAssertion', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data_assert = encrypted_assertion_enc_nodes[0]
decrypted_assertion = OneLogin_Saml2_Utils.decrypt_element(encrypted_data_assert, key)
self.assertEqual('{%s}Assertion' % OneLogin_Saml2_Constants.NS_SAML, decrypted_assertion.tag)
self.assertEqual('_6fe189b1c241827773902f2b1d3a843418206a5c97', decrypted_assertion.get('ID'))
encrypted_nameid_nodes = decrypted_assertion.xpath('./saml:Subject/saml:EncryptedID', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data = encrypted_nameid_nodes[0][0]
decrypted_nameid = OneLogin_Saml2_Utils.decrypt_element(encrypted_data, key)
self.assertEqual('{%s}NameID' % OneLogin_Saml2_Constants.NS_SAML, decrypted_nameid.tag)
self.assertEqual('457bdb600de717891c77647b0806ce59c089d5b8', decrypted_nameid.text)
key_2_file_name = join(self.data_path, 'misc', 'sp2.key')
f = open(key_2_file_name, 'r')
key2 = f.read()
f.close()
self.assertRaises(Exception, OneLogin_Saml2_Utils.decrypt_element, encrypted_data, key2)
key_3_file_name = join(self.data_path, 'misc', 'sp2.key')
f = open(key_3_file_name, 'r')
key3 = f.read()
f.close()
self.assertRaises(Exception, OneLogin_Saml2_Utils.decrypt_element, encrypted_data, key3)
xml_nameid_enc_2 = b64decode(self.file_contents(join(self.data_path, 'responses', 'invalids', 'encrypted_nameID_without_EncMethod.xml.base64')))
dom_nameid_enc_2 = etree.fromstring(xml_nameid_enc_2)
encrypted_nameid_nodes_2 = dom_nameid_enc_2.find('.//saml:EncryptedID', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data_2 = encrypted_nameid_nodes_2[0]
self.assertRaises(Exception, OneLogin_Saml2_Utils.decrypt_element, encrypted_data_2, key)
xml_nameid_enc_3 = b64decode(self.file_contents(join(self.data_path, 'responses', 'invalids', 'encrypted_nameID_without_keyinfo.xml.base64')))
dom_nameid_enc_3 = etree.fromstring(xml_nameid_enc_3)
encrypted_nameid_nodes_3 = dom_nameid_enc_3.find('.//saml:EncryptedID', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data_3 = encrypted_nameid_nodes_3[0]
self.assertRaises(Exception, OneLogin_Saml2_Utils.decrypt_element, encrypted_data_3, key)
def testAddSign(self):
"""
Tests the add_sign method of the OneLogin_Saml2_Utils
"""
settings = OneLogin_Saml2_Settings(self.loadSettingsJSON())
key = settings.get_sp_key()
cert = settings.get_sp_cert()
xml_authn = b64decode(self.file_contents(join(self.data_path, 'requests', 'authn_request.xml.base64')))
xml_authn_signed = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn, key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed)
res = parseString(xml_authn_signed)
ds_signature = res.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature.tagName)
xml_authn_dom = parseString(xml_authn)
xml_authn_signed_2 = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn_dom.toxml(), key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed_2)
res_2 = parseString(xml_authn_signed_2)
ds_signature_2 = res_2.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_2.tagName)
xml_authn_signed_3 = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn_dom.firstChild.toxml(), key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed_3)
res_3 = parseString(xml_authn_signed_3)
ds_signature_3 = res_3.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_3.tagName)
xml_authn_etree = etree.fromstring(xml_authn)
xml_authn_signed_4 = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn_etree, key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed_4)
res_4 = parseString(xml_authn_signed_4)
ds_signature_4 = res_4.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_4.tagName)
xml_authn_signed_5 = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn_etree, key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed_5)
res_5 = parseString(xml_authn_signed_5)
ds_signature_5 = res_5.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_5.tagName)
xml_logout_req = b64decode(self.file_contents(join(self.data_path, 'logout_requests', 'logout_request.xml.base64')))
xml_logout_req_signed = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_logout_req, key, cert))
self.assertIn('<ds:SignatureValue>', xml_logout_req_signed)
res_6 = parseString(xml_logout_req_signed)
ds_signature_6 = res_6.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_6.tagName)
xml_logout_res = b64decode(self.file_contents(join(self.data_path, 'logout_responses', 'logout_response.xml.base64')))
xml_logout_res_signed = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_logout_res, key, cert))
self.assertIn('<ds:SignatureValue>', xml_logout_res_signed)
res_7 = parseString(xml_logout_res_signed)
ds_signature_7 = res_7.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_7.tagName)
xml_metadata = self.file_contents(join(self.data_path, 'metadata', 'metadata_settings1.xml'))
xml_metadata_signed = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_metadata, key, cert))
self.assertIn('<ds:SignatureValue>', xml_metadata_signed)
res_8 = parseString(xml_metadata_signed)
ds_signature_8 = res_8.firstChild.firstChild.nextSibling.firstChild.nextSibling
self.assertIn('ds:Signature', ds_signature_8.tagName)
def testValidateSign(self):
"""
Tests the validate_sign method of the OneLogin_Saml2_Utils
"""
settings = OneLogin_Saml2_Settings(self.loadSettingsJSON())
idp_data = settings.get_idp_data()
cert = idp_data['x509cert']
settings_2 = OneLogin_Saml2_Settings(self.loadSettingsJSON('settings2.json'))
idp_data2 = settings_2.get_idp_data()
cert_2 = idp_data2['x509cert']
fingerprint_2 = OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert_2)
fingerprint_2_256 = OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert_2, 'sha256')
try:
self.assertFalse(OneLogin_Saml2_Utils.validate_sign('', cert))
except Exception as e:
self.assertEqual('Empty string supplied as input', str(e))
# expired cert
xml_metadata_signed = self.file_contents(join(self.data_path, 'metadata', 'signed_metadata_settings1.xml'))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed, cert))
        # expired cert, so validation fails when validatecert=True
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed, cert, validatecert=True))
xml_metadata_signed_2 = self.file_contents(join(self.data_path, 'metadata', 'signed_metadata_settings2.xml'))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed_2, cert_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed_2, None, fingerprint_2))
xml_response_msg_signed = b64decode(self.file_contents(join(self.data_path, 'responses', 'signed_message_response.xml.base64')))
# expired cert
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed, cert))
        # expired cert, so validation fails when validatecert=True
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed, cert, validatecert=True))
# modified cert
other_cert_path = join(dirname(__file__), '..', '..', '..', 'certs')
f = open(other_cert_path + '/certificate1', 'r')
cert_x = f.read()
f.close()
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed, cert_x))
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed, cert_x, validatecert=True))
xml_response_msg_signed_2 = b64decode(self.file_contents(join(self.data_path, 'responses', 'signed_message_response2.xml.base64')))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed_2, cert_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed_2, None, fingerprint_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed_2, None, fingerprint_2, 'sha1'))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed_2, None, fingerprint_2_256, 'sha256'))
xml_response_assert_signed = b64decode(self.file_contents(join(self.data_path, 'responses', 'signed_assertion_response.xml.base64')))
# expired cert
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_assert_signed, cert))
        # expired cert, so validation fails when validatecert=True
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_assert_signed, cert, validatecert=True))
xml_response_assert_signed_2 = b64decode(self.file_contents(join(self.data_path, 'responses', 'signed_assertion_response2.xml.base64')))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_assert_signed_2, cert_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_assert_signed_2, None, fingerprint_2))
xml_response_double_signed = b64decode(self.file_contents(join(self.data_path, 'responses', 'double_signed_response.xml.base64')))
# expired cert
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_double_signed, cert))
        # expired cert, so validation fails when validatecert=True
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_double_signed, cert, validatecert=True))
xml_response_double_signed_2 = b64decode(self.file_contents(join(self.data_path, 'responses', 'double_signed_response2.xml.base64')))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_double_signed_2, cert_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_double_signed_2, None, fingerprint_2))
dom = parseString(xml_response_msg_signed_2)
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(dom.toxml(), cert_2))
dom.firstChild.firstChild.firstChild.nodeValue = 'https://idp.example.com/simplesaml/saml2/idp/metadata.php'
dom.firstChild.getAttributeNode('ID').nodeValue = u'_34fg27g212d63k1f923845324475802ac0fc24530b'
# Reference validation failed
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(dom.toxml(), cert_2))
invalid_fingerprint = 'afe71c34ef740bc87434be13a2263d31271da1f9'
# Wrong fingerprint
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed_2, None, invalid_fingerprint))
dom_2 = parseString(xml_response_double_signed_2)
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(dom_2.toxml(), cert_2))
dom_2.firstChild.firstChild.firstChild.nodeValue = 'https://example.com/other-idp'
# Modified message
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(dom_2.toxml(), cert_2))
dom_3 = parseString(xml_response_double_signed_2)
assert_elem_3 = dom_3.firstChild.firstChild.nextSibling.nextSibling.nextSibling
assert_elem_3.setAttributeNS(OneLogin_Saml2_Constants.NS_SAML, 'xmlns:saml', OneLogin_Saml2_Constants.NS_SAML)
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(assert_elem_3.toxml(), cert_2))
no_signed = b64decode(self.file_contents(join(self.data_path, 'responses', 'invalids', 'no_signature.xml.base64')))
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(no_signed, cert))
no_key = b64decode(self.file_contents(join(self.data_path, 'responses', 'invalids', 'no_key.xml.base64')))
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(no_key, cert))<|fim▁end|> | |
<|file_name|>lun.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The GoStor Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package scsi
import (
"errors"
"strings"
"github.com/gostor/gotgt/pkg/api"
"github.com/gostor/gotgt/pkg/config"
)
// NewSCSILu creates a new SCSI LU.
// path format <protocol>:/absolute/file/path
func NewSCSILu(bs *config.BackendStorage) (*api.SCSILu, error) {
pathinfo := strings.SplitN(bs.Path, ":", 2)
if len(pathinfo) < 2 {
return nil, errors.New("invalid device path string")
}
backendType := pathinfo[0]
backendPath := pathinfo[1]
sbc := NewSBCDevice(api.TYPE_DISK)
backing, err := NewBackingStore(backendType)
if err != nil {
return nil, err
}
var lu = &api.SCSILu{
PerformCommand: luPerformCommand,
DeviceProtocol: sbc,
Storage: backing,
BlockShift: bs.BlockShift,
UUID: bs.DeviceID,
}
err = backing.Open(lu, backendPath)
if err != nil {
return nil, err
}
lu.Size = backing.Size(lu)
lu.DeviceProtocol.InitLu(lu)
lu.Attrs.ThinProvisioning = bs.ThinProvisioning
lu.Attrs.Online = bs.Online
lu.Attrs.Lbppbe = 3
return lu, nil
}
// NewLUN0 creates a new fake SCSI LU.
func NewLUN0() *api.SCSILu {
sbc := NewSBCDevice(api.TYPE_UNKNOWN)
backing, _ := NewBackingStore("null")
var lu = &api.SCSILu{
PerformCommand: luPerformCommand,
DeviceProtocol: sbc,
Storage: backing,
BlockShift: api.DefaultBlockShift,
UUID: 0,
}
lu.Size = backing.Size(lu)<|fim▁hole|>}
func GetReservation(dev *api.SCSILu, nexusID uint64) *api.SCSIReservation {
return nil
}
func luPerformCommand(tid int, cmd *api.SCSICommand) api.SAMStat {
op := int(cmd.SCB[0])
fn := cmd.Device.DeviceProtocol.PerformCommand(op)
if fn != nil {
fnop := fn.(SCSIDeviceOperation)
// TODO host := cmd.ITNexus.Host
host := 0
cmd.State = api.SCSICommandProcessed
return fnop.CommandPerformFunc(host, cmd)
}
return api.SAMStatGood
}
func luPreventRemoval(lu *api.SCSILu) bool {
// TODO
return false
}<|fim▁end|> | lu.DeviceProtocol.InitLu(lu)
lu.Attrs.Online = false
lu.Attrs.Lbppbe = 3
return lu |
<|file_name|>openerp_utils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2013-11-25
@author: Martin H. Bramwell
'''
import oerplib
import sys
import socket
<|fim▁hole|>class OpenERP(object):
def __init__(self, credentials):
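        # The credentials dict is expected to provide: db_name, user_id,
        # user_pwd, host_name and host_port (see the lookups below).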
db = credentials['db_name']
user_id = credentials['user_id']
host_name = credentials['host_name']
host_port = credentials['host_port']
user_pwd = credentials['user_pwd']
print "Getting connection to {} for {}".format(db, user_id)
try:
oerp = oerplib.OERP(
server=host_name, protocol='xmlrpc', port=host_port)
OErpModel.openErpConnection['super'] = oerp
if db in oerp.db.list():
db_connect(user_id, user_pwd, db)
else:
                print "There is no database called: {}".format(db)
except socket.gaierror:
sys.exit(
"Is this the correct URL : {}".format(host_name))
except socket.error:
sys.exit(
"Is this the correct port number : {}".format(host_port))
def db_connect(usr, pwd, db):
oerp = OErpModel.openErpConnection['super']
user = oerp.login(user=usr, database=db, passwd=pwd)
OErpModel.openErpConnection['admin'] = user
# print " - - {} - - ".format(user)<|fim▁end|> | from models.OErpModel import OErpModel
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================<|fim▁hole|>
"""Libraries to build Recurrent Neural Networks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function<|fim▁end|> | |
<|file_name|>notebook_exporter.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
@file
@brief Customer notebook exporters.
"""
import os
from textwrap import indent
from traitlets import default
from traitlets.config import Config
from jinja2 import DictLoader
from nbconvert.exporters import RSTExporter
from nbconvert.filters.pandoc import convert_pandoc
def convert_pandoc_rst(source, from_format, to_format, extra_args=None):
"""
Overwrites `convert_pandoc
<https://github.com/jupyter/nbconvert/blob/master/nbconvert/filters/pandoc.py>`_.
@param source string to convert
@param from_format from format
@param to_format to format
@param extra_args extra arguments
@return results
"""
    return convert_pandoc(source, from_format, to_format, extra_args=extra_args)
def process_raw_html(source, extra_args=None):
"""
Replaces the output of
`add_menu_notebook
<http://www.xavierdupre.fr/app/jyquickhelper/helpsphinx/jyquickhelper/
helper_in_notebook.html#jyquickhelper.helper_in_notebook.add_notebook_menu>`_
by:
::
.. contents::
:local:
"""
if source is None:
return source # pragma: no cover
if 'var update_menu = function() {' in source:
return "\n\n.. contents::\n :local:\n\n"
return "\n\n.. raw:: html\n\n" + indent(source, prefix=' ')
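# A rough sketch of the behaviour above (the sample inputs are made up):
#
#     process_raw_html('... var update_menu = function() { ...')
#     # -> '\n\n.. contents::\n    :local:\n\n'
#     process_raw_html('<p>plain html</p>')
#     # -> '\n\n.. raw:: html\n\n    <p>plain html</p>'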
class UpgradedRSTExporter(RSTExporter):
"""
Exports :epkg:`rst` documents.
Overwrites `RSTExporter <https://github.com/jupyter/
nbconvert/blob/master/nbconvert/exporters/rst.py>`_.
* It replaces `convert_pandoc <https://github.com/jupyter/
nbconvert/blob/master/nbconvert/filters/pandoc.py>`_
by @see fn convert_pandoc_rst.
* It converts :epkg:`svg` into :epkg:`png` if possible,
see @see fn process_raw_html.
* It replaces some known :epkg:`javascript`. The output of function
`add_menu_notebook <http://www.xavierdupre.fr/app/jyquickhelper/helpsphinx/jyquickhelper/
helper_in_notebook.html#jyquickhelper.helper_in_notebook.add_notebook_menu>`_
is replaced by ``.. contents::``.
.. index:: notebook export, nbconvert
It extends the template
`rst.tpl <https://github.com/jupyter/nbconvert/blob/master/nbconvert/templates/rst.tpl>`_.
New template is `rst_modified.tpl <https://github.com/sdpython/pyquickhelper/blob/master/
src/pyquickhelper/helpgen/rst_modified.tpl>`_.
It follows the hints given at
`Programatically creating templates
<https://nbconvert.readthedocs.io/en/latest/
nbconvert_library.html#Programatically-creating-templates>`_.
:epkg:`jyquickhelper` should add a string highly recognizable when adding a menu.
"""
def __init__(self, *args, **kwargs):
"""
Overwrites the extra loaders to get the right template.
"""
filename = os.path.join(os.path.dirname(__file__), 'rst_modified.tpl')
with open(filename, 'r', encoding='utf-8') as f:
content = f.read()
filename = os.path.join(os.path.dirname(__file__), 'rst.tpl')
with open(filename, 'r', encoding='utf-8') as f:
content2 = f.read()
dl = DictLoader({'rst_modified.tpl': content, 'rst.tpl': content2})
kwargs['extra_loaders'] = [dl]
RSTExporter.__init__(self, *args, **kwargs)
def default_filters(self):
"""
        Override in subclasses to provide extra filters.
This should return an iterable of 2-tuples: (name, class-or-function).
You should call the method on the parent class and include the filters
it provides.
If a name is repeated, the last filter provided wins. Filters from
user-supplied config win over filters provided by classes.
"""
for k, v in RSTExporter.default_filters(self):
yield (k, v)
yield ('convert_pandoc_rst', convert_pandoc_rst)
yield ('process_raw_html', process_raw_html)
output_mimetype = 'text/restructuredtext'
export_from_notebook = "reST"
@default('template_file')
def _template_file_default(self):
return "rst_modified.tpl"
@default('file_extension')
def _file_extension_default(self):
return '.rst'
@default('template_name')
def _template_name_default(self):
return 'rst'
@property
def default_config(self):
c = Config({
'ExtractOutputPreprocessor': {
'enabled': True,
'output_filename_template': '{unique_key}_{cell_index}_{index}{extension}'
},
'HighlightMagicsPreprocessor': {
'enabled': True
},
})
c.merge(super(UpgradedRSTExporter, self).default_config)<|fim▁hole|><|fim▁end|> | return c |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>from network import WLAN<|fim▁hole|>
###############################################################################
# Settings for WLAN STA mode
###############################################################################
WLAN_MODE = 'off'
#WLAN_SSID = ''
#WLAN_AUTH = (WLAN.WPA2,'')
###############################################################################
# LoRaWAN Configuration
###############################################################################
# May be either 'otaa', 'abp', or 'off'
LORA_MODE = 'otaa'
# Settings for mode 'otaa'
LORA_OTAA_EUI = '70B3D57EF0001ED4'
LORA_OTAA_KEY = None # See README.md for instructions!
# Settings for mode 'abp'
#LORA_ABP_DEVADDR = ''
#LORA_ABP_NETKEY = ''
#LORA_ABP_APPKEY = ''
# Interval between measures transmitted to TTN.
# Measured airtime of transmission is 56.6 ms, fair use policy limits us to
# 30 seconds per day (= roughly 500 messages). We default to a 180 second
# interval (=480 messages / day).
LORA_SEND_RATE = 180
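# Sanity check: 86400 s / 180 s = 480 messages per day, and
# 480 * 56.6 ms is roughly 27.2 s of airtime, just under the 30 s budget.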
###############################################################################
# GNSS Configuration
###############################################################################
GNSS_UART_PORT = 1
GNSS_UART_BAUD = 9600
GNSS_ENABLE_PIN = 'P8'<|fim▁end|> | |
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core'
import { RouterModule } from '@angular/router';
import { rootRouterConfig } from './app.routes';
import { AppComponent } from './app.component';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { BrowserModule } from '@angular/platform-browser';
import { HttpModule, JsonpModule } from '@angular/http';
import {TagSearchService} from "./flickr/service/data.service";
import {TagSearchComponent} from "./flickr/component/search-result/search-result.component";
import {ScrollToTopComponent} from "./flickr/component/scroll-to-top/scroll-to-top.component";
@NgModule({
declarations: [
AppComponent,
TagSearchComponent,
ScrollToTopComponent
],
imports: [<|fim▁hole|> FormsModule,
ReactiveFormsModule,
HttpModule,
JsonpModule,
RouterModule.forRoot(rootRouterConfig)
],
providers: [
TagSearchService
],
bootstrap: [ AppComponent ]
})
export class AppModule {
}<|fim▁end|> | BrowserModule, |
<|file_name|>sonos.py<|end_file_name|><|fim▁begin|>"""
Support to interface with Sonos players (via SoCo).
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.sonos/
"""
import datetime
import logging
from os import path
import socket
import urllib
import voluptuous as vol
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE, DOMAIN, MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_CLEAR_PLAYLIST,
SUPPORT_SELECT_SOURCE, MediaPlayerDevice)
from homeassistant.const import (
STATE_IDLE, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, STATE_OFF,
ATTR_ENTITY_ID)
from homeassistant.config import load_yaml_config_file
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['SoCo==0.12']
_LOGGER = logging.getLogger(__name__)
# The soco library is excessively chatty when it comes to logging and
# causes a LOT of spam in the logs due to making a http connection to each
# speaker every 10 seconds. Quiet it down a bit to just actual problems.
_SOCO_LOGGER = logging.getLogger('soco')
_SOCO_LOGGER.setLevel(logging.ERROR)
_REQUESTS_LOGGER = logging.getLogger('requests')
_REQUESTS_LOGGER.setLevel(logging.ERROR)
SUPPORT_SONOS = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE |\
SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA |\
SUPPORT_SEEK | SUPPORT_CLEAR_PLAYLIST | SUPPORT_SELECT_SOURCE
SERVICE_GROUP_PLAYERS = 'sonos_group_players'
SERVICE_UNJOIN = 'sonos_unjoin'
SERVICE_SNAPSHOT = 'sonos_snapshot'
SERVICE_RESTORE = 'sonos_restore'
SERVICE_SET_TIMER = 'sonos_set_sleep_timer'
SERVICE_CLEAR_TIMER = 'sonos_clear_sleep_timer'
SUPPORT_SOURCE_LINEIN = 'Line-in'
SUPPORT_SOURCE_TV = 'TV'
# Service call validation schemas
ATTR_SLEEP_TIME = 'sleep_time'
SONOS_SCHEMA = vol.Schema({
ATTR_ENTITY_ID: cv.entity_ids,
})
SONOS_SET_TIMER_SCHEMA = SONOS_SCHEMA.extend({
vol.Required(ATTR_SLEEP_TIME): vol.All(vol.Coerce(int),
vol.Range(min=0, max=86399))
})
# List of devices that have been registered
DEVICES = []
# pylint: disable=unused-argument, too-many-locals
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Sonos platform."""
import soco
global DEVICES
if discovery_info:
player = soco.SoCo(discovery_info)
# if device allready exists by config
if player.uid in DEVICES:
return True
if player.is_visible:
device = SonosDevice(hass, player)
add_devices([device])
if not DEVICES:
register_services(hass)
DEVICES.append(device)
return True
return False
players = None
hosts = config.get('hosts', None)
if hosts:
# Support retro compatibility with comma separated list of hosts
# from config
hosts = hosts.split(',') if isinstance(hosts, str) else hosts
players = []
for host in hosts:
players.append(soco.SoCo(socket.gethostbyname(host)))
if not players:
players = soco.discover(interface_addr=config.get('interface_addr',
None))
if not players:
_LOGGER.warning('No Sonos speakers found.')
return False
DEVICES = [SonosDevice(hass, p) for p in players]
add_devices(DEVICES)
register_services(hass)
_LOGGER.info('Added %s Sonos speakers', len(players))
return True
def register_services(hass):
"""Register all services for sonos devices."""
descriptions = load_yaml_config_file(
path.join(path.dirname(__file__), 'services.yaml'))
hass.services.register(DOMAIN, SERVICE_GROUP_PLAYERS,
_group_players_service,
descriptions.get(SERVICE_GROUP_PLAYERS),
schema=SONOS_SCHEMA)
hass.services.register(DOMAIN, SERVICE_UNJOIN,
_unjoin_service,
descriptions.get(SERVICE_UNJOIN),
schema=SONOS_SCHEMA)
hass.services.register(DOMAIN, SERVICE_SNAPSHOT,
_snapshot_service,
descriptions.get(SERVICE_SNAPSHOT),
schema=SONOS_SCHEMA)
hass.services.register(DOMAIN, SERVICE_RESTORE,
_restore_service,
descriptions.get(SERVICE_RESTORE),
schema=SONOS_SCHEMA)
hass.services.register(DOMAIN, SERVICE_SET_TIMER,
_set_sleep_timer_service,
descriptions.get(SERVICE_SET_TIMER),
schema=SONOS_SET_TIMER_SCHEMA)
hass.services.register(DOMAIN, SERVICE_CLEAR_TIMER,
_clear_sleep_timer_service,
descriptions.get(SERVICE_CLEAR_TIMER),
schema=SONOS_SCHEMA)
def _apply_service(service, service_func, *service_func_args):
"""Internal func for applying a service."""
entity_ids = service.data.get('entity_id')
if entity_ids:
_devices = [device for device in DEVICES
if device.entity_id in entity_ids]
else:
_devices = DEVICES
for device in _devices:
service_func(device, *service_func_args)
device.update_ha_state(True)
def _group_players_service(service):
"""Group media players, use player as coordinator."""
_apply_service(service, SonosDevice.group_players)
def _unjoin_service(service):
"""Unjoin the player from a group."""
_apply_service(service, SonosDevice.unjoin)
def _snapshot_service(service):
"""Take a snapshot."""
_apply_service(service, SonosDevice.snapshot)
def _restore_service(service):
"""Restore a snapshot."""
_apply_service(service, SonosDevice.restore)
def _set_sleep_timer_service(service):
"""Set a timer."""
_apply_service(service,
SonosDevice.set_sleep_timer,
service.data[ATTR_SLEEP_TIME])
def _clear_sleep_timer_service(service):
"""Set a timer."""
_apply_service(service,
SonosDevice.clear_sleep_timer)
def only_if_coordinator(func):
    """Decorator that runs the wrapped method only on the group coordinator.
    Calling the decorated method is skipped when the player is not a
    coordinator, because a grouped speaker (not in the coordinator role)
    would otherwise throw soco.exceptions.SoCoSlaveException.
Also, partially catch exceptions like:
soco.exceptions.SoCoUPnPException: UPnP Error 701 received:
Transition not available from <player ip address>
"""
def wrapper(*args, **kwargs):
"""Decorator wrapper."""
if args[0].is_coordinator:
from soco.exceptions import SoCoUPnPException
try:
func(*args, **kwargs)
except SoCoUPnPException:
_LOGGER.error('command "%s" for Sonos device "%s" '
'not available in this mode',
func.__name__, args[0].name)
else:
_LOGGER.debug('Ignore command "%s" for Sonos device "%s" (%s)',
func.__name__, args[0].name, 'not coordinator')
return wrapper
# pylint: disable=too-many-instance-attributes, too-many-public-methods
# pylint: disable=abstract-method
class SonosDevice(MediaPlayerDevice):
"""Representation of a Sonos device."""
# pylint: disable=too-many-arguments
def __init__(self, hass, player):
"""Initialize the Sonos device."""
from soco.snapshot import Snapshot
self.hass = hass
self.volume_increment = 5
self._player = player
self._speaker_info = None
self._name = None
self._coordinator = None
self._media_content_id = None
self._media_duration = None
self._media_image_url = None
self._media_artist = None
self._media_album_name = None
self._media_title = None
self.update()
self.soco_snapshot = Snapshot(self._player)
@property
def should_poll(self):
"""Polling needed."""
return True
def update_sonos(self, now):
"""Update state, called by track_utc_time_change."""
self.update_ha_state(True)
@property
def unique_id(self):
"""Return an unique ID."""
return self._player.uid
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
if self._status == 'PAUSED_PLAYBACK':
return STATE_PAUSED
if self._status == 'PLAYING':
return STATE_PLAYING
if self._status == 'STOPPED':
return STATE_IDLE
if self._status == 'OFF':
return STATE_OFF
return STATE_UNKNOWN
@property
def is_coordinator(self):
"""Return true if player is a coordinator."""
return self._player.is_coordinator
def update(self):
"""Retrieve latest state."""
self._speaker_info = self._player.get_speaker_info()
self._name = self._speaker_info['zone_name'].replace(
' (R)', '').replace(' (L)', '')
if self.available:
self._status = self._player.get_current_transport_info().get(
'current_transport_state')
trackinfo = self._player.get_current_track_info()
if trackinfo['uri'].startswith('x-rincon:'):
# this speaker is a slave, find the coordinator
# the uri of the track is 'x-rincon:{coordinator-id}'
coordinator_id = trackinfo['uri'][9:]
coordinators = [device for device in DEVICES
if device.unique_id == coordinator_id]
self._coordinator = coordinators[0] if coordinators else None
else:
self._coordinator = None
if not self._coordinator:
mediainfo = self._player.avTransport.GetMediaInfo([
('InstanceID', 0)
])
duration = trackinfo.get('duration', '0:00')
# if the speaker is playing from the "line-in" source, getting
# track metadata can return NOT_IMPLEMENTED, which breaks the
# volume logic below
if duration == 'NOT_IMPLEMENTED':
duration = None
else:
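                    # Convert an 'H:MM:SS' style string into seconds: the
                    # reversed components are weighted by 60 ** position.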
duration = sum(60 ** x[0] * int(x[1]) for x in enumerate(
reversed(duration.split(':'))))
media_image_url = trackinfo.get('album_art', None)
media_artist = trackinfo.get('artist', None)
media_album_name = trackinfo.get('album', None)
media_title = trackinfo.get('title', None)
if media_image_url in ('', 'NOT_IMPLEMENTED', None):
# fallback to asking the speaker directly
media_image_url = \
'http://{host}:{port}/getaa?s=1&u={uri}'.format(
host=self._player.ip_address,
port=1400,
uri=urllib.parse.quote(mediainfo['CurrentURI'])
)
if media_artist in ('', 'NOT_IMPLEMENTED', None):
# if listening to a radio stream the media_artist field
# will be empty and the title field will contain the
# filename that is being streamed
current_uri_metadata = mediainfo["CurrentURIMetaData"]
if current_uri_metadata not in \
('', 'NOT_IMPLEMENTED', None):
# currently soco does not have an API for this
import soco
current_uri_metadata = soco.xml.XML.fromstring(
soco.utils.really_utf8(current_uri_metadata))
md_title = current_uri_metadata.findtext(
'.//{http://purl.org/dc/elements/1.1/}title')
if md_title not in ('', 'NOT_IMPLEMENTED', None):
media_artist = ''
media_title = md_title
self._media_content_id = trackinfo.get('title', None)
self._media_duration = duration
self._media_image_url = media_image_url
self._media_artist = media_artist
self._media_album_name = media_album_name
self._media_title = media_title
else:
self._status = 'OFF'
self._coordinator = None
self._media_content_id = None
self._media_duration = None
self._media_image_url = None
self._media_artist = None
self._media_album_name = None
self._media_title = None
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._player.volume / 100.0
@property
def is_volume_muted(self):
"""Return true if volume is muted."""
return self._player.mute
@property
def media_content_id(self):
"""Content ID of current playing media."""
if self._coordinator:
return self._coordinator.media_content_id
else:
return self._media_content_id
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
if self._coordinator:
return self._coordinator.media_duration
else:
return self._media_duration
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._coordinator:
return self._coordinator.media_image_url
else:
return self._media_image_url
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
if self._coordinator:
return self._coordinator.media_artist
else:
return self._media_artist
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
if self._coordinator:
return self._coordinator.media_album_name
else:
return self._media_album_name
@property
def media_title(self):
"""Title of current playing media."""
if self._player.is_playing_line_in:
return SUPPORT_SOURCE_LINEIN
if self._player.is_playing_tv:
return SUPPORT_SOURCE_TV
if self._coordinator:
return self._coordinator.media_title
else:
return self._media_title
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
if not self.source_list:
# some devices do not allow source selection
return SUPPORT_SONOS ^ SUPPORT_SELECT_SOURCE
return SUPPORT_SONOS
def volume_up(self):
"""Volume up media player."""
self._player.volume += self.volume_increment
def volume_down(self):
"""Volume down media player."""
self._player.volume -= self.volume_increment
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._player.volume = str(int(volume * 100))
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
self._player.mute = mute
def select_source(self, source):
"""Select input source."""
if source == SUPPORT_SOURCE_LINEIN:
self._player.switch_to_line_in()
elif source == SUPPORT_SOURCE_TV:
self._player.switch_to_tv()
@property
def source_list(self):
"""List of available input sources."""
model_name = self._speaker_info['model_name']<|fim▁hole|>
if 'PLAY:5' in model_name:
return [SUPPORT_SOURCE_LINEIN]
elif 'PLAYBAR' in model_name:
return [SUPPORT_SOURCE_LINEIN, SUPPORT_SOURCE_TV]
@property
def source(self):
"""Name of the current input source."""
if self._player.is_playing_line_in:
return SUPPORT_SOURCE_LINEIN
if self._player.is_playing_tv:
return SUPPORT_SOURCE_TV
return None
@only_if_coordinator
def turn_off(self):
"""Turn off media player."""
self._player.pause()
def media_play(self):
"""Send play command."""
if self._coordinator:
self._coordinator.media_play()
else:
self._player.play()
def media_pause(self):
"""Send pause command."""
if self._coordinator:
self._coordinator.media_pause()
else:
self._player.pause()
def media_next_track(self):
"""Send next track command."""
if self._coordinator:
self._coordinator.media_next_track()
else:
self._player.next()
    def media_previous_track(self):
        """Send previous track command."""
if self._coordinator:
self._coordinator.media_previous_track()
else:
self._player.previous()
def media_seek(self, position):
"""Send seek command."""
if self._coordinator:
self._coordinator.media_seek(position)
else:
self._player.seek(str(datetime.timedelta(seconds=int(position))))
def clear_playlist(self):
"""Clear players playlist."""
if self._coordinator:
self._coordinator.clear_playlist()
else:
self._player.clear_queue()
@only_if_coordinator
def turn_on(self):
"""Turn the media player on."""
self._player.play()
def play_media(self, media_type, media_id, **kwargs):
"""
Send the play_media command to the media player.
If ATTR_MEDIA_ENQUEUE is True, add `media_id` to the queue.
"""
if self._coordinator:
self._coordinator.play_media(media_type, media_id, **kwargs)
else:
if kwargs.get(ATTR_MEDIA_ENQUEUE):
from soco.exceptions import SoCoUPnPException
try:
self._player.add_uri_to_queue(media_id)
except SoCoUPnPException:
_LOGGER.error('Error parsing media uri "%s", '
"please check it's a valid media resource "
'supported by Sonos', media_id)
else:
self._player.play_uri(media_id)
def group_players(self):
"""Group all players under this coordinator."""
if self._coordinator:
self._coordinator.group_players()
else:
self._player.partymode()
@only_if_coordinator
def unjoin(self):
"""Unjoin the player from a group."""
self._player.unjoin()
@only_if_coordinator
def snapshot(self):
"""Snapshot the player."""
self.soco_snapshot.snapshot()
@only_if_coordinator
def restore(self):
"""Restore snapshot for the player."""
self.soco_snapshot.restore(True)
@only_if_coordinator
def set_sleep_timer(self, sleep_time):
"""Set the timer on the player."""
self._player.set_sleep_timer(sleep_time)
@only_if_coordinator
def clear_sleep_timer(self):
"""Clear the timer on the player."""
self._player.set_sleep_timer(None)
@property
def available(self):
"""Return True if player is reachable, False otherwise."""
try:
sock = socket.create_connection(
address=(self._player.ip_address, 1443),
timeout=3)
sock.close()
return True
except socket.error:
return False<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.urlresolvers import reverse
from jsonfield import JSONField
import collections
# Create your models here.
class YelpvisState(models.Model):
title=models.CharField(max_length=255)
slug=models.SlugField(unique=True,max_length=255)
description = models.CharField(max_length=255)
content=models.TextField()
published=models.BooleanField(default=True)
created=models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ['-created']
<|fim▁hole|> def __unicode__(self):
return u'%s' % self.title
def get_absolute_url(self):
return reverse('blog:post', args=[self.slug])
class YelpvisCommentState(models.Model):
content=models.TextField()
pub_date = models.DateTimeField(auto_now_add=True)
vis_state = JSONField()
class Meta:
ordering = ['-pub_date']
def __unicode__(self):
return self.content<|fim▁end|> | |
<|file_name|>simultaneous.py<|end_file_name|><|fim▁begin|>import numpy as np
from scipy.linalg import norm
from .base import AppearanceLucasKanade
class SimultaneousForwardAdditive(AppearanceLucasKanade):
@property
def algorithm(self):
return 'Simultaneous-FA'
def _fit(self, lk_fitting, max_iters=20, project=True):
# Initial error > eps
error = self.eps + 1
image = lk_fitting.image
lk_fitting.weights = []
n_iters = 0
# Number of shape weights
n_params = self.transform.n_parameters
# Initial appearance weights
if project:
            # Obtain weights by projection
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
weights = self.appearance_model.project(IWxp)
# Reset template
self.template = self.appearance_model.instance(weights)
else:
# Set all weights to 0 (yielding the mean)
weights = np.zeros(self.appearance_model.n_active_components)
lk_fitting.weights.append(weights)
# Compute appearance model Jacobian wrt weights
appearance_jacobian = self.appearance_model._jacobian.T
# Forward Additive Algorithm
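        # Shape parameters and appearance weights are updated jointly: the
        # steepest descent images are concatenated with the appearance
        # Jacobian, so one Gauss-Newton step solves H * delta_p = J.T * r
        # for both sets of parameters.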
while n_iters < max_iters and error > self.eps:
# Compute warped image with current weights
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
# Compute warp Jacobian
dW_dp = self.transform.jacobian(
self.template.mask.true_indices)
# Compute steepest descent images, VI_dW_dp
J = self.residual.steepest_descent_images(
image, dW_dp, forward=(self.template, self.transform,
self.interpolator))
# Concatenate VI_dW_dp with appearance model Jacobian
self._J = np.hstack((J, appearance_jacobian))
# Compute Hessian and inverse
self._H = self.residual.calculate_hessian(self._J)
# Compute steepest descent parameter updates
sd_delta_p = self.residual.steepest_descent_update(
self._J, self.template, IWxp)
<|fim▁hole|> parameters = self.transform.as_vector() + delta_p[:n_params]
self.transform.from_vector_inplace(parameters)
lk_fitting.parameters.append(parameters)
# Update appearance weights
weights -= delta_p[n_params:]
self.template = self.appearance_model.instance(weights)
lk_fitting.weights.append(weights)
# Test convergence
error = np.abs(norm(delta_p))
n_iters += 1
lk_fitting.fitted = True
return lk_fitting
class SimultaneousForwardCompositional(AppearanceLucasKanade):
@property
def algorithm(self):
return 'Simultaneous-FC'
def _set_up(self):
# Compute warp Jacobian
self._dW_dp = self.transform.jacobian(
self.template.mask.true_indices)
def _fit(self, lk_fitting, max_iters=20, project=True):
# Initial error > eps
error = self.eps + 1
image = lk_fitting.image
lk_fitting.weights = []
n_iters = 0
# Number of shape weights
n_params = self.transform.n_parameters
# Initial appearance weights
if project:
            # Obtain weights by projection
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
weights = self.appearance_model.project(IWxp)
# Reset template
self.template = self.appearance_model.instance(weights)
else:
# Set all weights to 0 (yielding the mean)
weights = np.zeros(self.appearance_model.n_active_components)
lk_fitting.weights.append(weights)
# Compute appearance model Jacobian wrt weights
appearance_jacobian = self.appearance_model._jacobian.T
# Forward Additive Algorithm
while n_iters < max_iters and error > self.eps:
# Compute warped image with current weights
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
# Compute steepest descent images, VI_dW_dp
J = self.residual.steepest_descent_images(IWxp, self._dW_dp)
# Concatenate VI_dW_dp with appearance model Jacobian
self._J = np.hstack((J, appearance_jacobian))
# Compute Hessian and inverse
self._H = self.residual.calculate_hessian(self._J)
# Compute steepest descent parameter updates
sd_delta_p = self.residual.steepest_descent_update(
self._J, self.template, IWxp)
# Compute gradient descent parameter updates
delta_p = np.real(self._calculate_delta_p(sd_delta_p))
# Update warp weights
self.transform.compose_after_from_vector_inplace(delta_p[:n_params])
lk_fitting.parameters.append(self.transform.as_vector())
# Update appearance weights
weights -= delta_p[n_params:]
self.template = self.appearance_model.instance(weights)
lk_fitting.weights.append(weights)
# Test convergence
error = np.abs(norm(delta_p))
n_iters += 1
lk_fitting.fitted = True
return lk_fitting
class SimultaneousInverseCompositional(AppearanceLucasKanade):
@property
def algorithm(self):
        return 'Simultaneous-IC'
def _set_up(self):
# Compute the Jacobian of the warp
self._dW_dp = self.transform.jacobian(
self.appearance_model.mean.mask.true_indices)
def _fit(self, lk_fitting, max_iters=20, project=True):
# Initial error > eps
error = self.eps + 1
image = lk_fitting.image
lk_fitting.weights = []
n_iters = 0
# Number of shape weights
n_params = self.transform.n_parameters
# Initial appearance weights
if project:
            # Obtain weights by projection
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
weights = self.appearance_model.project(IWxp)
# Reset template
self.template = self.appearance_model.instance(weights)
else:
# Set all weights to 0 (yielding the mean)
weights = np.zeros(self.appearance_model.n_active_components)
lk_fitting.weights.append(weights)
# Compute appearance model Jacobian wrt weights
appearance_jacobian = -self.appearance_model._jacobian.T
# Baker-Matthews, Inverse Compositional Algorithm
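        # In the inverse setting the steepest descent images are computed on
        # the template rather than the warped image, so the appearance
        # Jacobian and the final update are negated (note the sign flips
        # above and below).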
while n_iters < max_iters and error > self.eps:
# Compute warped image with current weights
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
# Compute steepest descent images, VT_dW_dp
J = self.residual.steepest_descent_images(self.template,
self._dW_dp)
# Concatenate VI_dW_dp with appearance model Jacobian
self._J = np.hstack((J, appearance_jacobian))
# Compute Hessian and inverse
self._H = self.residual.calculate_hessian(self._J)
# Compute steepest descent parameter updates
sd_delta_p = self.residual.steepest_descent_update(
self._J, IWxp, self.template)
# Compute gradient descent parameter updates
delta_p = -np.real(self._calculate_delta_p(sd_delta_p))
# Update warp weights
self.transform.compose_after_from_vector_inplace(delta_p[:n_params])
lk_fitting.parameters.append(self.transform.as_vector())
# Update appearance weights
weights -= delta_p[n_params:]
self.template = self.appearance_model.instance(weights)
lk_fitting.weights.append(weights)
# Test convergence
error = np.abs(norm(delta_p))
n_iters += 1
lk_fitting.fitted = True
return lk_fitting<|fim▁end|> | # Compute gradient descent parameter updates
delta_p = np.real(self._calculate_delta_p(sd_delta_p))
# Update warp weights |
<|file_name|>test_ind_wmaenvelope.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.<|fim▁hole|>###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import testcommon
import backtrader as bt
import backtrader.indicators as btind
chkdatas = 1
chkvals = [
['4076.212366', '3655.193634', '3576.228000'],
['4178.117675', '3746.573475', '3665.633700'],
['3974.307056', '3563.813794', '3486.822300'],
]
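# Each row holds the expected values for one line of the indicator
# (presumably the WMA plus its upper and lower envelope bands), sampled
# at the check points used by testcommon.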
chkmin = 30
chkind = btind.WMAEnvelope
def test_run(main=False):
datas = [testcommon.getdata(i) for i in range(chkdatas)]
testcommon.runtest(datas,
testcommon.TestStrategy,
main=main,
plot=main,
chkind=chkind,
chkmin=chkmin,
chkvals=chkvals)
if __name__ == '__main__':
test_run(main=True)<|fim▁end|> | #
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""
Database models for the badges app
"""
from importlib import import_module
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from lazy import lazy
from model_utils.models import TimeStampedModel
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from badges.utils import deserialize_count_specs
from config_models.models import ConfigurationModel
from xmodule.modulestore.django import modulestore
from xmodule_django.models import CourseKeyField
def validate_badge_image(image):
"""
Validates that a particular image is small enough to be a badge and square.
"""
if image.width != image.height:
raise ValidationError(_(u"The badge image must be square."))
if not image.size < (250 * 1024):
raise ValidationError(_(u"The badge image file size must be less than 250KB."))
def validate_lowercase(string):
"""
Validates that a string is lowercase.
"""
if not string.islower():
raise ValidationError(_(u"This value must be all lowercase."))
class CourseBadgesDisabledError(Exception):
"""
Exception raised when Course Badges aren't enabled, but an attempt to fetch one is made anyway.
"""
class BadgeClass(models.Model):
"""
Specifies a badge class to be registered with a backend.
"""
slug = models.SlugField(max_length=255, validators=[validate_lowercase])
issuing_component = models.SlugField(max_length=50, default='', blank=True, validators=[validate_lowercase])
display_name = models.CharField(max_length=255)
course_id = CourseKeyField(max_length=255, blank=True, default=None)
description = models.TextField()
criteria = models.TextField()
# Mode a badge was awarded for. Included for legacy/migration purposes.
mode = models.CharField(max_length=100, default='', blank=True)
image = models.ImageField(upload_to='badge_classes', validators=[validate_badge_image])
def __unicode__(self):
return u"<Badge '{slug}' for '{issuing_component}'>".format(
slug=self.slug, issuing_component=self.issuing_component
)
@classmethod
def get_badge_class(
cls, slug, issuing_component, display_name=None, description=None, criteria=None, image_file_handle=None,
mode='', course_id=None, create=True
):
"""
Looks up a badge class by its slug, issuing component, and course_id and returns it should it exist.
If it does not exist, and create is True, creates it according to the arguments. Otherwise, returns None.
The expectation is that an XBlock or platform developer should not need to concern themselves with whether
or not a badge class has already been created, but should just feed all requirements to this function
and it will 'do the right thing'. It should be the exception, rather than the common case, that a badge class
would need to be looked up without also being created were it missing.
"""
slug = slug.lower()
issuing_component = issuing_component.lower()
if course_id and not modulestore().get_course(course_id).issue_badges:
raise CourseBadgesDisabledError("This course does not have badges enabled.")
if not course_id:
course_id = CourseKeyField.Empty
try:
return cls.objects.get(slug=slug, issuing_component=issuing_component, course_id=course_id)
except cls.DoesNotExist:
if not create:
return None
badge_class = cls(
slug=slug,
issuing_component=issuing_component,
display_name=display_name,
course_id=course_id,
mode=mode,
description=description,
criteria=criteria,
)
badge_class.image.save(image_file_handle.name, image_file_handle)
badge_class.full_clean()
badge_class.save()
return badge_class
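    # A minimal usage sketch (the slug and field values below are made up):
    #
    #     badge_class = BadgeClass.get_badge_class(
    #         slug='course_complete', issuing_component='openedx__course',
    #         display_name='Course Complete', description='...', criteria='...',
    #         image_file_handle=image_file, course_id=course_key,
    #     )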
@lazy
def backend(self):
"""
Loads the badging backend.
"""
module, klass = settings.BADGING_BACKEND.rsplit('.', 1)
module = import_module(module)
return getattr(module, klass)()
def get_for_user(self, user):
"""
Get the assertion for this badge class for this user, if it has been awarded.
"""
return self.badgeassertion_set.filter(user=user)
def award(self, user, evidence_url=None):
"""
Contacts the backend to have a badge assertion created for this badge class for this user.
"""
return self.backend.award(self, user, evidence_url=evidence_url)
def save(self, **kwargs):
"""
Slugs must always be lowercase.
"""
self.slug = self.slug and self.slug.lower()
self.issuing_component = self.issuing_component and self.issuing_component.lower()
super(BadgeClass, self).save(**kwargs)
class Meta(object):
app_label = "badges"
unique_together = (('slug', 'issuing_component', 'course_id'),)
verbose_name_plural = "Badge Classes"
class BadgeAssertion(TimeStampedModel):
"""
Tracks badges on our side of the badge baking transaction
"""
user = models.ForeignKey(User)
badge_class = models.ForeignKey(BadgeClass)
data = JSONField()
backend = models.CharField(max_length=50)
image_url = models.URLField()
assertion_url = models.URLField()
def __unicode__(self):
return u"<{username} Badge Assertion for {slug} for {issuing_component}".format(
username=self.user.username, slug=self.badge_class.slug,
issuing_component=self.badge_class.issuing_component,
)
@classmethod
def assertions_for_user(cls, user, course_id=None):
"""
Get all assertions for a user, optionally constrained to a course.
"""
if course_id:
return cls.objects.filter(user=user, badge_class__course_id=course_id)
return cls.objects.filter(user=user)
class Meta(object):
app_label = "badges"
# Abstract model doesn't index this, so we have to.
BadgeAssertion._meta.get_field('created').db_index = True # pylint: disable=protected-access
class CourseCompleteImageConfiguration(models.Model):
"""
Contains the icon configuration for badges for a specific course mode.
"""
mode = models.CharField(
max_length=125,
help_text=_(u'The course mode for this badge image. For example, "verified" or "honor".'),
unique=True,
)
icon = models.ImageField(
# Actual max is 256KB, but need overhead for badge baking. This should be more than enough.
help_text=_(
u"Badge images must be square PNG files. The file size should be under 250KB."
),
upload_to='course_complete_badges',
validators=[validate_badge_image]
)
default = models.BooleanField(
help_text=_(
u"Set this value to True if you want this image to be the default image for any course modes "
u"that do not have a specified badge image. You can have only one default image."
),
default=False,
)
def __unicode__(self):
return u"<CourseCompleteImageConfiguration for '{mode}'{default}>".format(<|fim▁hole|> )
def clean(self):
"""
Make sure there's not more than one default.
"""
# pylint: disable=no-member
if self.default and CourseCompleteImageConfiguration.objects.filter(default=True).exclude(id=self.id):
raise ValidationError(_(u"There can be only one default image."))
@classmethod
def image_for_mode(cls, mode):
"""
Get the image for a particular mode.
"""
try:
return cls.objects.get(mode=mode).icon
except cls.DoesNotExist:
# Fall back to default, if there is one.
return cls.objects.get(default=True).icon
class Meta(object):
app_label = "badges"
class CourseEventBadgesConfiguration(ConfigurationModel):
"""
Determines the settings for meta course awards-- such as completing a certain
number of courses or enrolling in a certain number of them.
"""
courses_completed = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of completed courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
courses_enrolled = models.TextField(
blank=True, default='',
help_text=_(
u"On each line, put the number of enrolled courses to award a badge for, a comma, and the slug of a "
u"badge class you have created that has the issuing component 'openedx__course'. "
u"For example: 3,enrolled_3_courses"
)
)
course_groups = models.TextField(
blank=True, default='',
help_text=_(
u"Each line is a comma-separated list. The first item in each line is the slug of a badge class you "
u"have created that has an issuing component of 'openedx__course'. The remaining items in each line are "
u"the course keys the learner needs to complete to be awarded the badge. For example: "
u"slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second"
)
)
def __unicode__(self):
return u"<CourseEventBadgesConfiguration ({})>".format(u"Enabled" if self.enabled else u"Disabled")
@property
def completed_settings(self):
"""
Parses the settings from the courses_completed field.
"""
return deserialize_count_specs(self.courses_completed)
@property
def enrolled_settings(self):
"""
        Parses the settings from the courses_enrolled field.
"""
return deserialize_count_specs(self.courses_enrolled)
@property
def course_group_settings(self):
"""
        Parses the course group settings. For example, the format is:
slug_for_compsci_courses_group_badge,course-v1:CompSci+Course+First,course-v1:CompsSci+Course+Second
"""
specs = self.course_groups.strip()
if not specs:
return {}
specs = [line.split(',', 1) for line in specs.splitlines()]
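        # e.g. the line "slug_a,course-v1:Org+C1+Run,course-v1:Org+C2+Run"
        # parses to {'slug_a': [CourseKey(...), CourseKey(...)]} below.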
return {
slug.strip().lower(): [CourseKey.from_string(key.strip()) for key in keys.strip().split(',')]
for slug, keys in specs
}
def clean_fields(self, exclude=tuple()):
"""
Verify the settings are parseable.
"""
errors = {}
error_message = _(u"Please check the syntax of your entry.")
if 'courses_completed' not in exclude:
try:
self.completed_settings
except (ValueError, InvalidKeyError):
errors['courses_completed'] = [unicode(error_message)]
if 'courses_enrolled' not in exclude:
try:
self.enrolled_settings
except (ValueError, InvalidKeyError):
errors['courses_enrolled'] = [unicode(error_message)]
if 'course_groups' not in exclude:
store = modulestore()
try:
for key_list in self.course_group_settings.values():
for course_key in key_list:
if not store.get_course(course_key):
                            raise ValueError(u"The course {course_key} does not exist.".format(course_key=course_key))
except (ValueError, InvalidKeyError):
errors['course_groups'] = [unicode(error_message)]
if errors:
raise ValidationError(errors)
class Meta(object):
app_label = "badges"<|fim▁end|> | mode=self.mode,
default=u" (default)" if self.default else u'' |
<|file_name|>glusterfs.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package glusterfs
import (
"os"
"path"
"github.com/golang/glog"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/types"
"k8s.io/kubernetes/pkg/util"
"k8s.io/kubernetes/pkg/util/exec"
"k8s.io/kubernetes/pkg/util/mount"
"k8s.io/kubernetes/pkg/volume"
)
// This is the primary entrypoint for volume plugins.
func ProbeVolumePlugins() []volume.VolumePlugin {
return []volume.VolumePlugin{&glusterfsPlugin{nil}}
}
type glusterfsPlugin struct {
host volume.VolumeHost
}
var _ volume.VolumePlugin = &glusterfsPlugin{}
var _ volume.PersistentVolumePlugin = &glusterfsPlugin{}
const (
glusterfsPluginName = "kubernetes.io/glusterfs"
)
func (plugin *glusterfsPlugin) Init(host volume.VolumeHost) {
plugin.host = host
}
func (plugin *glusterfsPlugin) Name() string {
return glusterfsPluginName
}
func (plugin *glusterfsPlugin) CanSupport(spec *volume.Spec) bool {
return (spec.PersistentVolume != nil && spec.PersistentVolume.Spec.Glusterfs != nil) ||
(spec.Volume != nil && spec.Volume.Glusterfs != nil)
}
func (plugin *glusterfsPlugin) GetAccessModes() []api.PersistentVolumeAccessMode {
return []api.PersistentVolumeAccessMode{
api.ReadWriteOnce,
api.ReadOnlyMany,
api.ReadWriteMany,
}
}
func (plugin *glusterfsPlugin) NewBuilder(spec *volume.Spec, pod *api.Pod, _ volume.VolumeOptions) (volume.Builder, error) {
source, _ := plugin.getGlusterVolumeSource(spec)
ep_name := source.EndpointsName
ns := pod.Namespace
ep, err := plugin.host.GetKubeClient().Endpoints(ns).Get(ep_name)
if err != nil {
glog.Errorf("Glusterfs: failed to get endpoints %s[%v]", ep_name, err)
return nil, err
}
glog.V(1).Infof("Glusterfs: endpoints %v", ep)
return plugin.newBuilderInternal(spec, ep, pod, plugin.host.GetMounter(), exec.New())
}
func (plugin *glusterfsPlugin) getGlusterVolumeSource(spec *volume.Spec) (*api.GlusterfsVolumeSource, bool) {
// Glusterfs volumes used directly in a pod have a ReadOnly flag set by the pod author.
// Glusterfs volumes used as a PersistentVolume get the ReadOnly flag indirectly through the persistent-claim volume used to mount the PV
if spec.Volume != nil && spec.Volume.Glusterfs != nil {<|fim▁hole|> } else {
return spec.PersistentVolume.Spec.Glusterfs, spec.ReadOnly
}
}
func (plugin *glusterfsPlugin) newBuilderInternal(spec *volume.Spec, ep *api.Endpoints, pod *api.Pod, mounter mount.Interface, exe exec.Interface) (volume.Builder, error) {
source, readOnly := plugin.getGlusterVolumeSource(spec)
return &glusterfsBuilder{
glusterfs: &glusterfs{
volName: spec.Name(),
mounter: mounter,
pod: pod,
plugin: plugin,
},
hosts: ep,
path: source.Path,
readOnly: readOnly,
exe: exe}, nil
}
func (plugin *glusterfsPlugin) NewCleaner(volName string, podUID types.UID) (volume.Cleaner, error) {
return plugin.newCleanerInternal(volName, podUID, plugin.host.GetMounter())
}
func (plugin *glusterfsPlugin) newCleanerInternal(volName string, podUID types.UID, mounter mount.Interface) (volume.Cleaner, error) {
return &glusterfsCleaner{&glusterfs{
volName: volName,
mounter: mounter,
pod: &api.Pod{ObjectMeta: api.ObjectMeta{UID: podUID}},
plugin: plugin,
}}, nil
}
// Glusterfs volumes represent a bare host file or directory mount of an Glusterfs export.
type glusterfs struct {
volName string
pod *api.Pod
mounter mount.Interface
plugin *glusterfsPlugin
}
type glusterfsBuilder struct {
*glusterfs
hosts *api.Endpoints
path string
readOnly bool
exe exec.Interface
}
var _ volume.Builder = &glusterfsBuilder{}
// SetUp attaches the disk and bind mounts to the volume path.
func (b *glusterfsBuilder) SetUp() error {
return b.SetUpAt(b.GetPath())
}
func (b *glusterfsBuilder) SetUpAt(dir string) error {
notMnt, err := b.mounter.IsLikelyNotMountPoint(dir)
glog.V(4).Infof("Glusterfs: mount set up: %s %v %v", dir, !notMnt, err)
if err != nil && !os.IsNotExist(err) {
return err
}
if !notMnt {
return nil
}
if err := os.MkdirAll(dir, 0750); err != nil {
return err
}
err = b.setUpAtInternal(dir)
if err == nil {
return nil
}
// Cleanup upon failure.
c := &glusterfsCleaner{b.glusterfs}
c.cleanup(dir)
return err
}
func (b *glusterfsBuilder) IsReadOnly() bool {
return b.readOnly
}
func (glusterfsVolume *glusterfs) GetPath() string {
name := glusterfsPluginName
return glusterfsVolume.plugin.host.GetPodVolumeDir(glusterfsVolume.pod.UID, util.EscapeQualifiedNameForDisk(name), glusterfsVolume.volName)
}
type glusterfsCleaner struct {
*glusterfs
}
var _ volume.Cleaner = &glusterfsCleaner{}
func (c *glusterfsCleaner) TearDown() error {
return c.TearDownAt(c.GetPath())
}
func (c *glusterfsCleaner) TearDownAt(dir string) error {
return c.cleanup(dir)
}
func (c *glusterfsCleaner) cleanup(dir string) error {
notMnt, err := c.mounter.IsLikelyNotMountPoint(dir)
if err != nil {
glog.Errorf("Glusterfs: Error checking IsLikelyNotMountPoint: %v", err)
return err
}
if notMnt {
return os.RemoveAll(dir)
}
if err := c.mounter.Unmount(dir); err != nil {
glog.Errorf("Glusterfs: Unmounting failed: %v", err)
return err
}
notMnt, mntErr := c.mounter.IsLikelyNotMountPoint(dir)
if mntErr != nil {
glog.Errorf("Glusterfs: IsLikelyNotMountPoint check failed: %v", mntErr)
return mntErr
}
if notMnt {
if err := os.RemoveAll(dir); err != nil {
return err
}
}
return nil
}
func (b *glusterfsBuilder) setUpAtInternal(dir string) error {
var errs error
options := []string{}
if b.readOnly {
options = append(options, "ro")
}
p := path.Join(b.glusterfs.plugin.host.GetPluginDir(glusterfsPluginName), b.glusterfs.volName)
if err := os.MkdirAll(p, 0750); err != nil {
return err
}
log := path.Join(p, "glusterfs.log")
options = append(options, "log-file="+log)
addr := make(map[string]struct{})
for _, s := range b.hosts.Subsets {
for _, a := range s.Addresses {
addr[a.IP] = struct{}{}
}
}
// Avoid mount storm, pick a host randomly.
// Iterate all hosts until mount succeeds.
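// (Go randomizes map iteration order, so ranging over the addr set visits the endpoints in a different order on each call.)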
for hostIP := range addr {
errs = b.mounter.Mount(hostIP+":"+b.path, dir, "glusterfs", options)
if errs == nil {
return nil
}
}
glog.Errorf("Glusterfs: mount failed: %v", errs)
return errs
}<|fim▁end|> | return spec.Volume.Glusterfs, spec.Volume.Glusterfs.ReadOnly |
<|file_name|>snippets.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Code snippets used in webdocs.
The examples here are written specifically to read well with the accompanying
web docs. Do not rewrite them until you make sure the webdocs still read well
and the rewritten code supports the concept being described. For example, there
are snippets that could be shorter but they are written like this to make a
specific point in the docs.
The code snippets are all organized as self contained functions. Parts of the
function body delimited by [START tag] and [END tag] will be included
automatically in the web docs. The naming convention for the tags is to have as
prefix the PATH_TO_HTML where they are included followed by a descriptive
string. The tags can contain only letters, digits and _.
"""
import apache_beam as beam
from apache_beam.test_pipeline import TestPipeline
from apache_beam.metrics import Metrics
# Quiet some pylint warnings that happen because of the somewhat special
# format for the code snippets.
# pylint:disable=invalid-name
# pylint:disable=expression-not-assigned
# pylint:disable=redefined-outer-name
# pylint:disable=reimported
# pylint:disable=unused-variable
# pylint:disable=wrong-import-order, wrong-import-position
class SnippetUtils(object):
from apache_beam.pipeline import PipelineVisitor
class RenameFiles(PipelineVisitor):
"""RenameFiles will rewire read/write paths for unit testing.
RenameFiles will replace the GCS files specified in the read and
write transforms to local files so the pipeline can be run as a
unit test. This assumes that read and write transforms defined in snippets
have already been replaced by transforms 'DummyReadForTesting' and
'DummyWriteForTesting' (see snippets_test.py).
This is as close as we can get to having code snippets that are
executed and are also ready to be presented in webdocs.
"""
def __init__(self, renames):
self.renames = renames
def visit_transform(self, transform_node):
if transform_node.full_label.find('DummyReadForTesting') >= 0:
transform_node.transform.fn.file_to_read = self.renames['read']
elif transform_node.full_label.find('DummyWriteForTesting') >= 0:
transform_node.transform.fn.file_to_write = self.renames['write']
def construct_pipeline(renames):
"""A reverse words snippet as an example for constructing a pipeline."""
import re
class ReverseWords(beam.PTransform):
"""A PTransform that reverses individual elements in a PCollection."""
def expand(self, pcoll):
return pcoll | beam.Map(lambda e: e[::-1])
def filter_words(unused_x):
"""Pass through filter to select everything."""
return True
# [START pipelines_constructing_creating]
from apache_beam.utils.pipeline_options import PipelineOptions
p = beam.Pipeline(options=PipelineOptions())
# [END pipelines_constructing_creating]
p = TestPipeline() # Use TestPipeline for testing.
# [START pipelines_constructing_reading]
lines = p | 'ReadMyFile' >> beam.io.ReadFromText('gs://some/inputData.txt')
# [END pipelines_constructing_reading]
# [START pipelines_constructing_applying]
words = lines | beam.FlatMap(lambda x: re.findall(r'[A-Za-z\']+', x))
reversed_words = words | ReverseWords()
# [END pipelines_constructing_applying]
# [START pipelines_constructing_writing]
filtered_words = reversed_words | 'FilterWords' >> beam.Filter(filter_words)
filtered_words | 'WriteMyFile' >> beam.io.WriteToText(
'gs://some/outputData.txt')
# [END pipelines_constructing_writing]
p.visit(SnippetUtils.RenameFiles(renames))
# [START pipelines_constructing_running]
p.run()
# [END pipelines_constructing_running]
def model_pipelines(argv):
"""A wordcount snippet as a simple pipeline example."""
# [START model_pipelines]
import re
import apache_beam as beam
from apache_beam.utils.pipeline_options import PipelineOptions
class MyOptions(PipelineOptions):
@classmethod
def _add_argparse_args(cls, parser):
parser.add_argument('--input',
dest='input',
default='gs://dataflow-samples/shakespeare/kinglear'
'.txt',
help='Input file to process.')
parser.add_argument('--output',
dest='output',
required=True,
help='Output file to write results to.')
pipeline_options = PipelineOptions(argv)
my_options = pipeline_options.view_as(MyOptions)
p = beam.Pipeline(options=pipeline_options)
(p
| beam.io.ReadFromText(my_options.input)
| beam.FlatMap(lambda x: re.findall(r'[A-Za-z\']+', x))
| beam.Map(lambda x: (x, 1))
| beam.combiners.Count.PerKey()
| beam.io.WriteToText(my_options.output))
result = p.run()
# [END model_pipelines]
result.wait_until_finish()
def model_pcollection(argv):
"""Creating a PCollection from data in local memory."""
from apache_beam.utils.pipeline_options import PipelineOptions
class MyOptions(PipelineOptions):
@classmethod
def _add_argparse_args(cls, parser):
parser.add_argument('--output',
dest='output',
required=True,
help='Output file to write results to.')
pipeline_options = PipelineOptions(argv)
my_options = pipeline_options.view_as(MyOptions)
# [START model_pcollection]
p = beam.Pipeline(options=pipeline_options)
(p
| beam.Create([
'To be, or not to be: that is the question: ',
'Whether \'tis nobler in the mind to suffer ',
'The slings and arrows of outrageous fortune, ',
'Or to take arms against a sea of troubles, '])
| beam.io.WriteToText(my_options.output))
result = p.run()
# [END model_pcollection]
result.wait_until_finish()
def pipeline_options_remote(argv):
"""Creating a Pipeline using a PipelineOptions object for remote execution."""
from apache_beam import Pipeline
from apache_beam.utils.pipeline_options import PipelineOptions
# [START pipeline_options_create]
options = PipelineOptions(flags=argv)
# [END pipeline_options_create]
# [START pipeline_options_define_custom]
class MyOptions(PipelineOptions):
@classmethod
def _add_argparse_args(cls, parser):
parser.add_argument('--input')
parser.add_argument('--output')
# [END pipeline_options_define_custom]
from apache_beam.utils.pipeline_options import GoogleCloudOptions
from apache_beam.utils.pipeline_options import StandardOptions
# [START pipeline_options_dataflow_service]
# Create and set your PipelineOptions.
options = PipelineOptions(flags=argv)
# For Cloud execution, set the Cloud Platform project, job_name,
# staging location, temp_location and specify DataflowRunner.
google_cloud_options = options.view_as(GoogleCloudOptions)
google_cloud_options.project = 'my-project-id'
google_cloud_options.job_name = 'myjob'
google_cloud_options.staging_location = 'gs://my-bucket/binaries'
google_cloud_options.temp_location = 'gs://my-bucket/temp'
options.view_as(StandardOptions).runner = 'DataflowRunner'
# Create the Pipeline with the specified options.
p = Pipeline(options=options)
# [END pipeline_options_dataflow_service]
my_options = options.view_as(MyOptions)
my_input = my_options.input
my_output = my_options.output
p = TestPipeline() # Use TestPipeline for testing.
lines = p | beam.io.ReadFromText(my_input)
lines | beam.io.WriteToText(my_output)
p.run()
def pipeline_options_local(argv):
"""Creating a Pipeline using a PipelineOptions object for local execution."""
from apache_beam import Pipeline
from apache_beam.utils.pipeline_options import PipelineOptions
options = PipelineOptions(flags=argv)
# [START pipeline_options_define_custom_with_help_and_default]
class MyOptions(PipelineOptions):
@classmethod
def _add_argparse_args(cls, parser):
parser.add_argument('--input',
help='Input for the pipeline',
default='gs://my-bucket/input')
parser.add_argument('--output',
help='Output for the pipeline',
default='gs://my-bucket/output')
# [END pipeline_options_define_custom_with_help_and_default]
my_options = options.view_as(MyOptions)
my_input = my_options.input
my_output = my_options.output
# [START pipeline_options_local]
# Create and set your Pipeline Options.
options = PipelineOptions()
p = Pipeline(options=options)
# [END pipeline_options_local]
p = TestPipeline() # Use TestPipeline for testing.
lines = p | beam.io.ReadFromText(my_input)
lines | beam.io.WriteToText(my_output)
p.run()
def pipeline_options_command_line(argv):
"""Creating a Pipeline by passing a list of arguments."""
# [START pipeline_options_command_line]
# Use Python argparse module to parse custom arguments
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--input')
parser.add_argument('--output')
known_args, pipeline_args = parser.parse_known_args(argv)
# Create the Pipeline with remaining arguments.
p = beam.Pipeline(argv=pipeline_args)
lines = p | 'ReadFromText' >> beam.io.ReadFromText(known_args.input)
lines | 'WriteToText' >> beam.io.WriteToText(known_args.output)
# [END pipeline_options_command_line]
p.run().wait_until_finish()
def pipeline_logging(lines, output):
"""Logging Pipeline Messages."""
import re
import apache_beam as beam
# [START pipeline_logging]
# import Python logging module.
import logging
class ExtractWordsFn(beam.DoFn):
def process(self, element):
words = re.findall(r'[A-Za-z\']+', element)
for word in words:
yield word
if word.lower() == 'love':
# Log using the root logger at info or higher levels
logging.info('Found : %s', word.lower())
# Remaining WordCount example code ...
# [END pipeline_logging]
p = TestPipeline() # Use TestPipeline for testing.
(p
| beam.Create(lines)
| beam.ParDo(ExtractWordsFn())
| beam.io.WriteToText(output))
p.run()
def pipeline_monitoring(renames):
"""Using monitoring interface snippets."""
import re
import apache_beam as beam
from apache_beam.utils.pipeline_options import PipelineOptions
class WordCountOptions(PipelineOptions):
@classmethod
def _add_argparse_args(cls, parser):
parser.add_argument('--input',
help='Input for the pipeline',
default='gs://my-bucket/input')
parser.add_argument('--output',
help='output for the pipeline',
default='gs://my-bucket/output')
class ExtractWordsFn(beam.DoFn):
def process(self, element):
words = re.findall(r'[A-Za-z\']+', element)
for word in words:
yield word
class FormatCountsFn(beam.DoFn):
def process(self, element):
word, count = element
yield '%s: %s' % (word, count)
# [START pipeline_monitoring_composite]
# The CountWords Composite Transform inside the WordCount pipeline.
class CountWords(beam.PTransform):
def expand(self, pcoll):
return (pcoll
# Convert lines of text into individual words.
| 'ExtractWords' >> beam.ParDo(ExtractWordsFn())
# Count the number of times each word occurs.
| beam.combiners.Count.PerElement()
# Format each word and count into a printable string.
| 'FormatCounts' >> beam.ParDo(FormatCountsFn()))
# [END pipeline_monitoring_composite]
pipeline_options = PipelineOptions()
options = pipeline_options.view_as(WordCountOptions)
p = TestPipeline() # Use TestPipeline for testing.
# [START pipeline_monitoring_execution]
(p
# Read the lines of the input text.
| 'ReadLines' >> beam.io.ReadFromText(options.input)
# Count the words.
| CountWords()
# Write the formatted word counts to output.
| 'WriteCounts' >> beam.io.WriteToText(options.output))
# [END pipeline_monitoring_execution]
p.visit(SnippetUtils.RenameFiles(renames))
p.run()
def examples_wordcount_minimal(renames):
"""MinimalWordCount example snippets."""
import re
import apache_beam as beam
from apache_beam.utils.pipeline_options import GoogleCloudOptions
from apache_beam.utils.pipeline_options import StandardOptions
from apache_beam.utils.pipeline_options import PipelineOptions
# [START examples_wordcount_minimal_options]
options = PipelineOptions()
google_cloud_options = options.view_as(GoogleCloudOptions)
google_cloud_options.project = 'my-project-id'
google_cloud_options.job_name = 'myjob'
google_cloud_options.staging_location = 'gs://your-bucket-name-here/staging'
google_cloud_options.temp_location = 'gs://your-bucket-name-here/temp'
options.view_as(StandardOptions).runner = 'DataflowRunner'
# [END examples_wordcount_minimal_options]
# Run it locally for testing.
options = PipelineOptions()
# [START examples_wordcount_minimal_create]
p = beam.Pipeline(options=options)
# [END examples_wordcount_minimal_create]
(
# [START examples_wordcount_minimal_read]
p | beam.io.ReadFromText(
'gs://dataflow-samples/shakespeare/kinglear.txt')
# [END examples_wordcount_minimal_read]
# [START examples_wordcount_minimal_pardo]
| 'ExtractWords' >> beam.FlatMap(lambda x: re.findall(r'[A-Za-z\']+', x))
# [END examples_wordcount_minimal_pardo]
# [START examples_wordcount_minimal_count]
| beam.combiners.Count.PerElement()
# [END examples_wordcount_minimal_count]
# [START examples_wordcount_minimal_map]
| beam.Map(lambda (word, count): '%s: %s' % (word, count))
# [END examples_wordcount_minimal_map]
# [START examples_wordcount_minimal_write]
| beam.io.WriteToText('gs://my-bucket/counts.txt')
# [END examples_wordcount_minimal_write]
)
p.visit(SnippetUtils.RenameFiles(renames))
# [START examples_wordcount_minimal_run]
result = p.run()
# [END examples_wordcount_minimal_run]
result.wait_until_finish()
def examples_wordcount_wordcount(renames):
"""WordCount example snippets."""
import re
import apache_beam as beam
from apache_beam.utils.pipeline_options import PipelineOptions
argv = []
# [START examples_wordcount_wordcount_options]
class WordCountOptions(PipelineOptions):
@classmethod
def _add_argparse_args(cls, parser):
parser.add_argument('--input',
help='Input for the pipeline',
default='gs://my-bucket/input')
options = PipelineOptions(argv)
p = beam.Pipeline(options=options)
# [END examples_wordcount_wordcount_options]
lines = p | beam.io.ReadFromText(
'gs://dataflow-samples/shakespeare/kinglear.txt')
# [START examples_wordcount_wordcount_composite]
class CountWords(beam.PTransform):
def expand(self, pcoll):
return (pcoll
# Convert lines of text into individual words.
| 'ExtractWords' >> beam.FlatMap(
lambda x: re.findall(r'[A-Za-z\']+', x))
# Count the number of times each word occurs.
| beam.combiners.Count.PerElement())
counts = lines | CountWords()
# [END examples_wordcount_wordcount_composite]
# [START examples_wordcount_wordcount_dofn]
class FormatAsTextFn(beam.DoFn):
def process(self, element):
word, count = element
yield '%s: %s' % (word, count)
formatted = counts | beam.ParDo(FormatAsTextFn())
# [END examples_wordcount_wordcount_dofn]
formatted | beam.io.WriteToText('gs://my-bucket/counts.txt')
p.visit(SnippetUtils.RenameFiles(renames))
p.run().wait_until_finish()
def examples_wordcount_debugging(renames):
"""DebuggingWordCount example snippets."""
import re
import apache_beam as beam
# [START example_wordcount_debugging_logging]
# [START example_wordcount_debugging_aggregators]
import logging
class FilterTextFn(beam.DoFn):
"""A DoFn that filters for a specific key based on a regular expression."""
def __init__(self, pattern):
self.pattern = pattern
# A custom metric can track values in your pipeline as it runs. Create
# custom metrics matched_words and unmatched_words.
self.matched_words = Metrics.counter(self.__class__, 'matched_words')
self.unmatched_words = Metrics.counter(self.__class__, 'unmatched_words')
def process(self, element):
word, _ = element
if re.match(self.pattern, word):
# Log at INFO level each element we match. When executing this pipeline
# using the Dataflow service, these log lines will appear in the Cloud
# Logging UI.
logging.info('Matched %s', word)
# Add 1 to the custom metric counter matched_words
self.matched_words.inc()
yield element
else:
# Log at the "DEBUG" level each element that is not matched. Different
# log levels can be used to control the verbosity of logging providing
# an effective mechanism to filter less important information. Note
# currently only "INFO" and higher level logs are emitted to the Cloud
# Logger. This log message will not be visible in the Cloud Logger.
logging.debug('Did not match %s', word)
# Add 1 to the custom metric counter unmatched_words
self.unmatched_words.inc()
# [END example_wordcount_debugging_logging]
# [END example_wordcount_debugging_aggregators]
p = TestPipeline() # Use TestPipeline for testing.
filtered_words = (
p
| beam.io.ReadFromText(
'gs://dataflow-samples/shakespeare/kinglear.txt')
| 'ExtractWords' >> beam.FlatMap(lambda x: re.findall(r'[A-Za-z\']+', x))
| beam.combiners.Count.PerElement()
| 'FilterText' >> beam.ParDo(FilterTextFn('Flourish|stomach')))
# [START example_wordcount_debugging_assert]
beam.assert_that(
filtered_words, beam.equal_to([('Flourish', 3), ('stomach', 1)]))
# [END example_wordcount_debugging_assert]
output = (filtered_words
| 'format' >> beam.Map(lambda (word, c): '%s: %s' % (word, c))
| 'Write' >> beam.io.WriteToText('gs://my-bucket/counts.txt'))
p.visit(SnippetUtils.RenameFiles(renames))
p.run()
def model_custom_source(count):
"""Demonstrates creating a new custom source and using it in a pipeline.
Defines a new source ``CountingSource`` that produces integers starting from 0
up to a given size.
Uses the new source in an example pipeline.
Additionally demonstrates how a source should be implemented using a
``PTransform``. This is the recommended way to develop sources that are to
be distributed to a large number of end users.
This method runs two pipelines.
(1) A pipeline that uses ``CountingSource`` directly via the ``beam.io.Read``
transform.
(2) A pipeline that uses a custom ``PTransform`` that wraps
``CountingSource``.
Args:
count: the size of the counting source to be used in the pipeline
demonstrated in this method.
"""
import apache_beam as beam
from apache_beam.io import iobase
from apache_beam.io.range_trackers import OffsetRangeTracker
from apache_beam.transforms.core import PTransform
from apache_beam.utils.pipeline_options import PipelineOptions
# Defining a new source.
# [START model_custom_source_new_source]
class CountingSource(iobase.BoundedSource):
def __init__(self, count):
self._count = count
def estimate_size(self):
return self._count
def get_range_tracker(self, start_position, stop_position):
if start_position is None:
start_position = 0
if stop_position is None:
stop_position = self._count
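# OffsetRangeTracker manages the half-open offset range [start, stop) and
# arbitrates the try_claim() calls issued from read().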
return OffsetRangeTracker(start_position, stop_position)
def read(self, range_tracker):
for i in range(self._count):
if not range_tracker.try_claim(i):
return
yield i
def split(self, desired_bundle_size, start_position=None,
stop_position=None):
if start_position is None:
start_position = 0
if stop_position is None:
stop_position = self._count
bundle_start = start_position
while bundle_start < self._count:
# Use min so the last bundle is clamped to the end of the source.
bundle_stop = min(self._count, bundle_start + desired_bundle_size)
yield iobase.SourceBundle(weight=(bundle_stop - bundle_start),
source=self,
start_position=bundle_start,
stop_position=bundle_stop)
bundle_start = bundle_stop
# [END model_custom_source_new_source]
# Using the source in an example pipeline.
# [START model_custom_source_use_new_source]
p = beam.Pipeline(options=PipelineOptions())
numbers = p | 'ProduceNumbers' >> beam.io.Read(CountingSource(count))
# [END model_custom_source_use_new_source]
lines = numbers | beam.core.Map(lambda number: 'line %d' % number)
beam.assert_that(
lines, beam.equal_to(
['line ' + str(number) for number in range(0, count)]))
p.run().wait_until_finish()
# We recommend that users start Source class names with an underscore to discourage
# using the Source class directly when a PTransform for the source is
# available. We simulate that here by simply extending the previous Source
# class.
class _CountingSource(CountingSource):
pass
# [START model_custom_source_new_ptransform]
class ReadFromCountingSource(PTransform):
def __init__(self, count, **kwargs):
super(ReadFromCountingSource, self).__init__(**kwargs)
self._count = count
def expand(self, pcoll):
return pcoll | iobase.Read(_CountingSource(self._count))
# [END model_custom_source_new_ptransform]
# [START model_custom_source_use_ptransform]
p = beam.Pipeline(options=PipelineOptions())
numbers = p | 'ProduceNumbers' >> ReadFromCountingSource(count)
# [END model_custom_source_use_ptransform]
lines = numbers | beam.core.Map(lambda number: 'line %d' % number)
beam.assert_that(
lines, beam.equal_to(
['line ' + str(number) for number in range(0, count)]))
p.run().wait_until_finish()
def model_custom_sink(simplekv, KVs, final_table_name_no_ptransform,
final_table_name_with_ptransform):
"""Demonstrates creating a new custom sink and using it in a pipeline.
Defines a new sink ``SimpleKVSink`` that demonstrates writing to a simple
key-value based storage system which has following API.
simplekv.connect(url) -
connects to the storage system and returns an access token which can be
used to perform further operations
simplekv.open_table(access_token, table_name) -
creates a table named 'table_name'. Returns a table object.
simplekv.write_to_table(access_token, table, key, value) -
writes a key-value pair to the given table.
simplekv.rename_table(access_token, old_name, new_name) -
renames the table named 'old_name' to 'new_name'.
Uses the new sink in an example pipeline.
Additionally demonstrates how a sink should be implemented using a
``PTransform``. This is the recommended way to develop sinks that are to be
distributed to a large number of end users.
This method runs two pipelines.
(1) A pipeline that uses ``SimpleKVSink`` directly via the ``beam.io.Write``
transform.
(2) A pipeline that uses a custom ``PTransform`` that wraps
``SimpleKVSink``.
Args:
simplekv: an object that mocks the key-value storage.
KVs: the set of key-value pairs to be written in the example pipeline.
final_table_name_no_ptransform: the prefix of final set of tables to be
created by the example pipeline that uses
``SimpleKVSink`` directly.
final_table_name_with_ptransform: the prefix of final set of tables to be
created by the example pipeline that uses
a ``PTransform`` that wraps
``SimpleKVSink``.
"""
import apache_beam as beam
from apache_beam.io import iobase
from apache_beam.transforms.core import PTransform
from apache_beam.utils.pipeline_options import PipelineOptions
# Defining the new sink.
# [START model_custom_sink_new_sink]
class SimpleKVSink(iobase.Sink):
def __init__(self, url, final_table_name):
self._url = url
self._final_table_name = final_table_name
def initialize_write(self):
access_token = simplekv.connect(self._url)
return access_token
def open_writer(self, access_token, uid):
table_name = 'table' + uid
return SimpleKVWriter(access_token, table_name)
def finalize_write(self, access_token, table_names):
for i, table_name in enumerate(table_names):
simplekv.rename_table(
access_token, table_name, self._final_table_name + str(i))
# [END model_custom_sink_new_sink]
# Defining a writer for the new sink.
# [START model_custom_sink_new_writer]
class SimpleKVWriter(iobase.Writer):
def __init__(self, access_token, table_name):
self._access_token = access_token
self._table_name = table_name
self._table = simplekv.open_table(access_token, table_name)
def write(self, record):
key, value = record
simplekv.write_to_table(self._access_token, self._table, key, value)
def close(self):
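# The value returned by close() is collected by the runner and handed to
# the sink's finalize_write() as one element of its results iterable.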
return self._table_name
# [END model_custom_sink_new_writer]
final_table_name = final_table_name_no_ptransform
# Using the new sink in an example pipeline.
# [START model_custom_sink_use_new_sink]
p = beam.Pipeline(options=PipelineOptions())
kvs = p | 'CreateKVs' >> beam.Create(KVs)
kvs | 'WriteToSimpleKV' >> beam.io.Write(
SimpleKVSink('http://url_to_simple_kv/', final_table_name))
# [END model_custom_sink_use_new_sink]
p.run().wait_until_finish()
# We recommend that users start Sink class names with an underscore to
# discourage using the Sink class directly when a PTransform for the sink is
# available. We simulate that here by simply extending the previous Sink
# class.
class _SimpleKVSink(SimpleKVSink):
pass
# [START model_custom_sink_new_ptransform]
class WriteToKVSink(PTransform):
def __init__(self, url, final_table_name, **kwargs):
super(WriteToKVSink, self).__init__(**kwargs)
self._url = url
self._final_table_name = final_table_name
def expand(self, pcoll):
return pcoll | iobase.Write(_SimpleKVSink(self._url,
self._final_table_name))
# [END model_custom_sink_new_ptransform]
final_table_name = final_table_name_with_ptransform
# [START model_custom_sink_use_ptransform]
p = beam.Pipeline(options=PipelineOptions())
kvs = p | 'CreateKVs' >> beam.core.Create(KVs)
kvs | 'WriteToSimpleKV' >> WriteToKVSink(
'http://url_to_simple_kv/', final_table_name)
# [END model_custom_sink_use_ptransform]
p.run().wait_until_finish()
def model_textio(renames):
"""Using a Read and Write transform to read/write text files."""
def filter_words(x):
import re
return re.findall(r'[A-Za-z\']+', x)
import apache_beam as beam
from apache_beam.utils.pipeline_options import PipelineOptions
# [START model_textio_read]
p = beam.Pipeline(options=PipelineOptions())
# [START model_pipelineio_read]
lines = p | 'ReadFromText' >> beam.io.ReadFromText('path/to/input-*.csv')
# [END model_pipelineio_read]
# [END model_textio_read]
# [START model_textio_write]
filtered_words = lines | 'FilterWords' >> beam.FlatMap(filter_words)
# [START model_pipelineio_write]
filtered_words | 'WriteToText' >> beam.io.WriteToText(
'/path/to/numbers', file_name_suffix='.csv')
# [END model_pipelineio_write]
# [END model_textio_write]
p.visit(SnippetUtils.RenameFiles(renames))
p.run().wait_until_finish()
def model_textio_compressed(renames, expected):
"""Using a Read Transform to read compressed text files."""
p = TestPipeline()
# [START model_textio_write_compressed]
lines = p | 'ReadFromText' >> beam.io.ReadFromText(
'/path/to/input-*.csv.gz',
compression_type=beam.io.fileio.CompressionTypes.GZIP)
# [END model_textio_write_compressed]
beam.assert_that(lines, beam.equal_to(expected))
p.visit(SnippetUtils.RenameFiles(renames))
p.run().wait_until_finish()
def model_datastoreio():
"""Using a Read and Write transform to read/write to Cloud Datastore."""
import uuid
from google.cloud.proto.datastore.v1 import entity_pb2
from google.cloud.proto.datastore.v1 import query_pb2
import googledatastore
import apache_beam as beam
from apache_beam.utils.pipeline_options import PipelineOptions
from apache_beam.io.gcp.datastore.v1.datastoreio import ReadFromDatastore
from apache_beam.io.gcp.datastore.v1.datastoreio import WriteToDatastore
project = 'my_project'
kind = 'my_kind'
query = query_pb2.Query()
query.kind.add().name = kind
# [START model_datastoreio_read]
p = beam.Pipeline(options=PipelineOptions())
entities = p | 'Read From Datastore' >> ReadFromDatastore(project, query)
# [END model_datastoreio_read]
# [START model_datastoreio_write]
p = beam.Pipeline(options=PipelineOptions())
musicians = p | 'Musicians' >> beam.Create(
['Mozart', 'Chopin', 'Beethoven', 'Vivaldi'])
def to_entity(content):
entity = entity_pb2.Entity()
googledatastore.helper.add_key_path(entity.key, kind, str(uuid.uuid4()))
googledatastore.helper.add_properties(entity, {'content': unicode(content)})
return entity
entities = musicians | 'To Entity' >> beam.Map(to_entity)
entities | 'Write To Datastore' >> WriteToDatastore(project)
# [END model_datastoreio_write]
def model_bigqueryio():
"""Using a Read and Write transform to read/write to BigQuery."""
import apache_beam as beam
from apache_beam.utils.pipeline_options import PipelineOptions
# [START model_bigqueryio_read]
p = beam.Pipeline(options=PipelineOptions())
weather_data = p | 'ReadWeatherStations' >> beam.io.Read(
beam.io.BigQuerySource(
'clouddataflow-readonly:samples.weather_stations'))
# [END model_bigqueryio_read]
# [START model_bigqueryio_query]
p = beam.Pipeline(options=PipelineOptions())
weather_data = p | 'ReadYearAndTemp' >> beam.io.Read(
beam.io.BigQuerySource(
query='SELECT year, mean_temp FROM samples.weather_stations'))
# [END model_bigqueryio_query]
# [START model_bigqueryio_query_standard_sql]
p = beam.Pipeline(options=PipelineOptions())
weather_data = p | 'ReadYearAndTemp' >> beam.io.Read(
beam.io.BigQuerySource(
query='SELECT year, mean_temp FROM `samples.weather_stations`',
use_standard_sql=True))
# [END model_bigqueryio_query_standard_sql]
# [START model_bigqueryio_schema]
schema = 'source:STRING, quote:STRING'
# [END model_bigqueryio_schema]
# [START model_bigqueryio_write]
quotes = p | beam.Create(
[{'source': 'Mahatma Gandhi', 'quote': 'My life is my message.'}])
quotes | 'Write' >> beam.io.Write(
beam.io.BigQuerySink(
'my-project:output.output_table',
schema=schema,
write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE,
create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED))
# [END model_bigqueryio_write]
def model_composite_transform_example(contents, output_path):
"""Example of a composite transform.
To declare a composite transform, define a subclass of PTransform.
To override the apply method, define a method "apply" that
takes a PCollection as its only parameter and returns a PCollection.
"""
import re
import apache_beam as beam
# [START composite_transform_example]
# [START composite_ptransform_apply_method]
# [START composite_ptransform_declare]
class CountWords(beam.PTransform):
# [END composite_ptransform_declare]
def expand(self, pcoll):
return (pcoll
| beam.FlatMap(lambda x: re.findall(r'\w+', x))
| beam.combiners.Count.PerElement()
| beam.Map(lambda (word, c): '%s: %s' % (word, c)))
# [END composite_ptransform_apply_method]
# [END composite_transform_example]
p = TestPipeline() # Use TestPipeline for testing.
(p
| beam.Create(contents)
| CountWords()
| beam.io.WriteToText(output_path))
p.run()
def model_multiple_pcollections_flatten(contents, output_path):
"""Merging a PCollection with Flatten."""
some_hash_fn = lambda s: ord(s[0])
import apache_beam as beam
p = TestPipeline() # Use TestPipeline for testing.
partition_fn = lambda element, partitions: some_hash_fn(element) % partitions
# Partition into three parts
partitioned = p | beam.Create(contents) | beam.Partition(partition_fn, 3)
pcoll1 = partitioned[0]
pcoll2 = partitioned[1]
pcoll3 = partitioned[2]
# Flatten them back into 1
# A collection of PCollection objects can be represented simply
# as a tuple (or list) of PCollections.
# (The SDK for Python has no separate type to store multiple
# PCollection objects, whether containing the same or different
# types.)
# [START model_multiple_pcollections_flatten]
merged = (
# [START model_multiple_pcollections_tuple]
(pcoll1, pcoll2, pcoll3)
# [END model_multiple_pcollections_tuple]
# A list of tuples can be "piped" directly into a Flatten transform.
| beam.Flatten())
# [END model_multiple_pcollections_flatten]
merged | beam.io.WriteToText(output_path)
p.run()
def model_multiple_pcollections_partition(contents, output_path):
"""Splitting a PCollection with Partition."""
some_hash_fn = lambda s: ord(s[0])
def get_percentile(i):
"""Assume i in [0,100)."""
return i
import apache_beam as beam
p = TestPipeline() # Use TestPipeline for testing.
students = p | beam.Create(contents)
# [START model_multiple_pcollections_partition]
def partition_fn(student, num_partitions):
return int(get_percentile(student) * num_partitions / 100)
by_decile = students | beam.Partition(partition_fn, 10)
# [END model_multiple_pcollections_partition]
# [START model_multiple_pcollections_partition_40th]
fortieth_percentile = by_decile[4]
# [END model_multiple_pcollections_partition_40th]
([by_decile[d] for d in xrange(10) if d != 4] + [fortieth_percentile]
| beam.Flatten()
| beam.io.WriteToText(output_path))
p.run()
def model_group_by_key(contents, output_path):
"""Applying a GroupByKey Transform."""
import re
import apache_beam as beam
p = TestPipeline() # Use TestPipeline for testing.
words_and_counts = (
p
| beam.Create(contents)
| beam.FlatMap(lambda x: re.findall(r'\w+', x))
| 'one word' >> beam.Map(lambda w: (w, 1)))
# GroupByKey accepts a PCollection of (w, 1) and
# outputs a PCollection of (w, (1, 1, ...)).
# (A key/value pair is just a tuple in Python.)
# This is a somewhat forced example, since one could
# simply use beam.combiners.Count.PerElement here.
# [START model_group_by_key_transform]
grouped_words = words_and_counts | beam.GroupByKey()
# [END model_group_by_key_transform]
(grouped_words
| 'count words' >> beam.Map(lambda (word, counts): (word, len(counts)))
| beam.io.WriteToText(output_path))
p.run()
def model_co_group_by_key_tuple(email_list, phone_list, output_path):
"""Applying a CoGroupByKey Transform to a tuple."""
import apache_beam as beam
p = TestPipeline() # Use TestPipeline for testing.
# [START model_group_by_key_cogroupbykey_tuple]
# Each data set is represented by key-value pairs in separate PCollections.
# Both data sets share a common key type (in this example str).
# The email_list contains values such as: ('joe', '[email protected]') with
# multiple possible values for each key.
# The phone_list contains values such as: ('mary': '111-222-3333') with
# multiple possible values for each key.
emails = p | 'email' >> beam.Create(email_list)
phones = p | 'phone' >> beam.Create(phone_list)
# The result PCollection contains one key-value element for each key in the
# input PCollections. The key of the pair will be the key from the input and
# the value will be a dictionary with two entries: 'emails' - an iterable of
# all values for the current key in the emails PCollection and 'phones': an
# iterable of all values for the current key in the phones PCollection.
# For instance, if 'emails' contained ('joe', '[email protected]') and
# ('joe', '[email protected]'), then 'result' will contain the element
# ('joe', {'emails': ['[email protected]', '[email protected]'], 'phones': ...})
result = {'emails': emails, 'phones': phones} | beam.CoGroupByKey()
def join_info((name, info)):
return '; '.join(['%s' % name,
'%s' % ','.join(info['emails']),
'%s' % ','.join(info['phones'])])
contact_lines = result | beam.Map(join_info)
# [END model_group_by_key_cogroupbykey_tuple]
contact_lines | beam.io.WriteToText(output_path)
p.run()
def model_join_using_side_inputs(
name_list, email_list, phone_list, output_path):
"""Joining PCollections using side inputs."""
import apache_beam as beam
from apache_beam.pvalue import AsIter
p = TestPipeline() # Use TestPipeline for testing.
# [START model_join_using_side_inputs]
# This code performs a join by receiving the set of names as an input and
# passing PCollections that contain emails and phone numbers as side inputs
# instead of using CoGroupByKey.
names = p | 'names' >> beam.Create(name_list)
emails = p | 'email' >> beam.Create(email_list)
phones = p | 'phone' >> beam.Create(phone_list)
def join_info(name, emails, phone_numbers):
filtered_emails = []
for name_in_list, email in emails:
if name_in_list == name:
filtered_emails.append(email)
filtered_phone_numbers = []
for name_in_list, phone_number in phone_numbers:
if name_in_list == name:
filtered_phone_numbers.append(phone_number)<|fim▁hole|> '%s' % ','.join(filtered_phone_numbers)])
contact_lines = names | 'CreateContacts' >> beam.core.Map(
join_info, AsIter(emails), AsIter(phones))
# [END model_join_using_side_inputs]
contact_lines | beam.io.WriteToText(output_path)
p.run()
# [START model_library_transforms_keys]
class Keys(beam.PTransform):
def expand(self, pcoll):
return pcoll | 'Keys' >> beam.Map(lambda (k, v): k)
# [END model_library_transforms_keys]
# pylint: enable=invalid-name
# [START model_library_transforms_count]
class Count(beam.PTransform):
def expand(self, pcoll):
return (
pcoll
| 'PairWithOne' >> beam.Map(lambda v: (v, 1))
| beam.CombinePerKey(sum))
# [END model_library_transforms_count]<|fim▁end|> |
return '; '.join(['%s' % name,
'%s' % ','.join(filtered_emails), |
<|file_name|>indextree.cpp<|end_file_name|><|fim▁begin|>#include "indextree.h"
#include <queue>
namespace zl
{
bool IndexTreeNodeCompare::operator()(IndexTreeNode* first, IndexTreeNode* second) const
{
// std::set requires a strict weak ordering: the comparator must return
// false for equal elements, otherwise lookups and insertions misbehave.
if(first == second)
return false;
return first->compare(second);
}
IndexTreeNode* IndexTree::CreateNode(char value)
{
IndexTreeNode* node = new IndexTreeNode;
node->m_value = value;
node->m_next.clear();
node->m_isEnd = false;
node->count = 1;
return node;
}
void IndexTree::DestoryNode(IndexTreeNode* node)
{
delete node;
}
void IndexTree::DestoryTree()
{
release();
delete m_root;
}
int IndexTree::init(CSimpleVector<basic_string>& stringlist)
{
if(m_root == NULL)
{
m_root = CreateNode(' ');
}
for(int i = 0; i < stringlist.GetSize(); i++)
{
if(!add(stringlist[i].c_str(), stringlist[i].Size(),i))
return false;
}
return true;
}
int IndexTree::add(const char* Data, int len, int id)
{
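// Walk the trie one character at a time, reusing existing child nodes for
// shared prefixes and inserting new ones as needed.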
IndexTreeNode* cur = m_root;
std::set<IndexTreeNode*, IndexTreeNodeCompare>::iterator iter;
std::pair<std::set<IndexTreeNode*, IndexTreeNodeCompare>::iterator, bool> ret;
for(int i=0; i<len; i++)
{
IndexTreeNode* node = CreateNode(Data[i]);
iter = cur->m_next.find(node);
if(iter == cur->m_next.end())
{
ret = cur->m_next.insert(node);
if(ret.second == true)
{
cur = *ret.first;
if(i == len-1)
{
cur->m_isEnd = true;
cur->id = id;
}
}
else
return false;
}
else
{
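// The character already exists at this level: bump its refcount, descend,
// and free the temporary probe node.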
(*iter)->count++;
cur = *iter;
DestoryNode(node);
}
}
return true;
}
bool IndexTree::remove(const char* Data, int len)
{
IndexTreeNode* cur = m_root;
std::set<IndexTreeNode*, IndexTreeNodeCompare>::iterator iter;
std::pair<std::set<IndexTreeNode*, IndexTreeNodeCompare>::iterator, bool> ret;
zl::CSimpleVector<IndexTreeNode*> vecNode;
bool flag = true;
for(int i=0; i<len; i++)
{
IndexTreeNode* node = CreateNode(Data[i]);
iter = cur->m_next.find(node);
DestoryNode(node);
if(iter != cur->m_next.end())
{
vecNode.Add(*iter);
cur = *iter;
}
else
{
flag = false;
break;
}
}
if(flag)
{
for(int i = 0; i < vecNode.GetSize(); i++)
{
if(vecNode[i]->count == 0)
{
DestoryNode(vecNode[i]);
}
else
{
vecNode[i]->count--;
}
}
}
return flag;
}
IndexTreeNode* IndexTree::find(const char* FoundData)
{
int nLen = strlen(FoundData);
IndexTreeNode* cur = m_root;
std::set<IndexTreeNode*, IndexTreeNodeCompare>::iterator iter;
for(int i = 0; i < nLen; i++)
{
IndexTreeNode* node = CreateNode(FoundData[i]);
iter = cur->m_next.find(node);
DestoryNode(node);
if(iter == cur->m_next.end())
{
return NULL;
}
else
{
cur = *iter;
}
}
if(cur->m_isEnd == true)
return cur;
else
return NULL;
}
void IndexTree::release()
{
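// Breadth-first teardown: delete every node except m_root, which is kept for reuse.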
std::queue<IndexTreeNode*> q;
std::set<IndexTreeNode*, IndexTreeNodeCompare>::iterator iter;
q.push(m_root);
while(!q.empty())<|fim▁hole|> {
for(iter = temp->m_next.begin(); iter != temp->m_next.end(); iter++)
{
q.push(*iter);
}
}
q.pop();
if(temp != m_root)
delete(temp);
}
m_root->m_next.clear();
}
}<|fim▁end|> | {
IndexTreeNode* temp = q.front();
if(!temp->m_next.empty()) |
<|file_name|>batch_token_test.go<|end_file_name|><|fim▁begin|>package token
import (
"strings"
"testing"
"time"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/builtin/credential/approle"
vaulthttp "github.com/hashicorp/vault/http"
"github.com/hashicorp/vault/sdk/helper/consts"
"github.com/hashicorp/vault/sdk/logical"
"github.com/hashicorp/vault/vault"
)
func TestBatchTokens(t *testing.T) {
coreConfig := &vault.CoreConfig{
LogicalBackends: map[string]logical.Factory{
"kv": vault.LeasedPassthroughBackendFactory,
},
CredentialBackends: map[string]logical.Factory{
"approle": approle.Factory,
},
}
cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{
HandlerFunc: vaulthttp.Handler,
})
cluster.Start()
defer cluster.Cleanup()
core := cluster.Cores[0].Core
vault.TestWaitActive(t, core)
client := cluster.Cores[0].Client
rootToken := client.Token()
var err error
// Set up a KV path
err = client.Sys().Mount("kv", &api.MountInput{
Type: "kv",
})
if err != nil {
t.Fatal(err)
}
_, err = client.Logical().Write("kv/foo", map[string]interface{}{
"foo": "bar",
"ttl": "5m",
})
if err != nil {
t.Fatal(err)
}
// Write the test policy
err = client.Sys().PutPolicy("test", `
path "kv/*" {
capabilities = ["read"]
}`)
if err != nil {
t.Fatal(err)
}
// Mount the auth backend
err = client.Sys().EnableAuthWithOptions("approle", &api.EnableAuthOptions{
Type: "approle",
})
if err != nil {
t.Fatal(err)
}
// Tune the mount
if err = client.Sys().TuneMount("auth/approle", api.MountConfigInput{
DefaultLeaseTTL: "5s",
MaxLeaseTTL: "5s",
}); err != nil {
t.Fatal(err)
}
// Create role
resp, err := client.Logical().Write("auth/approle/role/test", map[string]interface{}{
"policies": "test",
})
if err != nil {
t.Fatal(err)
}
// Get role_id
resp, err = client.Logical().Read("auth/approle/role/test/role-id")
if err != nil {
t.Fatal(err)
}
if resp == nil {
t.Fatal("expected a response for fetching the role-id")
}
roleID := resp.Data["role_id"]
// Get secret_id
resp, err = client.Logical().Write("auth/approle/role/test/secret-id", map[string]interface{}{})
if err != nil {
t.Fatal(err)
}
if resp == nil {
t.Fatal("expected a response for fetching the secret-id")
}
secretID := resp.Data["secret_id"]
// Login
testLogin := func(mountTuneType, roleType string, batch bool) string {
t.Helper()
if err = client.Sys().TuneMount("auth/approle", api.MountConfigInput{
TokenType: mountTuneType,
}); err != nil {
t.Fatal(err)
}
_, err = client.Logical().Write("auth/approle/role/test", map[string]interface{}{
"token_type": roleType,
})
if err != nil {
t.Fatal(err)
}
resp, err = client.Logical().Write("auth/approle/login", map[string]interface{}{
"role_id": roleID,
"secret_id": secretID,
})
if err != nil {
t.Fatal(err)
}
if resp == nil {
t.Fatal("expected a response for login")
}
if resp.Auth == nil {
t.Fatal("expected auth object from response")
}
if resp.Auth.ClientToken == "" {
t.Fatal("expected a client token")
}
if batch && !strings.HasPrefix(resp.Auth.ClientToken, consts.BatchTokenPrefix) {
t.Fatal("expected a batch token")
}
if !batch && strings.HasPrefix(resp.Auth.ClientToken, consts.BatchTokenPrefix) {
t.Fatal("expected a non-batch token")
}
return resp.Auth.ClientToken
}
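// The calls below encode the precedence rules: a mount tune of "service" or
// "batch" forces that token type, while "default-service"/"default-batch"
// only provide a fallback that the role's token_type can override.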
testLogin("service", "default", false)
testLogin("service", "batch", false)
testLogin("service", "service", false)
testLogin("batch", "default", true)
testLogin("batch", "batch", true)
testLogin("batch", "service", true)
testLogin("default-service", "default", false)
testLogin("default-service", "batch", true)
testLogin("default-service", "service", false)
testLogin("default-batch", "default", true)
testLogin("default-batch", "batch", true)
testLogin("default-batch", "service", false)
finalToken := testLogin("batch", "batch", true)
client.SetToken(finalToken)
resp, err = client.Logical().Read("kv/foo")
if err != nil {
t.Fatal(err)
}
if resp.Data["foo"].(string) != "bar" {
t.Fatal("bad")
}
if resp.LeaseID == "" {
t.Fatal("expected lease")
}
if !resp.Renewable {
t.Fatal("expected renewable")
}
if resp.LeaseDuration > 5 {
t.Fatalf("lease duration too big: %d", resp.LeaseDuration)
}
leaseID := resp.LeaseID
lastDuration := resp.LeaseDuration
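// A lease obtained with a batch token cannot outlive the token itself, so
// each renewal should return a strictly shorter duration.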
for i := 0; i < 3; i++ {
time.Sleep(time.Second)
resp, err = client.Sys().Renew(leaseID, 0)
if err != nil {
t.Fatal(err)
}
if resp.LeaseDuration >= lastDuration {
t.Fatal("expected duration to go down")
}
lastDuration = resp.LeaseDuration
}
client.SetToken(rootToken)
time.Sleep(2 * time.Second)
resp, err = client.Logical().Write("sys/leases/lookup", map[string]interface{}{
"lease_id": leaseID,
})
if err == nil {
t.Fatal("expected error")
}
}<|fim▁hole|>
func TestBatchToken_ParentLeaseRevoke(t *testing.T) {
coreConfig := &vault.CoreConfig{
LogicalBackends: map[string]logical.Factory{
"kv": vault.LeasedPassthroughBackendFactory,
},
CredentialBackends: map[string]logical.Factory{
"approle": approle.Factory,
},
}
cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{
HandlerFunc: vaulthttp.Handler,
})
cluster.Start()
defer cluster.Cleanup()
core := cluster.Cores[0].Core
vault.TestWaitActive(t, core)
client := cluster.Cores[0].Client
rootToken := client.Token()
var err error
// Set up a KV path
err = client.Sys().Mount("kv", &api.MountInput{
Type: "kv",
})
if err != nil {
t.Fatal(err)
}
_, err = client.Logical().Write("kv/foo", map[string]interface{}{
"foo": "bar",
"ttl": "5m",
})
if err != nil {
t.Fatal(err)
}
// Write the test policy
err = client.Sys().PutPolicy("test", `
path "kv/*" {
capabilities = ["read"]
}`)
if err != nil {
t.Fatal(err)
}
// Create a second root token
secret, err := client.Auth().Token().Create(&api.TokenCreateRequest{
Policies: []string{"root"},
})
if err != nil {
t.Fatal(err)
}
rootToken2 := secret.Auth.ClientToken
// Use this new token to create a batch token
client.SetToken(rootToken2)
secret, err = client.Auth().Token().Create(&api.TokenCreateRequest{
Policies: []string{"test"},
Type: "batch",
})
if err != nil {
t.Fatal(err)
}
batchToken := secret.Auth.ClientToken
client.SetToken(batchToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.BatchTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
// Get a lease with the batch token
resp, err := client.Logical().Read("kv/foo")
if err != nil {
t.Fatal(err)
}
if resp.Data["foo"].(string) != "bar" {
t.Fatal("bad")
}
if resp.LeaseID == "" {
t.Fatal("expected lease")
}
leaseID := resp.LeaseID
// Check the lease
resp, err = client.Logical().Write("sys/leases/lookup", map[string]interface{}{
"lease_id": leaseID,
})
if err != nil {
t.Fatal(err)
}
// Revoke the parent
client.SetToken(rootToken2)
err = client.Auth().Token().RevokeSelf("")
if err != nil {
t.Fatal(err)
}
time.Sleep(1 * time.Second)
// Verify the batch token is not usable anymore
client.SetToken(rootToken)
_, err = client.Auth().Token().Lookup(batchToken)
if err == nil {
t.Fatal("expected error")
}
// Verify the lease has been revoked
resp, err = client.Logical().Write("sys/leases/lookup", map[string]interface{}{
"lease_id": leaseID,
})
if err == nil {
t.Fatal("expected error")
}
}
func TestTokenStore_Roles_Batch(t *testing.T) {
cluster := vault.NewTestCluster(t, nil, &vault.TestClusterOptions{
HandlerFunc: vaulthttp.Handler,
})
cluster.Start()
defer cluster.Cleanup()
core := cluster.Cores[0].Core
vault.TestWaitActive(t, core)
client := cluster.Cores[0].Client
rootToken := client.Token()
var err error
var secret *api.Secret
// Test service
{
_, err = client.Logical().Write("auth/token/roles/testrole", map[string]interface{}{
"bound_cidrs": []string{},
"token_type": "service",
})
if err != nil {
t.Fatal(err)
}
secret, err = client.Auth().Token().CreateWithRole(&api.TokenCreateRequest{
Policies: []string{"default"},
Type: "batch",
}, "testrole")
if err != nil {
t.Fatal(err)
}
client.SetToken(secret.Auth.ClientToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.ServiceTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
}
// Test batch
{
client.SetToken(rootToken)
_, err = client.Logical().Write("auth/token/roles/testrole", map[string]interface{}{
"token_type": "batch",
})
// Orphan not set so we should error
if err == nil {
t.Fatal("expected error")
}
_, err = client.Logical().Write("auth/token/roles/testrole", map[string]interface{}{
"token_type": "batch",
"orphan": true,
})
// Renewable set so we should error
if err == nil {
t.Fatal("expected error")
}
_, err = client.Logical().Write("auth/token/roles/testrole", map[string]interface{}{
"token_type": "batch",
"orphan": true,
"renewable": false,
})
if err != nil {
t.Fatal(err)
}
secret, err = client.Auth().Token().CreateWithRole(&api.TokenCreateRequest{
Policies: []string{"default"},
Type: "service",
}, "testrole")
if err != nil {
t.Fatal(err)
}
client.SetToken(secret.Auth.ClientToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.BatchTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
}
// Test default-service
{
client.SetToken(rootToken)
_, err = client.Logical().Write("auth/token/roles/testrole", map[string]interface{}{
"token_type": "default-service",
})
if err != nil {
t.Fatal(err)
}
// Client specifies batch
secret, err = client.Auth().Token().CreateWithRole(&api.TokenCreateRequest{
Policies: []string{"default"},
Type: "batch",
}, "testrole")
if err != nil {
t.Fatal(err)
}
client.SetToken(secret.Auth.ClientToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.BatchTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
// Client specifies service
client.SetToken(rootToken)
secret, err = client.Auth().Token().CreateWithRole(&api.TokenCreateRequest{
Policies: []string{"default"},
Type: "service",
}, "testrole")
if err != nil {
t.Fatal(err)
}
client.SetToken(secret.Auth.ClientToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.ServiceTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
// Client doesn't specify
client.SetToken(rootToken)
secret, err = client.Auth().Token().CreateWithRole(&api.TokenCreateRequest{
Policies: []string{"default"},
}, "testrole")
if err != nil {
t.Fatal(err)
}
client.SetToken(secret.Auth.ClientToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.ServiceTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
}
// Test default-batch
{
client.SetToken(rootToken)
_, err = client.Logical().Write("auth/token/roles/testrole", map[string]interface{}{
"token_type": "default-batch",
})
if err != nil {
t.Fatal(err)
}
// Client specifies batch
secret, err = client.Auth().Token().CreateWithRole(&api.TokenCreateRequest{
Policies: []string{"default"},
Type: "batch",
}, "testrole")
if err != nil {
t.Fatal(err)
}
client.SetToken(secret.Auth.ClientToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.BatchTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
// Client specifies service
client.SetToken(rootToken)
secret, err = client.Auth().Token().CreateWithRole(&api.TokenCreateRequest{
Policies: []string{"default"},
Type: "service",
}, "testrole")
if err != nil {
t.Fatal(err)
}
client.SetToken(secret.Auth.ClientToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.ServiceTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
// Client doesn't specify
client.SetToken(rootToken)
secret, err = client.Auth().Token().CreateWithRole(&api.TokenCreateRequest{
Policies: []string{"default"},
}, "testrole")
if err != nil {
t.Fatal(err)
}
client.SetToken(secret.Auth.ClientToken)
_, err = client.Auth().Token().LookupSelf()
if err != nil {
t.Fatal(err)
}
if secret.Auth.ClientToken[0:vault.TokenPrefixLength] != consts.BatchTokenPrefix {
t.Fatal(secret.Auth.ClientToken)
}
}
}<|fim▁end|> | |
<|file_name|>HabitListAdapter.java<|end_file_name|><|fim▁begin|>package com.example.habitup.View;
<|fim▁hole|>import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.CheckBox;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.example.habitup.Model.Attributes;
import com.example.habitup.Model.Habit;
import com.example.habitup.R;
import java.util.ArrayList;
/**
* This is the adapter for creating the habit list, which displays the habit name, and
* it's schedule.
*
* @author Shari Barboza
*/
public class HabitListAdapter extends ArrayAdapter<Habit> {
// The habits array
private ArrayList<Habit> habits;
public HabitListAdapter(Context context, int resource, ArrayList<Habit> habits) {
super(context, resource, habits);
this.habits = habits;
}
@Override
public View getView(int position, View view, ViewGroup viewGroup) {
View v = view;
// Inflate a new view
if (v == null) {
LayoutInflater inflater = LayoutInflater.from(getContext());
v = inflater.inflate(R.layout.habit_list_item, null);
}
// Get the habit
Habit habit = habits.get(position);
String attributeName = habit.getHabitAttribute();
String attributeColour = Attributes.getColour(attributeName);
// Set the name of the habit
TextView habitNameView = v.findViewById(R.id.habit_name);
habitNameView.setText(habit.getHabitName());
habitNameView.setTextColor(Color.parseColor(attributeColour));
// Get habit schedule
boolean[] schedule = habit.getHabitSchedule();
View monView = v.findViewById(R.id.mon_box);
View tueView = v.findViewById(R.id.tue_box);
View wedView = v.findViewById(R.id.wed_box);
View thuView = v.findViewById(R.id.thu_box);
View friView = v.findViewById(R.id.fri_box);
View satView = v.findViewById(R.id.sat_box);
View sunView = v.findViewById(R.id.sun_box);
View[] textViews = {monView, tueView, wedView, thuView, friView, satView, sunView};
// Display the days of the week in the habit's schedule
for (int i = 1; i < schedule.length; i++) {
if (schedule[i]) {
textViews[i-1].setVisibility(View.VISIBLE);
} else {
textViews[i-1].setVisibility(View.GONE);
}
}
return v;
}
}<|fim▁end|> | import android.content.Context;
import android.content.res.Resources;
import android.graphics.Color;
import android.support.v4.content.ContextCompat; |
<|file_name|>model-approval.service.ts<|end_file_name|><|fim▁begin|>// /*
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// */
//<|fim▁hole|>// import {Page} from '@domain/common/page';
// import {CommonUtil} from '@common/util/common.util';
//
// @Injectable()
// export class ModelApprovalService extends AbstractService {
//
// constructor(protected injector: Injector) {
// super(injector);
// }
//
//
// // Loads the model list
// public searchModels(params: any, page: Page): Promise<any> {
// // public searchModels(name: string, user: string, begindate: any, enddate: any, status: string, subscribe: string, page: Page): Promise<any> {
// let url = this.API_URL + `nbmodels/search?projection=forListView`;
//
// url += '&' + CommonUtil.objectToUrlString(params);
// url += '&' + CommonUtil.objectToUrlString(page);
//
// return this.get(url);
// }
//
// // Delete from the approval model list
// public deleteModels(modelIds: string) {
// return this.delete(this.API_URL + 'nbmodels/batch/' + modelIds);
// }
//
// // Detail for a single model
// public getModelApprovalDetail(modelId: string, _projection: string = 'forDetailView') {
// return this.get(this.API_URL + `nbmodels/${modelId}`);
// }
//
// /**
// * Detail lookup
// * @param {string} id
// * @param {Page} _page
// * @param {string} projection
// * @param {Object} _options
// * @returns {Promise<any>}
// */
// public getModelDetail(id: string, _page: Page, projection: string = 'forDetailView', _options?: object): Promise<any> {
// const url = this.API_URL + `nbmodels/` + id + `&projection=${projection}`;
// return this.get(url);
// }
//
// /**
// * Approve / reject / hold an approval => APPROVAL or REJECT or PENDING
// * @param {string} id
// * @param status
// * @returns {Promise<any>}
// */
// public updateModel(id: string, status): Promise<any> {
// const params = {
// statusType: status
// };
// return this.patch(this.API_URL + `nbmodels/` + id, params);
// }
//
// // Run the test code.
// public runTest(id: string): Promise<any> {
// return this.get(this.API_URL + `nbmodels/subscribe/` + id);
// }
//
// // Fetch the test code run history
// public getRunTestHistory(id: string, projection: string = 'forHistoryListView') {
// return this.get(this.API_URL + `nbmodels/${id}?projection=${projection}`);
// }
// }<|fim▁end|> | // import {Injectable, Injector} from '@angular/core';
// import {AbstractService} from '@common/service/abstract.service'; |
<|file_name|>AlipayTradeVendorpayDevicedataUploadResponse.java<|end_file_name|><|fim▁begin|>package com.alipay.api.response;
import com.alipay.api.AlipayResponse;
/**
* ALIPAY API: alipay.trade.vendorpay.devicedata.upload response.<|fim▁hole|> * @author auto create
* @since 1.0, 2016-12-08 00:51:39
*/
public class AlipayTradeVendorpayDevicedataUploadResponse extends AlipayResponse {
private static final long serialVersionUID = 5272579554188824387L;
}<|fim▁end|> | * |
<|file_name|>ngexception.hpp<|end_file_name|><|fim▁begin|>#ifndef FILE_NGEXCEPTION
#define FILE_NGEXCEPTION
/**************************************************************************/
/* File: ngexception.hpp */
/* Author: Joachim Schoeberl */
/* Date: 16. Jan. 2002 */
/**************************************************************************/
namespace netgen
{
/// Base class for all ng exceptions
class NgException
{
/// verbal description of exception
std::string what;
public:
///
DLL_HEADER NgException (const std::string & s);
///<|fim▁hole|> /// append string to description
DLL_HEADER void Append (const std::string & s);
// void Append (const char * s);
/// verbal description of exception
const std::string & What() const { return what; }
};
}
#endif<|fim▁end|> | DLL_HEADER virtual ~NgException ();
|
<|file_name|>interface.go<|end_file_name|><|fim▁begin|>// /*
// Copyright The Kubernetes Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// */
//
// Code generated by MockGen. DO NOT EDIT.
// Source: /go/src/sigs.k8s.io/cloud-provider-azure/pkg/azureclients/diskclient/interface.go
// Package mockdiskclient is a generated GoMock package.
package mockdiskclient
import (
context "context"
reflect "reflect"
compute "github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-12-01/compute"
gomock "github.com/golang/mock/gomock"<|fim▁hole|>type MockInterface struct {
ctrl *gomock.Controller
recorder *MockInterfaceMockRecorder
}
// MockInterfaceMockRecorder is the mock recorder for MockInterface.
type MockInterfaceMockRecorder struct {
mock *MockInterface
}
// NewMockInterface creates a new mock instance.
func NewMockInterface(ctrl *gomock.Controller) *MockInterface {
mock := &MockInterface{ctrl: ctrl}
mock.recorder = &MockInterfaceMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockInterface) EXPECT() *MockInterfaceMockRecorder {
return m.recorder
}
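// Example usage (illustrative sketch with assumed names; not part of the generated code):
//
// ctrl := gomock.NewController(t)
// defer ctrl.Finish()
// mock := NewMockInterface(ctrl)
// mock.EXPECT().Get(gomock.Any(), "testRG", "testDisk").Return(compute.Disk{}, nil)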
// Get mocks base method.
func (m *MockInterface) Get(ctx context.Context, resourceGroupName, diskName string) (compute.Disk, *retry.Error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Get", ctx, resourceGroupName, diskName)
ret0, _ := ret[0].(compute.Disk)
ret1, _ := ret[1].(*retry.Error)
return ret0, ret1
}
// Get indicates an expected call of Get.
func (mr *MockInterfaceMockRecorder) Get(ctx, resourceGroupName, diskName interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockInterface)(nil).Get), ctx, resourceGroupName, diskName)
}
// CreateOrUpdate mocks base method.
func (m *MockInterface) CreateOrUpdate(ctx context.Context, resourceGroupName, diskName string, diskParameter compute.Disk) *retry.Error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "CreateOrUpdate", ctx, resourceGroupName, diskName, diskParameter)
ret0, _ := ret[0].(*retry.Error)
return ret0
}
// CreateOrUpdate indicates an expected call of CreateOrUpdate.
func (mr *MockInterfaceMockRecorder) CreateOrUpdate(ctx, resourceGroupName, diskName, diskParameter interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateOrUpdate", reflect.TypeOf((*MockInterface)(nil).CreateOrUpdate), ctx, resourceGroupName, diskName, diskParameter)
}
// Update mocks base method.
func (m *MockInterface) Update(ctx context.Context, resourceGroupName, diskName string, diskParameter compute.DiskUpdate) *retry.Error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Update", ctx, resourceGroupName, diskName, diskParameter)
ret0, _ := ret[0].(*retry.Error)
return ret0
}
// Update indicates an expected call of Update.
func (mr *MockInterfaceMockRecorder) Update(ctx, resourceGroupName, diskName, diskParameter interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockInterface)(nil).Update), ctx, resourceGroupName, diskName, diskParameter)
}
// Delete mocks base method.
func (m *MockInterface) Delete(ctx context.Context, resourceGroupName, diskName string) *retry.Error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Delete", ctx, resourceGroupName, diskName)
ret0, _ := ret[0].(*retry.Error)
return ret0
}
// Delete indicates an expected call of Delete.
func (mr *MockInterfaceMockRecorder) Delete(ctx, resourceGroupName, diskName interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockInterface)(nil).Delete), ctx, resourceGroupName, diskName)
}
// ListByResourceGroup mocks base method.
func (m *MockInterface) ListByResourceGroup(ctx context.Context, resourceGroupName string) ([]compute.Disk, *retry.Error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListByResourceGroup", ctx, resourceGroupName)
ret0, _ := ret[0].([]compute.Disk)
ret1, _ := ret[1].(*retry.Error)
return ret0, ret1
}
// ListByResourceGroup indicates an expected call of ListByResourceGroup.
func (mr *MockInterfaceMockRecorder) ListByResourceGroup(ctx, resourceGroupName interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListByResourceGroup", reflect.TypeOf((*MockInterface)(nil).ListByResourceGroup), ctx, resourceGroupName)
}<|fim▁end|> | retry "sigs.k8s.io/cloud-provider-azure/pkg/retry"
)
// MockInterface is a mock of Interface interface. |
<|file_name|>model_get_corporations_corporation_id_fw_stats_kills.go<|end_file_name|><|fim▁begin|>package esilatest
/*<|fim▁hole|>type GetCorporationsCorporationIdFwStatsKills struct {
/*
Last week's total number of kills by members of the given corporation against enemy factions */
LastWeek int32 `json:"last_week,omitempty"`
/*
Total number of kills by members of the given corporation against enemy factions since the corporation enlisted */
Total int32 `json:"total,omitempty"`
/*
Yesterday's total number of kills by members of the given corporation against enemy factions */
Yesterday int32 `json:"yesterday,omitempty"`
}<|fim▁end|> | Summary of kills done by the given corporation against enemy factions */ |
<|file_name|>createRegistry.js<|end_file_name|><|fim▁begin|>export default function createRegistry(repositories) {
const storage = { ...repositories };
const registry = {
register(repositoryName, repository) {
storage[repositoryName] = repository;
return registry;
},
has(repositoryName) {
return !!storage[repositoryName];
},
get(repositoryName) {
return storage[repositoryName];
},
reduce(reducer, initialValue) {
return Object
.keys(storage)
.reduce((previous, repositoryName) => reducer(
previous,
storage[repositoryName],
repositoryName,<|fim▁hole|> };
return registry;
}<|fim▁end|> | ), initialValue);
}, |
<|file_name|>FormSelectField.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react';
import { SelectField, MenuItem } from 'fusionui-shared-components-react';
import PropTypes from 'prop-types';
import '../../FormField.scss';
import './FormSelectField.scss';
const style = {
width: '100%',
height: 30,
border: 'none',
backgroundColor: 'rgb(239, 239, 239)',
fontSize: 12,
};
class FormSelectField extends Component {
constructor(props) {
super(props);
this.state = {
value: props.value
};
}
componentWillReceiveProps(nextProps) {
this.setState({ value: nextProps.value });
}
handleChange = (ev, idx, value) => {
this.props.onChange(ev, value, this.props.onBlur);
this.setState({ value });
}
render() {
const { label, fchar, options } = this.props;
return (
<div className="form-field__wrapper form-select__wrapper">
<label className="form-field__label" htmlFor={ fchar }>{ label[fchar] || label.DEFAULT }</label>
<SelectField
style={ style }
menuStyle={ style }
selectedMenuItemStyle={ { fontWeight: 'bold', color: '#000' } }
value={ this.state.value }
onChange={ this.handleChange }
underlineStyle={ { marginBottom: -8, width: '100%' } }
labelStyle={ { paddingLeft: 10, lineHeight: '40px', height: 40 } }<|fim▁hole|> </SelectField>
</div>
);
}
}
FormSelectField.propTypes = {
fchar: PropTypes.string.isRequired,
label: PropTypes.object,
options: PropTypes.arrayOf(
PropTypes.shape({
text: PropTypes.string,
value: PropTypes.string
})
).isRequired,
onChange: PropTypes.func.isRequired,
onBlur: PropTypes.func.isRequired,
value: PropTypes.string
};
FormSelectField.defaultProps = {
errorText: '',
label: {},
value: ''
};
export default FormSelectField;<|fim▁end|> | >
<MenuItem value="" primaryText="Select..." />
{ options.map(option => <MenuItem value={ option.value } key={ option.value } primaryText={ option.text } />) } |
<|file_name|>bbr_leader.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2019, The OpenThread Authors.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @file
* This file implements Primary Backbone Router service management in the Thread Network.
*/
#include "bbr_leader.hpp"
#if (OPENTHREAD_CONFIG_THREAD_VERSION >= OT_THREAD_VERSION_1_2)
#include "common/instance.hpp"
#include "common/locator-getters.hpp"
#include "common/logging.hpp"
namespace ot {
namespace BackboneRouter {
Leader::Leader(Instance &aInstance)
: InstanceLocator(aInstance)
{
Reset();
}
void Leader::Reset(void)
{
// Invalid server short address indicates no available Backbone Router service in the Thread Network.
mConfig.mServer16 = Mac::kShortAddrInvalid;
// Domain Prefix Length 0 indicates no available Domain Prefix in the Thread network.
mDomainPrefix.SetLength(0);
}
otError Leader::GetConfig(BackboneRouterConfig &aConfig) const
{
otError error = OT_ERROR_NONE;
VerifyOrExit(HasPrimary(), error = OT_ERROR_NOT_FOUND);
aConfig = mConfig;
exit:
return error;
}
otError Leader::GetServiceId(uint8_t &aServiceId) const
{
otError error = OT_ERROR_NONE;
uint8_t serviceData = NetworkData::ServiceTlv::kServiceDataBackboneRouter;
VerifyOrExit(HasPrimary(), error = OT_ERROR_NOT_FOUND);
error = Get<NetworkData::Leader>().GetServiceId(NetworkData::ServiceTlv::kThreadEnterpriseNumber, &serviceData,
sizeof(serviceData), true, aServiceId);
exit:
return error;
}
#if (OPENTHREAD_CONFIG_LOG_LEVEL >= OT_LOG_LEVEL_INFO) && (OPENTHREAD_CONFIG_LOG_BBR == 1)
void Leader::LogBackboneRouterPrimary(State aState, const BackboneRouterConfig &aConfig) const
{
OT_UNUSED_VARIABLE(aConfig);
otLogInfoBbr("PBBR state: %s", StateToString(aState));
if (aState != kStateRemoved && aState != kStateNone)
{
otLogInfoBbr("Rloc16: 0x%4X, seqno: %d, delay: %d, timeout %d", aConfig.mServer16, aConfig.mSequenceNumber,
aConfig.mReregistrationDelay, aConfig.mMlrTimeout);
}
}
void Leader::LogDomainPrefix(DomainPrefixState aState, const Ip6::Prefix &aPrefix) const
{
otLogInfoBbr("Domain Prefix: %s, state: %s", aPrefix.ToString().AsCString(), DomainPrefixStateToString(aState));
}
const char *Leader::StateToString(State aState)
{
const char *logString = "Unknown";
switch (aState)
{
case kStateNone:
logString = "None";
break;
case kStateAdded:
logString = "Added";
break;
case kStateRemoved:
logString = "Removed";
break;
case kStateToTriggerRereg:
logString = "Rereg triggered";
break;
case kStateRefreshed:
logString = "Refreshed";
break;
case kStateUnchanged:
logString = "Unchanged";
break;
default:
break;
}
return logString;
}
const char *Leader::DomainPrefixStateToString(DomainPrefixState aState)
{
const char *logString = "Unknown";
switch (aState)
{
case kDomainPrefixNone:
logString = "None";
break;
case kDomainPrefixAdded:
logString = "Added";
break;
case kDomainPrefixRemoved:
logString = "Removed";
break;
case kDomainPrefixRefreshed:
logString = "Refreshed";
break;
case kDomainPrefixUnchanged:
logString = "Unchanged";
break;
}
return logString;
}
#endif
void Leader::Update(void)
{
UpdateBackboneRouterPrimary();
UpdateDomainPrefixConfig();
}
void Leader::UpdateBackboneRouterPrimary(void)
{
BackboneRouterConfig config;
State state;
uint32_t origMlrTimeout;
IgnoreError(Get<NetworkData::Leader>().GetBackboneRouterPrimary(config));
if (config.mServer16 != mConfig.mServer16)
{
if (config.mServer16 == Mac::kShortAddrInvalid)
{
state = kStateRemoved;
}
else if (mConfig.mServer16 == Mac::kShortAddrInvalid)
{
state = kStateAdded;
}
else
{
// Short Address of PBBR changes.
state = kStateToTriggerRereg;
}
}
else if (config.mServer16 == Mac::kShortAddrInvalid)
{
// If no Primary all the time.
state = kStateNone;
}
else if (config.mSequenceNumber != mConfig.mSequenceNumber)
{
state = kStateToTriggerRereg;
}
else if (config.mReregistrationDelay != mConfig.mReregistrationDelay || config.mMlrTimeout != mConfig.mMlrTimeout)
{
state = kStateRefreshed;
}
else
{
state = kStateUnchanged;
}
// Restrain the range of MLR timeout to be always valid
if (config.mServer16 != Mac::kShortAddrInvalid)
{
origMlrTimeout = config.mMlrTimeout;
config.mMlrTimeout = config.mMlrTimeout < static_cast<uint32_t>(Mle::kMlrTimeoutMin)
? static_cast<uint32_t>(Mle::kMlrTimeoutMin)
: config.mMlrTimeout;
config.mMlrTimeout = config.mMlrTimeout > static_cast<uint32_t>(Mle::kMlrTimeoutMax)
? static_cast<uint32_t>(Mle::kMlrTimeoutMax)
: config.mMlrTimeout;
if (config.mMlrTimeout != origMlrTimeout)
{
otLogNoteBbr("Leader MLR Timeout is normalized from %u to %u", origMlrTimeout, config.mMlrTimeout);
}
}
mConfig = config;
LogBackboneRouterPrimary(state, mConfig);
#if OPENTHREAD_FTD && OPENTHREAD_CONFIG_BACKBONE_ROUTER_ENABLE
Get<BackboneRouter::Local>().HandleBackboneRouterPrimaryUpdate(state, mConfig);
#endif
#if OPENTHREAD_CONFIG_MLR_ENABLE || OPENTHREAD_CONFIG_TMF_PROXY_MLR_ENABLE
Get<MlrManager>().HandleBackboneRouterPrimaryUpdate(state, mConfig);
#endif
#if OPENTHREAD_CONFIG_DUA_ENABLE || OPENTHREAD_CONFIG_TMF_PROXY_DUA_ENABLE
Get<DuaManager>().HandleBackboneRouterPrimaryUpdate(state, mConfig);
#endif
}
void Leader::UpdateDomainPrefixConfig(void)
{
NetworkData::Iterator iterator = NetworkData::kIteratorInit;
NetworkData::OnMeshPrefixConfig config;
DomainPrefixState state;
bool found = false;
<|fim▁hole|> found = true;
break;
}
}
if (!found)
{
if (mDomainPrefix.GetLength() != 0)
{
// Domain Prefix does not exist any more.
mDomainPrefix.SetLength(0);
state = kDomainPrefixRemoved;
}
else
{
state = kDomainPrefixNone;
}
}
else if (config.GetPrefix() == mDomainPrefix)
{
state = kDomainPrefixUnchanged;
}
else
{
if (mDomainPrefix.mLength == 0)
{
state = kDomainPrefixAdded;
}
else
{
state = kDomainPrefixRefreshed;
}
mDomainPrefix = config.GetPrefix();
}
LogDomainPrefix(state, mDomainPrefix);
#if OPENTHREAD_FTD && OPENTHREAD_CONFIG_BACKBONE_ROUTER_ENABLE
Get<Local>().HandleDomainPrefixUpdate(state);
Get<NdProxyTable>().HandleDomainPrefixUpdate(state);
#endif
#if OPENTHREAD_CONFIG_DUA_ENABLE || OPENTHREAD_CONFIG_TMF_PROXY_DUA_ENABLE
Get<DuaManager>().HandleDomainPrefixUpdate(state);
#endif
}
bool Leader::IsDomainUnicast(const Ip6::Address &aAddress) const
{
return HasDomainPrefix() && aAddress.MatchesPrefix(mDomainPrefix);
}
} // namespace BackboneRouter
} // namespace ot
#endif // (OPENTHREAD_CONFIG_THREAD_VERSION >= OT_THREAD_VERSION_1_2)<|fim▁end|> | while (Get<NetworkData::Leader>().GetNextOnMeshPrefix(iterator, config) == OT_ERROR_NONE)
{
if (config.mDp)
{ |
<|file_name|>UpdateMethodResponseRequestMarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.apigateway.model.transform;
import java.util.List;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.apigateway.model.*;
<|fim▁hole|>/**
* UpdateMethodResponseRequestMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class UpdateMethodResponseRequestMarshaller {
private static final MarshallingInfo<String> RESTAPIID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
.marshallLocationName("restapi_id").build();
private static final MarshallingInfo<String> RESOURCEID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
.marshallLocationName("resource_id").build();
private static final MarshallingInfo<String> HTTPMETHOD_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
.marshallLocationName("http_method").build();
private static final MarshallingInfo<String> STATUSCODE_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
.marshallLocationName("status_code").build();
private static final MarshallingInfo<List> PATCHOPERATIONS_BINDING = MarshallingInfo.builder(MarshallingType.LIST)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("patchOperations").build();
private static final UpdateMethodResponseRequestMarshaller instance = new UpdateMethodResponseRequestMarshaller();
public static UpdateMethodResponseRequestMarshaller getInstance() {
return instance;
}
/**
* Marshall the given parameter object.
*/
public void marshall(UpdateMethodResponseRequest updateMethodResponseRequest, ProtocolMarshaller protocolMarshaller) {
if (updateMethodResponseRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateMethodResponseRequest.getRestApiId(), RESTAPIID_BINDING);
protocolMarshaller.marshall(updateMethodResponseRequest.getResourceId(), RESOURCEID_BINDING);
protocolMarshaller.marshall(updateMethodResponseRequest.getHttpMethod(), HTTPMETHOD_BINDING);
protocolMarshaller.marshall(updateMethodResponseRequest.getStatusCode(), STATUSCODE_BINDING);
protocolMarshaller.marshall(updateMethodResponseRequest.getPatchOperations(), PATCHOPERATIONS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}<|fim▁end|> | import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
|
<|file_name|>reacher.py<|end_file_name|><|fim▁begin|># Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Reacher domain."""
<|fim▁hole|>from dm_control.rl import control
from dm_control.suite import base
from dm_control.suite import common
from dm_control.suite.utils import randomizers
from dm_control.utils import containers
from dm_control.utils import rewards
import numpy as np
SUITE = containers.TaggedTasks()
_DEFAULT_TIME_LIMIT = 20
_BIG_TARGET = .05
_SMALL_TARGET = .015
def get_model_and_assets():
"""Returns a tuple containing the model XML string and a dict of assets."""
return common.read_model('reacher.xml'), common.ASSETS
@SUITE.add('benchmarking', 'easy')
def easy(time_limit=_DEFAULT_TIME_LIMIT, random=None, environment_kwargs=None):
"""Returns reacher with sparse reward with 5e-2 tol and randomized target."""
physics = Physics.from_xml_string(*get_model_and_assets())
task = Reacher(target_size=_BIG_TARGET, random=random)
environment_kwargs = environment_kwargs or {}
return control.Environment(
physics, task, time_limit=time_limit, **environment_kwargs)
@SUITE.add('benchmarking')
def hard(time_limit=_DEFAULT_TIME_LIMIT, random=None, environment_kwargs=None):
"""Returns reacher with sparse reward with 1e-2 tol and randomized target."""
physics = Physics.from_xml_string(*get_model_and_assets())
task = Reacher(target_size=_SMALL_TARGET, random=random)
environment_kwargs = environment_kwargs or {}
return control.Environment(
physics, task, time_limit=time_limit, **environment_kwargs)
class Physics(mujoco.Physics):
"""Physics simulation with additional features for the Reacher domain."""
def finger_to_target(self):
"""Returns the vector from target to finger in global coordinates."""
return (self.named.data.geom_xpos['target', :2] -
self.named.data.geom_xpos['finger', :2])
def finger_to_target_dist(self):
"""Returns the signed distance between the finger and target surface."""
return np.linalg.norm(self.finger_to_target())
class Reacher(base.Task):
"""A reacher `Task` to reach the target."""
def __init__(self, target_size, random=None):
"""Initialize an instance of `Reacher`.
Args:
target_size: A `float`, tolerance to determine whether finger reached the
target.
random: Optional, either a `numpy.random.RandomState` instance, an
integer seed for creating a new `RandomState`, or None to select a seed
automatically (default).
"""
self._target_size = target_size
super().__init__(random=random)
def initialize_episode(self, physics):
"""Sets the state of the environment at the start of each episode."""
physics.named.model.geom_size['target', 0] = self._target_size
randomizers.randomize_limited_and_rotational_joints(physics, self.random)
# Randomize target position
angle = self.random.uniform(0, 2 * np.pi)
radius = self.random.uniform(.05, .20)
physics.named.model.geom_pos['target', 'x'] = radius * np.sin(angle)
physics.named.model.geom_pos['target', 'y'] = radius * np.cos(angle)
super().initialize_episode(physics)
def get_observation(self, physics):
"""Returns an observation of the state and the target position."""
obs = collections.OrderedDict()
obs['position'] = physics.position()
obs['to_target'] = physics.finger_to_target()
obs['velocity'] = physics.velocity()
return obs
def get_reward(self, physics):
radii = physics.named.model.geom_size[['target', 'finger'], 0].sum()
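# Sparse reward: rewards.tolerance with bounds (0, radii) and the default
# margin of 0 returns 1 when the finger overlaps the target, 0 otherwise.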
return rewards.tolerance(physics.finger_to_target_dist(), (0, radii))<|fim▁end|> | import collections
from dm_control import mujoco |
<|file_name|>v1.py<|end_file_name|><|fim▁begin|>from .base import *
class OpStash(object):
cache = {}
@classmethod
def Add(cls, object):
t = object.type
cls.cache[t] = object
@classmethod
def Lookup(cls, type):
return cls.cache[type]
@classmethod
def Define(cls, pt):
cls.Add(pt)
return pt
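# Define returns the type it registers, so it can double as a class decorator
# (sketch): @OpStash.Define on a record type whose `type` attribute holds its opcode.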
class OpRecord(pstruct.type):
def __data(self):
return OpStash.Lookup( int(self['code'].li) )
_fields_ = [
(Opcode_v1, 'code'),
(__data, 'data'),
]<|fim▁hole|> _fields_ = [
(Integer, 'size'),
(Integer, 'top'),
(Integer, 'left'),
(Integer, 'bottom'),
(Integer, 'right'),
]
class picFrame(pstruct.type):
_fields_ = [
(uint8, 'version'),
(uint8, 'picture'),
]
class bounds(pstruct.type):
_fields_ = [
(Integer, 'top'),
(Integer, 'left'),
(Integer, 'bottom'),
(Integer, 'right'),
]
class pixMap(pstruct.type):
_fields_ = [
(Long, 'baseAddr'),
(Integer, 'rowBytes'),
(bounds, 'bounds'),
(Integer, 'pmVersion'),
(Integer, 'packType'),
(Long, 'packSize'),
(Long, 'hRes'),
(Long, 'vRes'),
(Integer, 'pixelType'),
(Integer, 'pixelSize'),
(Integer, 'cmpCount'),
(Integer, 'cmpSize'),
(Long, 'planeBytes'),
(Long, 'pmTable'),
(Long, 'pmReserved'),
]
class directBitsRect(pstruct.type):
opcode = 0x009a
_fields_ = [
(pixMap, 'pixMap'),
(bounds, 'srcRect'),
(bounds, 'dstRect'),
(Integer, 'mode'),
]
class File(parray.terminated):
_object_ = OpRecord
def isTerminator(self, value):
return int(value['code']) == 0xff<|fim▁end|> |
class picSize(pstruct.type): |
<|file_name|>docker_image.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#<|fim▁hole|># Copyright 2016 Red Hat | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = '''
---
module: docker_image
short_description: Manage docker images.
version_added: "1.5"
description:
- Build, load or pull an image, making the image available for creating containers. Also supports tagging an
image into a repository and archiving an image to a .tar file.
options:
archive_path:
description:
- Use with state C(present) to archive an image to a .tar file.
required: false
version_added: "2.1"
load_path:
description:
- Use with state C(present) to load an image from a .tar file.
required: false
version_added: "2.2"
dockerfile:
description:
- Use with state C(present) to provide an alternate name for the Dockerfile to use when building an image.
default: Dockerfile
required: false
version_added: "2.0"
force:
description:
- Use with state I(absent) to un-tag and remove all images matching the specified name. Use with state
C(present) to build, load or pull an image when the image already exists.
default: false
required: false
version_added: "2.1"
http_timeout:
description:
- Timeout for HTTP requests during the image build operation. Provide a positive integer value for the number of
seconds.
required: false
version_added: "2.1"
name:
description:
- "Image name. Name format will be one of: name, repository/name, registry_server:port/name.
When pushing or pulling an image the name can optionally include the tag by appending ':tag_name'."
required: true
path:
description:
- Use with state 'present' to build an image. Will be the path to a directory containing the context and
Dockerfile for building an image.
aliases:
- build_path
required: false
pull:
description:
- When building an image downloads any updates to the FROM image in Dockerfile.
default: true
required: false
version_added: "2.1"
push:
description:
- Push the image to the registry. Specify the registry as part of the I(name) or I(repository) parameter.
default: false
required: false
version_added: "2.2"
rm:
description:
- Remove intermediate containers after build.
default: true
required: false
version_added: "2.1"
nocache:
description:
- Do not use cache when building an image.
default: false
required: false
repository:
description:
- Full path to a repository. Use with state C(present) to tag the image into the repository. Expects
format I(repository:tag). If no tag is provided, will use the value of the C(tag) parameter or I(latest).
required: false
version_added: "2.1"
state:
description:
- Make assertions about the state of an image.
- When C(absent) an image will be removed. Use the force option to un-tag and remove all images
matching the provided name.
- When C(present) check if an image exists using the provided name and tag. If the image is not found or the
force option is used, the image will either be pulled, built or loaded. By default the image will be pulled
from Docker Hub. To build the image, provide a path value set to a directory containing a context and
Dockerfile. To load an image, specify load_path to provide a path to an archive file. To tag an image to a
repository, provide a repository path. If the name contains a repository path, it will be pushed.
- "NOTE: C(build) is DEPRECATED and will be removed in release 2.3. Specifying C(build) will behave the
same as C(present)."
required: false
default: present
choices:
- absent
- present
- build
tag:
description:
- Used to select an image when pulling. Will be added to the image when pushing, tagging or building. Defaults to
I(latest).
- If C(name) parameter format is I(name:tag), then tag value from C(name) will take precedence.
default: latest
required: false
buildargs:
description:
- Provide a dictionary of C(key:value) build arguments that map to Dockerfile ARG directive.
- Docker expects the value to be a string. For convenience any non-string values will be converted to strings.
- Requires Docker API >= 1.21 and docker-py >= 1.7.0.
type: complex
required: false
version_added: "2.2"
container_limits:
description:
- A dictionary of limits applied to each container created by the build process.
required: false
version_added: "2.1"
type: complex
contains:
memory:
description: Set memory limit for build
type: int
memswap:
description: Total memory (memory + swap), -1 to disable swap
type: int
cpushares:
description: CPU shares (relative weight)
type: int
cpusetcpus:
description: CPUs in which to allow execution, e.g., "0-3", "0,1"
type: str
use_tls:
description:
- "DEPRECATED. Whether to use tls to connect to the docker server. Set to C(no) when TLS will not be used. Set to
C(encrypt) to use TLS. And set to C(verify) to use TLS and verify that the server's certificate is valid for the
server. NOTE: If you specify this option, it will set the value of the tls or tls_verify parameters."
choices:
- no
- encrypt
- verify
default: no
required: false
version_added: "2.0"
extends_documentation_fragment:
- docker
requirements:
- "python >= 2.6"
- "docker-py >= 1.7.0"
- "Docker API >= 1.20"
author:
- Pavel Antonov (@softzilla)
- Chris Houseknecht (@chouseknecht)
- James Tanner (@jctanner)
'''
EXAMPLES = '''
- name: pull an image
docker_image:
name: pacur/centos-7
- name: Tag and push to docker hub
docker_image:
name: pacur/centos-7
repository: dcoppenhagan/myimage
tag: 7.0
push: yes
- name: Tag and push to local registry
docker_image:
name: centos
repository: localhost:5000/centos
tag: 7
push: yes
- name: Remove image
docker_image:
state: absent
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
- name: Build an image and push it to a private repo
docker_image:
path: ./sinatra
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
push: yes
- name: Archive image
docker_image:
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
archive_path: my_sinatra.tar
- name: Load image from archive and push to a private registry
docker_image:
name: localhost:5000/myimages/sinatra
tag: v1
push: yes
load_path: my_sinatra.tar
- name: Build image and with buildargs
docker_image:
path: /path/to/build/dir
name: myimage
buildargs:
log_volume: /var/log/myapp
listen_port: 8080
'''
RETURN = '''
image:
description: Image inspection results for the affected image.
returned: success
type: complex
sample: {}
'''
from ansible.module_utils.docker_common import *
try:
from docker.auth.auth import resolve_repository_name
from docker.utils.utils import parse_repository_tag
except ImportError:
# missing docker-py handled in docker_common
pass
class ImageManager(DockerBaseClass):
def __init__(self, client, results):
super(ImageManager, self).__init__()
self.client = client
self.results = results
parameters = self.client.module.params
self.check_mode = self.client.check_mode
self.archive_path = parameters.get('archive_path')
self.container_limits = parameters.get('container_limits')
self.dockerfile = parameters.get('dockerfile')
self.force = parameters.get('force')
self.load_path = parameters.get('load_path')
self.name = parameters.get('name')
self.nocache = parameters.get('nocache')
self.path = parameters.get('path')
self.pull = parameters.get('pull')
self.repository = parameters.get('repository')
self.rm = parameters.get('rm')
self.state = parameters.get('state')
self.tag = parameters.get('tag')
self.http_timeout = parameters.get('http_timeout')
self.push = parameters.get('push')
self.buildargs = parameters.get('buildargs')
# If name contains a tag, it takes precedence over tag parameter.
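# e.g. (illustrative) a name of "registry:5000/myimage:v2" parses into
# repo "registry:5000/myimage" and tag "v2"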
repo, repo_tag = parse_repository_tag(self.name)
if repo_tag:
self.name = repo
self.tag = repo_tag
if self.state in ['present', 'build']:
self.present()
elif self.state == 'absent':
self.absent()
def fail(self, msg):
self.client.fail(msg)
def present(self):
'''
Handles state = 'present', which includes building, loading or pulling an image,
depending on user provided parameters.
:returns None
'''
image = self.client.find_image(name=self.name, tag=self.tag)
if not image or self.force:
if self.path:
# Build the image
if not os.path.isdir(self.path):
self.fail("Requested build path %s could not be found or you do not have access." % self.path)
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.log("Building image %s" % image_name)
self.results['actions'].append("Built image %s from %s" % (image_name, self.path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.build_image()
elif self.load_path:
# Load the image from an archive
if not os.path.isfile(self.load_path):
self.fail("Error loading image %s. Specified path %s does not exist." % (self.name,
self.load_path))
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.results['actions'].append("Loaded image %s from %s" % (image_name, self.load_path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.load_image()
else:
# pull the image
self.results['actions'].append('Pulled image %s:%s' % (self.name, self.tag))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.client.pull_image(self.name, tag=self.tag)
if self.archive_path:
self.archive_image(self.name, self.tag)
if self.push and not self.repository:
self.push_image(self.name, self.tag)
elif self.repository:
self.tag_image(self.name, self.tag, self.repository, force=self.force, push=self.push)
def absent(self):
'''
Handles state = 'absent', which removes an image.
:return None
'''
image = self.client.find_image(self.name, self.tag)
if image:
name = self.name
if self.tag:
name = "%s:%s" % (self.name, self.tag)
if not self.check_mode:
try:
self.client.remove_image(name, force=self.force)
except Exception as exc:
self.fail("Error removing image %s - %s" % (name, str(exc)))
self.results['changed'] = True
self.results['actions'].append("Removed image %s" % (name))
self.results['image']['state'] = 'Deleted'
def archive_image(self, name, tag):
'''
Archive an image to a .tar file. Called when archive_path is passed.
:param name - name of the image. Type: str
:return None
'''
if not tag:
tag = "latest"
image = self.client.find_image(name=name, tag=tag)
if not image:
self.log("archive image: image %s:%s not found" % (name, tag))
return
image_name = "%s:%s" % (name, tag)
self.results['actions'].append('Archived image %s to %s' % (image_name, self.archive_path))
self.results['changed'] = True
if not self.check_mode:
self.log("Getting archive of image %s" % image_name)
try:
image = self.client.get_image(image_name)
except Exception as exc:
self.fail("Error getting image %s - %s" % (image_name, str(exc)))
try:
image_tar = open(self.archive_path, 'w')
image_tar.write(image.data)
image_tar.close()
except Exception as exc:
self.fail("Error writing image archive %s - %s" % (self.archive_path, str(exc)))
image = self.client.find_image(name=name, tag=tag)
if image:
self.results['image'] = image
def push_image(self, name, tag=None):
'''
If the name of the image contains a repository path, then push the image.
:param name Name of the image to push.
:param tag Use a specific tag.
:return: None
'''
repository = name
if not tag:
repository, tag = parse_repository_tag(name)
registry, repo_name = resolve_repository_name(repository)
self.log("push %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
if registry:
self.results['actions'].append("Pushed image %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
self.results['changed'] = True
if not self.check_mode:
status = None
try:
for line in self.client.push(repository, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('errorDetail'):
raise Exception(line['errorDetail']['message'])
status = line.get('status')
except Exception as exc:
if re.search('unauthorized', str(exc)):
if re.search('authentication required', str(exc)):
self.fail("Error pushing image %s/%s:%s - %s. Try logging into %s first." %
(registry, repo_name, tag, str(exc), registry))
else:
self.fail("Error pushing image %s/%s:%s - %s. Does the repository exist?" %
(registry, repo_name, tag, str(exc)))
self.fail("Error pushing image %s: %s" % (repository, str(exc)))
self.results['image'] = self.client.find_image(name=repository, tag=tag)
if not self.results['image']:
self.results['image'] = dict()
self.results['image']['push_status'] = status
def tag_image(self, name, tag, repository, force=False, push=False):
'''
Tag an image into a repository.
:param name: name of the image. required.
:param tag: image tag.
:param repository: path to the repository. required.
:param force: bool. force tagging, even it image already exists with the repository path.
:param push: bool. push the image once it's tagged.
:return: None
'''
repo, repo_tag = parse_repository_tag(repository)
if not repo_tag:
repo_tag = "latest"
if tag:
repo_tag = tag
image = self.client.find_image(name=repo, tag=repo_tag)
found = 'found' if image else 'not found'
self.log("image %s was %s" % (repo, found))
if not image or force:
self.log("tagging %s:%s to %s:%s" % (name, tag, repo, repo_tag))
self.results['changed'] = True
self.results['actions'].append("Tagged image %s:%s to %s:%s" % (name, tag, repo, repo_tag))
if not self.check_mode:
try:
# Finding the image does not always work, especially running a localhost registry. In those
# cases, if we don't set force=True, it errors.
image_name = name
if tag and not re.search(tag, name):
image_name = "%s:%s" % (name, tag)
tag_status = self.client.tag(image_name, repo, tag=repo_tag, force=True)
if not tag_status:
raise Exception("Tag operation failed.")
except Exception as exc:
self.fail("Error: failed to tag image - %s" % str(exc))
self.results['image'] = self.client.find_image(name=repo, tag=repo_tag)
if push:
self.push_image(repo, repo_tag)
def build_image(self):
'''
Build an image
:return: image dict
'''
params = dict(
path=self.path,
tag=self.name,
rm=self.rm,
nocache=self.nocache,
stream=True,
timeout=self.http_timeout,
pull=self.pull,
forcerm=self.rm,
dockerfile=self.dockerfile,
decode=True
)
if self.tag:
params['tag'] = "%s:%s" % (self.name, self.tag)
if self.container_limits:
params['container_limits'] = self.container_limits
if self.buildargs:
for key, value in self.buildargs.items():
if not isinstance(value, basestring):
self.buildargs[key] = str(value)
params['buildargs'] = self.buildargs
for line in self.client.build(**params):
# line = json.loads(line)
self.log(line, pretty_print=True)
if line.get('error'):
if line.get('errorDetail'):
errorDetail = line.get('errorDetail')
self.fail("Error building %s - code: %s message: %s" % (self.name,
errorDetail.get('code'),
errorDetail.get('message')))
else:
self.fail("Error building %s - %s" % (self.name, line.get('error')))
return self.client.find_image(name=self.name, tag=self.tag)
def load_image(self):
'''
Load an image from a .tar archive
:return: image dict
'''
try:
self.log("Opening image %s" % self.load_path)
image_tar = open(self.load_path, 'r')
except Exception as exc:
self.fail("Error opening image %s - %s" % (self.load_path, str(exc)))
try:
self.log("Loading image from %s" % self.load_path)
self.client.load_image(image_tar)
except Exception as exc:
self.fail("Error loading image %s - %s" % (self.name, str(exc)))
try:
image_tar.close()
except Exception as exc:
self.fail("Error closing image %s - %s" % (self.name, str(exc)))
return self.client.find_image(self.name, self.tag)
def main():
argument_spec = dict(
archive_path=dict(type='path'),
container_limits=dict(type='dict'),
dockerfile=dict(type='str'),
force=dict(type='bool', default=False),
http_timeout=dict(type='int'),
load_path=dict(type='path'),
name=dict(type='str', required=True),
nocache=dict(type='bool', default=False),
path=dict(type='path', aliases=['build_path']),
pull=dict(type='bool', default=True),
push=dict(type='bool', default=False),
repository=dict(type='str'),
rm=dict(type='bool', default=True),
state=dict(type='str', choices=['absent', 'present', 'build'], default='present'),
tag=dict(type='str', default='latest'),
use_tls=dict(type='str', default='no', choices=['no', 'encrypt', 'verify']),
buildargs=dict(type='dict', default=None),
)
client = AnsibleDockerClient(
argument_spec=argument_spec,
supports_check_mode=True,
)
results = dict(
changed=False,
actions=[],
image={}
)
ImageManager(client, results)
client.module.exit_json(**results)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>auth.go<|end_file_name|><|fim▁begin|>package gitnotify
import (
"errors"
"fmt"
"html/template"
"net/http"
"os"
"sort"
"github.com/gorilla/mux"
"github.com/markbates/goth"
"github.com/markbates/goth/gothic"
"github.com/markbates/goth/providers/github"
"github.com/markbates/goth/providers/gitlab"
"github.com/sairam/kinli"
)
// Authentication data/$provider/$user/$settingsFile
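// e.g. data/github/<username>/<config.SettingsFile> (illustrative layout)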
type Authentication struct {
Provider string `yaml:"provider"` // github/gitlab
Name string `yaml:"name"` // name of the person we address
Email string `yaml:"email"` // email that we will send to
UserName string `yaml:"username"` // username for identification
Token string `yaml:"token"` // used to query the provider
}
// UserInfo provides provider/username
func (userInfo *Authentication) UserInfo() string {
return fmt.Sprintf("%s/%s", userInfo.Provider, userInfo.UserName)
}
func (userInfo *Authentication) save() {
conf := new(Setting)
os.MkdirAll(userInfo.getConfigDir(), 0700)
conf.load(userInfo.getConfigFile())
conf.Auth = userInfo
conf.save(userInfo.getConfigFile())
}
func (userInfo *Authentication) getConfigDir() string {
if userInfo.Provider == "" {
return ""
}
return fmt.Sprintf("data/%s/%s", userInfo.Provider, userInfo.UserName)
}
func (userInfo *Authentication) getConfigFile() string {
if userInfo.Provider == "" {
return ""
}
return fmt.Sprintf("%s/%s", userInfo.getConfigDir(), config.SettingsFile)
}
func preInitAuth() {
// ProviderNames is the map of key/value providers configured
config.Providers = make(map[string]string)
var providers []goth.Provider
if provider := configureGithub(); provider != nil {
providers = append(providers, provider)
}
if provider := configureGitlab(); provider != nil {
providers = append(providers, provider)
}
goth.UseProviders(providers...)
}
func initAuth(p *mux.Router) {
p.HandleFunc("/{provider}/callback", authProviderCallbackHandler).Methods("GET")
p.HandleFunc("/{provider}", authProviderHandler).Methods("GET")
p.HandleFunc("/", authListHandler).Methods("GET")
}
func configureGithub() goth.Provider {
if config.GithubURLEndPoint != "" && config.GithubAPIEndPoint != "" {
if os.Getenv("GITHUB_KEY") == "" || os.Getenv("GITHUB_SECRET") == "" {
panic("Missing Configuration: Github Authentication is not set!")
}
github.AuthURL = config.GithubURLEndPoint + "login/oauth/authorize"
github.TokenURL = config.GithubURLEndPoint + "login/oauth/access_token"
github.ProfileURL = config.GithubAPIEndPoint + "user"
config.Providers[GithubProvider] = "Github"
// for github, add scope: "repo:status" to access private repositories
return github.New(os.Getenv("GITHUB_KEY"), os.Getenv("GITHUB_SECRET"), config.websiteURL()+"/auth/github/callback", "user:email")
}
return nil
}
func configureGitlab() goth.Provider {
if config.GitlabURLEndPoint != "" && config.GitlabAPIEndPoint != "" {
if os.Getenv("GITLAB_KEY") == "" || os.Getenv("GITLAB_SECRET") == "" {
panic("Missing Configuration: Github Authentication is not set!")
}
gitlab.AuthURL = config.GitlabURLEndPoint + "oauth/authorize"
gitlab.TokenURL = config.GitlabURLEndPoint + "oauth/token"
gitlab.ProfileURL = config.GitlabAPIEndPoint + "user"
config.Providers[GitlabProvider] = "Gitlab"
// gitlab does not have any scopes, you get full access to the user's account
return gitlab.New(os.Getenv("GITLAB_KEY"), os.Getenv("GITLAB_SECRET"), config.websiteURL()+"/auth/gitlab/callback")
}
return nil
}
func authListHandler(res http.ResponseWriter, req *http.Request) {
var keys []string
for k := range config.Providers {
keys = append(keys, k)
}
sort.Strings(keys)
providerIndex := &ProviderIndex{Providers: keys, ProvidersMap: config.Providers}
t, _ := template.New("foo").Parse(indexTemplate)
t.Execute(res, providerIndex)
}
func authProviderHandler(res http.ResponseWriter, req *http.Request) {
hc := &kinli.HttpContext{W: res, R: req}
if isAuthed(hc) {
text := "User is already logged in"
kinli.DisplayText(hc, res, text)
} else {
statCount("auth.start")
gothic.BeginAuthHandler(res, req)
}
}
func authProviderCallbackHandler(res http.ResponseWriter, req *http.Request) {
statCount("auth.complete")
user, err := gothic.CompleteUserAuth(res, req)
if err != nil {
fmt.Fprintln(res, err)
return
}
authType, _ := getProviderName(req)
auth := &Authentication{
Provider: authType,
UserName: user.NickName,
Name: user.Name,<|fim▁hole|> Token: user.AccessToken,
}
auth.save()
hc := &kinli.HttpContext{W: res, R: req}
loginTheUser(hc, auth, authType)
http.Redirect(res, req, kinli.HomePathAuthed, 302)
}
// ProviderIndex is used for setting up the providers
type ProviderIndex struct {
Providers []string
ProvidersMap map[string]string
}
// See gothic/gothic.go: GetProviderName function
// Overridden since we use mux
func getProviderName(req *http.Request) (string, error) {
vars := mux.Vars(req)
provider := vars["provider"]
if provider == "" {
return provider, errors.New("you must select a provider")
}
return provider, nil
}
var indexTemplate = `{{range $key,$value:=.Providers}}
<p><a href="/auth/{{$value}}">Log in with {{index $.ProvidersMap $value}}</a></p>
{{end}}`<|fim▁end|> | Email: user.Email, |
<|file_name|>GeometryFactory.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import Colors
import Shapes
from abc import ABCMeta, abstractmethod
class AbstractFactory(object):
__metaclass__ = ABCMeta
@abstractmethod
def get_color(self, color):
pass
@abstractmethod
def get_shape(self, shape):
pass
class ShapeFactory(AbstractFactory):
def get_color(self, color):
raise InvalidFactoryException('Use the color factory for colors')
def get_shape(self, shape):
shape = shape.lower()
if shape == 'circle':
return Shapes.Circle()
elif shape == 'rectangle':
return Shapes.Rectangle()
elif shape == 'square':
return Shapes.Square()
else:
raise InvalidShapeException('unknown shape: {0}'.format(shape))
<|fim▁hole|> return Colors.Red()
elif color == 'green':
return Colors.Green()
elif color == 'blue':
return Colors.Blue()
else:
raise InvalidColorException('unknown color: {0}'.format(color))
def get_shape(self, shape):
raise InvalidFactoryException('use the shape factory for shapes')
class FactoryProducer(object):
@staticmethod
def get_factory(factory_name):
factory_name = factory_name.lower()
if factory_name == 'shape':
return ShapeFactory()
elif factory_name == 'color':
return ColorFactory()
else:
raise InvalidFactoryException('unknown factory specified')
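# Example usage (sketch):
# circle = FactoryProducer.get_factory('shape').get_shape('circle')
# red = FactoryProducer.get_factory('color').get_color('red')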
class InvalidFactoryException(Exception):
pass
class InvalidColorException(Exception):
pass
class InvalidShapeException(Exception):
pass<|fim▁end|> | class ColorFactory(AbstractFactory):
def get_color(self, color):
color = color.lower()
if color == 'red': |
<|file_name|>h5f.rs<|end_file_name|><|fim▁begin|>pub use self::H5F_scope_t::*;
pub use self::H5F_close_degree_t::*;
pub use self::H5F_mem_t::*;
pub use self::H5F_libver_t::*;
use libc::{c_int, c_uint, c_void, c_char, c_double, size_t, ssize_t};
use h5::{herr_t, hsize_t, htri_t, hssize_t, H5_ih_info_t};
use h5i::hid_t;
use h5ac::H5AC_cache_config_t;
/* these flags call H5check() in the C library */
pub const H5F_ACC_RDONLY: c_uint = 0x0000;
pub const H5F_ACC_RDWR: c_uint = 0x0001;
pub const H5F_ACC_TRUNC: c_uint = 0x0002;
pub const H5F_ACC_EXCL: c_uint = 0x0004;
pub const H5F_ACC_DEBUG: c_uint = 0x0008;
pub const H5F_ACC_CREAT: c_uint = 0x0010;
pub const H5F_ACC_DEFAULT: c_uint = 0xffff;
pub const H5F_OBJ_FILE: c_uint = 0x0001;
pub const H5F_OBJ_DATASET: c_uint = 0x0002;
pub const H5F_OBJ_GROUP: c_uint = 0x0004;
pub const H5F_OBJ_DATATYPE: c_uint = 0x0008;
pub const H5F_OBJ_ATTR: c_uint = 0x0010;
pub const H5F_OBJ_ALL: c_uint = H5F_OBJ_FILE |
H5F_OBJ_DATASET |
H5F_OBJ_GROUP |
H5F_OBJ_DATATYPE |
H5F_OBJ_ATTR;
pub const H5F_OBJ_LOCAL: c_uint = 0x0020;
pub const H5F_FAMILY_DEFAULT: hsize_t = 0;
pub const H5F_MPIO_DEBUG_KEY: &'static str = "H5F_mpio_debug_key";
pub const H5F_UNLIMITED: hsize_t = !0;
#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_scope_t {
H5F_SCOPE_LOCAL = 0,
H5F_SCOPE_GLOBAL = 1,
}
#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_close_degree_t {
H5F_CLOSE_DEFAULT = 0,
H5F_CLOSE_WEAK = 1,
H5F_CLOSE_SEMI = 2,
H5F_CLOSE_STRONG = 3,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct H5F_info_t {
pub super_ext_size: hsize_t,
pub sohm: __H5F_info_t__sohm,
}
impl ::std::default::Default for H5F_info_t {
fn default() -> H5F_info_t { unsafe { ::std::mem::zeroed() } }
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct __H5F_info_t__sohm {
pub hdr_size: hsize_t,
pub msgs_info: H5_ih_info_t,
}
impl ::std::default::Default for __H5F_info_t__sohm {
fn default() -> __H5F_info_t__sohm { unsafe { ::std::mem::zeroed() } }
}
#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_mem_t {
H5FD_MEM_NOLIST = -1,
H5FD_MEM_DEFAULT = 0,
H5FD_MEM_SUPER = 1,
H5FD_MEM_BTREE = 2,
H5FD_MEM_DRAW = 3,
H5FD_MEM_GHEAP = 4,
H5FD_MEM_LHEAP = 5,
H5FD_MEM_OHDR = 6,
H5FD_MEM_NTYPES = 7,
}
#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_libver_t {
H5F_LIBVER_EARLIEST = 0,
H5F_LIBVER_LATEST = 1,
}
pub const H5F_LIBVER_18: H5F_libver_t = H5F_LIBVER_LATEST;
extern {
pub fn H5Fis_hdf5(filename: *const c_char) -> htri_t;
pub fn H5Fcreate(filename: *const c_char, flags: c_uint, create_plist: hid_t, access_plist:
hid_t) -> hid_t;
pub fn H5Fopen(filename: *const c_char, flags: c_uint, access_plist: hid_t) -> hid_t;
pub fn H5Freopen(file_id: hid_t) -> hid_t;
pub fn H5Fflush(object_id: hid_t, scope: H5F_scope_t) -> herr_t;
pub fn H5Fclose(file_id: hid_t) -> herr_t;
pub fn H5Fget_create_plist(file_id: hid_t) -> hid_t;
pub fn H5Fget_access_plist(file_id: hid_t) -> hid_t;
pub fn H5Fget_intent(file_id: hid_t, intent: *mut c_uint) -> herr_t;
pub fn H5Fget_obj_count(file_id: hid_t, types: c_uint) -> ssize_t;
pub fn H5Fget_obj_ids(file_id: hid_t, types: c_uint, max_objs: size_t, obj_id_list: *mut hid_t)
-> ssize_t;
pub fn H5Fget_vfd_handle(file_id: hid_t, fapl: hid_t, file_handle: *mut *mut c_void) -> herr_t;
pub fn H5Fmount(loc: hid_t, name: *const c_char, child: hid_t, plist: hid_t) -> herr_t;
pub fn H5Funmount(loc: hid_t, name: *const c_char) -> herr_t;
pub fn H5Fget_freespace(file_id: hid_t) -> hssize_t;
pub fn H5Fget_filesize(file_id: hid_t, size: *mut hsize_t) -> herr_t;
pub fn H5Fget_file_image(file_id: hid_t, buf_ptr: *mut c_void, buf_len: size_t) -> ssize_t;
pub fn H5Fget_mdc_config(file_id: hid_t, config_ptr: *mut H5AC_cache_config_t) -> herr_t;
pub fn H5Fset_mdc_config(file_id: hid_t, config_ptr: *mut H5AC_cache_config_t) -> herr_t;
pub fn H5Fget_mdc_hit_rate(file_id: hid_t, hit_rate_ptr: *mut c_double) -> herr_t;
pub fn H5Fget_mdc_size(file_id: hid_t, max_size_ptr: *mut size_t, min_clean_size_ptr: *mut
size_t, cur_size_ptr: *mut size_t, cur_num_entries_ptr: *mut c_int) -><|fim▁hole|> pub fn H5Fget_info(obj_id: hid_t, bh_info: *mut H5F_info_t) -> herr_t;
pub fn H5Fclear_elink_file_cache(file_id: hid_t) -> herr_t;
}<|fim▁end|> | herr_t;
pub fn H5Freset_mdc_hit_rate_stats(file_id: hid_t) -> herr_t;
pub fn H5Fget_name(obj_id: hid_t, name: *mut c_char, size: size_t) -> ssize_t; |
<|file_name|>warehouse.go<|end_file_name|><|fim▁begin|>package def
/*<|fim▁hole|> FIXME this is really ambiguous vs `rio.SiloURI`, should probably try
to refactor to only be one.
*/
type WarehouseCoords []WarehouseCoord
type WarehouseCoord string<|fim▁end|> | A list of warehouse coordinates, as simple strings (they're serialized
as such).
|
<|file_name|>BlockBuilder.ts<|end_file_name|><|fim▁begin|>module Block {
export interface BlockBuilderInterface {
buildRandomBlocks(numberOfBlocks: number);
buildCustomBlocks(blocks: BlockDimensionModel[]);
}
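    // Hypothetical wiring sketch (element ids and the block count are
    // illustrative values, not from the original source):
    //   var builder = new BlockBuilder(
    //       <HTMLCanvasElement>document.getElementById("inputCanvas"),
    //       <HTMLCanvasElement>document.getElementById("outputCanvas"),
    //       new StorageProvider.LocalStorageWrapper());
    //   builder.buildRandomBlocks(10);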
export class BlockBuilder implements BlockBuilderInterface {
private _inputCanvasElement: HTMLCanvasElement;
private _outputCanvasElement: HTMLCanvasElement;
private _randomBlocks: BlockDimensionModel[];
private _storageProvider: StorageProvider.LocalStorageWrapper;
constructor(inputCanvas: HTMLCanvasElement, outputCanvas: HTMLCanvasElement, storageProvider: StorageProvider.LocalStorageWrapper) {
this._inputCanvasElement = inputCanvas;
this._outputCanvasElement = outputCanvas;
this._storageProvider = storageProvider;
}<|fim▁hole|> this.writeRandomBlocksToStorageProvider();
var storedBlocks = this._storageProvider.retrieveBlocks();
var context = this._inputCanvasElement.getContext("2d");
context.clearRect(0, 0, this._inputCanvasElement.width, this._inputCanvasElement.height);
this.drawBlocks(context, storedBlocks);
}
buildCustomBlocks(blocks: BlockDimensionModel[]) {
var context = this._outputCanvasElement.getContext("2d");
context.clearRect(0, 0, this._outputCanvasElement.width, this._outputCanvasElement.height);
this.drawBlocks(context, blocks);
}
private writeRandomBlocksToStorageProvider() {
this._storageProvider.storeBlocks(this._randomBlocks);
}
private drawBlocks(canvasContext: CanvasRenderingContext2D, blocks: BlockDimensionModel[]) {
        //Force reading of blocks back from local storage. This is bad practice and unnecessary, as the class
        //already holds a reference (dependency) to an array of blocks which could easily be utilised.
        //See the commented-out code below for the actual best practice. This was done as requested in the requirements document.
blocks.forEach(item => {
canvasContext.beginPath();
canvasContext.rect(item.xCoordinate, item.yCoordinate, item.width, item.height);
canvasContext.stroke();
canvasContext.closePath();
});
//this._blocks.forEach(item => {
// context.rect(item.xCoordinate, item.yCoordinate, item.width, item.height);
// context.stroke();
//});
}
private generateBlockSet(numberOfBlocks: number) {
var blockDimensionGenerator = new RandomBlockDimensionGenerator(this._inputCanvasElement.clientWidth, this._inputCanvasElement.height);
this._randomBlocks = blockDimensionGenerator.generateRandomSet(numberOfBlocks);
}
}
}<|fim▁end|> |
buildRandomBlocks(numberOfBlocks: number) {
this.generateBlockSet(numberOfBlocks);
|
<|file_name|>builder.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::time::Duration;
use anyhow::{Context, Error};
use url::Url;
use anyhow::anyhow;
use auth::AuthSection;
use configparser::config::ConfigSet;
use http_client::HttpVersion;
use crate::client::Client;
use crate::errors::{ConfigError, EdenApiError};
/// Builder for creating new EdenAPI clients.
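///
/// A minimal, hypothetical usage sketch (the URL and certificate paths are
/// illustrative values, not defaults of this crate):
///
/// ```no_run
/// let client = Builder::new()
///     .server_url("https://example.com/edenapi/".parse().unwrap())
///     .cert("/path/to/client.pem")
///     .key("/path/to/client.key")
///     .build();
/// ```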
#[derive(Debug, Default)]
pub struct Builder {
server_url: Option<Url>,
cert: Option<PathBuf>,
key: Option<PathBuf>,
ca_bundle: Option<PathBuf>,
headers: HashMap<String, String>,
max_files: Option<usize>,
max_trees: Option<usize>,
max_history: Option<usize>,
max_location_to_hash: Option<usize>,
timeout: Option<Duration>,
debug: bool,
correlator: Option<String>,
http_version: Option<HttpVersion>,
validate_certs: bool,
log_dir: Option<PathBuf>,
}
impl Builder {
pub fn new() -> Self {
Default::default()
}<|fim▁hole|> pub fn build(self) -> Result<Client, EdenApiError> {
self.try_into().map(Client::with_config)
}
/// Populate a `Builder` from a Mercurial configuration.
pub fn from_config(config: &ConfigSet) -> Result<Self, EdenApiError> {
let server_url = config
.get_opt::<String>("edenapi", "url")
.map_err(|e| ConfigError::Malformed("edenapi.url".into(), e))?
.ok_or(ConfigError::MissingUrl)?
.parse::<Url>()
.map_err(ConfigError::InvalidUrl)?;
let validate_certs = config
.get_opt::<bool>("edenapi", "validate-certs")
.map_err(|e| ConfigError::Malformed("edenapi.validate-certs".into(), e))?
.unwrap_or_default();
let (cert, key, ca_bundle) = AuthSection::from_config(&config)
.validate(validate_certs)
.best_match_for(&server_url)?
.map(|auth| (auth.cert, auth.key, auth.cacerts))
.unwrap_or_default();
let headers = config
.get_opt::<String>("edenapi", "headers")
.map_err(|e| ConfigError::Malformed("edenapi.headers".into(), e))?
.map(parse_headers)
.transpose()
.map_err(|e| ConfigError::Malformed("edenapi.headers".into(), e))?
.unwrap_or_default();
let max_files = config
.get_opt("edenapi", "maxfiles")
.map_err(|e| ConfigError::Malformed("edenapi.maxfiles".into(), e))?;
let max_trees = config
.get_opt("edenapi", "maxtrees")
.map_err(|e| ConfigError::Malformed("edenapi.maxtrees".into(), e))?;
let max_history = config
.get_opt("edenapi", "maxhistory")
.map_err(|e| ConfigError::Malformed("edenapi.maxhistory".into(), e))?;
let max_location_to_hash = config
.get_opt("edenapi", "maxlocationtohash")
.map_err(|e| ConfigError::Malformed("edenapi.maxlocationtohash".into(), e))?;
let timeout = config
.get_opt("edenapi", "timeout")
.map_err(|e| ConfigError::Malformed("edenapi.timeout".into(), e))?
.map(Duration::from_secs);
let debug = config
.get_opt("edenapi", "debug")
.map_err(|e| ConfigError::Malformed("edenapi.timeout".into(), e))?
.unwrap_or_default();
let http_version = config
.get_opt("edenapi", "http-version")
.map_err(|e| ConfigError::Malformed("edenapi.http-version".into(), e))?
.unwrap_or_else(|| "2".to_string());
let http_version = Some(match http_version.as_str() {
"1.1" => HttpVersion::V11,
"2" => HttpVersion::V2,
x => {
return Err(EdenApiError::BadConfig(ConfigError::Malformed(
"edenapi.http-version".into(),
anyhow!("invalid http version {}", x),
)));
}
});
let log_dir = config
.get_opt::<PathBuf>("edenapi", "logdir")
.map_err(|e| ConfigError::Malformed("edenapi.logdir".into(), e))?;
Ok(Self {
server_url: Some(server_url),
cert,
key,
ca_bundle,
headers,
max_files,
max_trees,
max_history,
max_location_to_hash,
timeout,
debug,
correlator: None,
http_version,
validate_certs,
log_dir,
})
}
/// Set the server URL.
pub fn server_url(mut self, url: Url) -> Self {
self.server_url = Some(url);
self
}
/// Specify a client certificate for authenticating with the server.
/// The caller should provide a path to PEM-encoded X.509 certificate file.
/// The corresponding private key may either be provided in the same file
/// as the certificate, or separately using the `key` method.
pub fn cert(mut self, cert: impl AsRef<Path>) -> Self {
self.cert = Some(cert.as_ref().into());
self
}
/// Specify the client's private key
pub fn key(mut self, key: impl AsRef<Path>) -> Self {
self.key = Some(key.as_ref().into());
self
}
/// Specify a CA certificate bundle to be used to validate the server's
/// TLS certificate in place of the default system certificate bundle.
/// Primarily used in tests.
pub fn ca_bundle(mut self, ca: impl AsRef<Path>) -> Self {
self.ca_bundle = Some(ca.as_ref().into());
self
}
/// Extra HTTP headers that should be sent with each request.
pub fn headers<T, K, V>(mut self, headers: T) -> Self
where
T: IntoIterator<Item = (K, V)>,
K: ToString,
V: ToString,
{
let headers = headers
.into_iter()
.map(|(k, v)| (k.to_string(), v.to_string()));
self.headers.extend(headers);
self
}
/// Add an extra HTTP header that should be sent with each request.
pub fn header(mut self, name: impl ToString, value: impl ToString) -> Self {
self.headers.insert(name.to_string(), value.to_string());
self
}
/// Maximum number of keys per file request. Larger requests will be
/// split up into concurrently-sent batches.
pub fn max_files(mut self, size: Option<usize>) -> Self {
self.max_files = size;
self
}
/// Maximum number of keys per tree request. Larger requests will be
/// split up into concurrently-sent batches.
pub fn max_trees(mut self, size: Option<usize>) -> Self {
self.max_trees = size;
self
}
/// Maximum number of keys per history request. Larger requests will be
/// split up into concurrently-sent batches.
pub fn max_history(mut self, size: Option<usize>) -> Self {
self.max_history = size;
self
}
/// Maximum number of locations per location to has request. Larger requests will be split up
/// into concurrently-sent batches.
pub fn max_location_to_hash(mut self, size: Option<usize>) -> Self {
self.max_location_to_hash = size;
self
}
/// Timeout for HTTP requests sent by the client.
pub fn timeout(mut self, timeout: Duration) -> Self {
self.timeout = Some(timeout);
self
}
/// Unique identifier that will be logged by both the client and server for
/// every request, allowing log entries on both sides to be correlated. Also
/// allows correlating multiple requests that were made by the same instance
/// of the client.
pub fn correlator(mut self, correlator: Option<impl ToString>) -> Self {
self.correlator = correlator.map(|s| s.to_string());
self
}
/// Set the HTTP version that the client should use.
pub fn http_version(mut self, version: HttpVersion) -> Self {
self.http_version = Some(version);
self
}
/// Specify whether the client should validate the user's client certificate
/// before each request.
pub fn validate_certs(mut self, validate_certs: bool) -> Self {
self.validate_certs = validate_certs;
self
}
/// If specified, the client will write a JSON version of every request
/// it sends to the specified directory. This is primarily useful for
/// debugging. The JSON requests can be sent with the `edenapi_cli`, or
/// converted to CBOR with the `make_req` tool and sent with `curl`.
pub fn log_dir(mut self, dir: impl AsRef<Path>) -> Self {
self.log_dir = Some(dir.as_ref().into());
self
}
}
/// Configuration for a `Client`. Essentially has the same fields as a
/// `Builder`, but required fields are not optional and values have been
/// appropriately parsed and validated.
#[derive(Debug)]
pub(crate) struct Config {
pub(crate) server_url: Url,
pub(crate) cert: Option<PathBuf>,
pub(crate) key: Option<PathBuf>,
pub(crate) ca_bundle: Option<PathBuf>,
pub(crate) headers: HashMap<String, String>,
pub(crate) max_files: Option<usize>,
pub(crate) max_trees: Option<usize>,
pub(crate) max_history: Option<usize>,
pub(crate) max_location_to_hash: Option<usize>,
pub(crate) timeout: Option<Duration>,
pub(crate) debug: bool,
pub(crate) correlator: Option<String>,
pub(crate) http_version: Option<HttpVersion>,
pub(crate) validate_certs: bool,
pub(crate) log_dir: Option<PathBuf>,
}
impl TryFrom<Builder> for Config {
type Error = EdenApiError;
fn try_from(builder: Builder) -> Result<Self, Self::Error> {
let Builder {
server_url,
cert,
key,
ca_bundle,
headers,
max_files,
max_trees,
max_history,
max_location_to_hash,
timeout,
debug,
correlator,
http_version,
validate_certs,
log_dir,
} = builder;
// Check for missing required fields.
let mut server_url = server_url.ok_or(ConfigError::MissingUrl)?;
// Ensure the base URL's path ends with a slash so that `Url::join`
// won't strip the final path component.
if !server_url.path().ends_with('/') {
let path = format!("{}/", server_url.path());
server_url.set_path(&path);
}
// Setting these to 0 is the same as None.
let max_files = max_files.filter(|n| *n > 0);
let max_trees = max_trees.filter(|n| *n > 0);
let max_history = max_history.filter(|n| *n > 0);
Ok(Config {
server_url,
cert,
key,
ca_bundle,
headers,
max_files,
max_trees,
max_history,
max_location_to_hash,
timeout,
debug,
correlator,
http_version,
validate_certs,
log_dir,
})
}
}
/// Parse headers from a JSON object.
fn parse_headers(headers: impl AsRef<str>) -> Result<HashMap<String, String>, Error> {
Ok(serde_json::from_str(headers.as_ref())
.context(format!("Not a valid JSON object: {:?}", headers.as_ref()))?)
}<|fim▁end|> |
/// Build the client. |
<|file_name|>mailer.js<|end_file_name|><|fim▁begin|>/*
MODULE USAGE:
var mailer = require('path/to/thisFile.js');
mailer.inviaEmail(nome, cognome, emailDestinatario, oggetto, corpoInHtml);
OR
mailer.inviaEmail(opzioniEmail);
where opzioniEmail is a JSON object shaped like this:
{
from: '"Display Name" <[email protected]>', // sender address
to: '[email protected]', // recipient address; may be a comma-separated list
subject: 'Subject', // the subject of the email
text: 'Hello', // plain-text-only email
html: '<h3>Hello</h3>' // HTML email
}
In the first form, the sender address is the developers' group address
(created ad hoc for the project), namely
[email protected]
If needed, just change the settings in the transporter.
Future work could be to make the transporter itself configurable
directly from the application.
*/
'use strict';
const nodemailer = require('nodemailer');
var jsonfile = require('jsonfile')
var file = './server/config/mailSettings.json';
var fs = require('fs');
var transporter;
//var passwords = require('../config/passwords');
var scriviFileDummy = function() {
var settaggi = {
host: 'smtp.example.org',
port: 465,
secure: 'true',
user: '[email protected]',
pass: 'passwordExample'
};
    // write the dummy settings file
jsonfile.writeFileSync(file, settaggi);
return settaggi;
};
var impostaTransporter = function() {
var settaggi = leggiPrivate();
    // clear the old transporter and recreate it
transporter = null;
transporter = nodemailer.createTransport({
host: settaggi.host,
port: settaggi.port,
secure: settaggi.secure,
auth: {
user: settaggi.user,
pass: settaggi.pass
}
});
console.log('transporter impostato');
}
var leggiPrivate = function() {
if (fs.existsSync(file)) {
console.log('File exists');
return jsonfile.readFileSync(file)
} else {
// file does not exist
return scriviFileDummy();
}
}
exports.leggiSettaggi = function() {
console.log('chiamata dalla api al mailer')
if (fs.existsSync(file)) {
console.log('File exists');
return jsonfile.readFileSync(file)
} else {
// file does not exist<|fim▁hole|> return scriviFileDummy();
}
}
exports.scriviSettaggi = function(obj) {
    // if there are no settings, create dummy ones
if (obj === null)
scriviFileDummy()
else jsonfile.writeFile(file, obj, function(err) {
if (err) return console.log('ERRORE NELLA SCRITTURA DEI SETTAGGI EMAIL');
impostaTransporter();
});
}
// Sends an email from a ready-made nodemailer options object
// (the one-argument form documented in the header comment).
var inviaEmailConOpzioni = function(opzioniEmail) {
if (transporter === null || transporter === undefined) {
        // populate it on the fly
impostaTransporter();
}
transporter.sendMail(opzioniEmail, (error, info) => {
if (error) {
return console.log(error);
}
console.log('Message %s sent: %s', info.messageId, info.response);
})
}
exports.inviaEmail = function(nome, cognome, emailDestinatario, oggetto, corpoInHtml) {
    // Support the single-argument form documented in the header: if the first
    // argument is an options object, send it directly. (Without this check the
    // two definitions of inviaEmail would overwrite each other.)
    if (typeof nome === 'object' && nome !== null) {
        return inviaEmailConOpzioni(nome);
    }
    /*
    THE JSON FORM OF THE OPTIONS:
    {
    from: '"Fred Foo 👻" <[email protected]>', // sender address
    to: '[email protected], [email protected]', // list of recipients
    subject: 'Hello ✔', // subject line
    text: 'Hello world ?', // plain-text email body
    html: '<b>Hello world ?</b>' // HTML email body
}
*/
if (transporter === null || transporter === undefined) {
        // populate it on the fly
impostaTransporter();
}
var opzioniEmail = {
from: '"Sito Tranquillo" <[email protected]>',
to: emailDestinatario,
subject: oggetto,
html: corpoInHtml
}
transporter.sendMail(opzioniEmail, (error, info) => {
if (error) {
return console.log(error);
}
console.log('Message %s sent: %s', info.messageId, info.response);
})
}<|fim▁end|> | |
<|file_name|>pyodbc_runner.py<|end_file_name|><|fim▁begin|>import pyodbc
import config
def main():
# str.format() treats a literal `{` specially, so it is escaped by doubling it as `{{`<|fim▁hole|> conn = pyodbc.connect(con_str)
cur = conn.cursor()
cur.execute("select item_name from item")
for c in cur.fetchall():
print(c[0]) #=> `ringo`, `みかん`
cur.close()
conn.close()
if __name__ == '__main__':
main()<|fim▁end|> | con_str = 'Driver={{Microsoft Access Driver (*.mdb, *.accdb)}};Dbq={0};'.format(config.PATH_ACCDB) |
<|file_name|>twoneurons.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# twoneurons.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import matplotlib
# matplotlib.use("macosx")
import pylab
import nest
import nest.voltage_trace
weight=20.0<|fim▁hole|>neuron2 = nest.Create("iaf_neuron")
voltmeter = nest.Create("voltmeter")
nest.SetStatus(neuron1, {"I_e": stim})
nest.Connect(neuron1, neuron2, syn_spec={'weight':weight, 'delay':delay})
nest.Connect(voltmeter, neuron2)
nest.Simulate(100.0)
nest.voltage_trace.from_device(voltmeter)
nest.voltage_trace.show()<|fim▁end|> | delay=1.0
stim=1000.0
neuron1 = nest.Create("iaf_neuron") |
<|file_name|>GsonConverter.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 P100 OG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.shiftconnects.android.auth.example.util;
import com.google.gson.Gson;
import com.google.gson.JsonParseException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Type;
import retrofit.converter.ConversionException;
import retrofit.converter.Converter;
import retrofit.mime.MimeUtil;
import retrofit.mime.TypedInput;
import retrofit.mime.TypedOutput;
/**
* A {@link Converter} which uses GSON for serialization and deserialization of entities.
*
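 * <p>A hypothetical registration sketch for Retrofit 1.x (the endpoint URL is
 * an illustrative value only):
 * <pre>{@code
 * RestAdapter adapter = new RestAdapter.Builder()
 *     .setEndpoint("https://api.example.com")
 *     .setConverter(new GsonConverter(new Gson()))
 *     .build();
 * }</pre>
 *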
* @author Jake Wharton ([email protected])
*/
public class GsonConverter implements Converter {
private final Gson gson;
private String charset;
/**
* Create an instance using the supplied {@link Gson} object for conversion. Encoding to JSON and
* decoding from JSON (when no charset is specified by a header) will use UTF-8.
*/
public GsonConverter(Gson gson) {
this(gson, "UTF-8");
}
/**
* Create an instance using the supplied {@link Gson} object for conversion. Encoding to JSON and
* decoding from JSON (when no charset is specified by a header) will use the specified charset.
*/
public GsonConverter(Gson gson, String charset) {
this.gson = gson;
this.charset = charset;
}
@Override public Object fromBody(TypedInput body, Type type) throws ConversionException {
String charset = this.charset;
if (body.mimeType() != null) {
charset = MimeUtil.parseCharset(body.mimeType(), charset);
}
InputStreamReader isr = null;
try {
isr = new InputStreamReader(body.in(), charset);
return gson.fromJson(isr, type);
} catch (IOException e) {
throw new ConversionException(e);
} catch (JsonParseException e) {
throw new ConversionException(e);
} finally {
if (isr != null) {
try {
isr.close();
} catch (IOException ignored) {
}
}
}
}
@Override public TypedOutput toBody(Object object) {
try {
return new JsonTypedOutput(gson.toJson(object).getBytes(charset), charset);
} catch (UnsupportedEncodingException e) {
throw new AssertionError(e);
}
}
private static class JsonTypedOutput implements TypedOutput {
private final byte[] jsonBytes;
private final String mimeType;
JsonTypedOutput(byte[] jsonBytes, String encode) {
this.jsonBytes = jsonBytes;
this.mimeType = "application/json; charset=" + encode;
}
@Override public String fileName() {
return null;
}
@Override public String mimeType() {
return mimeType;
}
@Override public long length() {
return jsonBytes.length;
}
@Override public void writeTo(OutputStream out) throws IOException {<|fim▁hole|><|fim▁end|> | out.write(jsonBytes);
}
}
} |
<|file_name|>rx-stream-pacing.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
'''
Make a stream emit at the pace of a slower stream
Pros:
Introduce a delay between events in an otherwise rapid stream (like range)
Cons:
When the stream being delayed runs out of events to push, the zipped stream
will keep pushing events, defined with the lambda fn passed to the zip operation.
'''
from time import sleep
from rx import Observable
# Generate an interval sequence, firing once each second
interval = Observable.interval(1000)
# 5..10
numbers = Observable.from_(range(5, 11))<|fim▁hole|> numbers,
    # Because we only push the elements of the `numbers` stream, the
    # zipped stream will keep sending empty events to the subscribers
    # as soon as `numbers` runs out of events
lambda _, n: n
)
sub1 = source.subscribe(
lambda v : print("Value published to observer 1: {0}".format(v)),
lambda e : print("Error! {0}".format(e)),
lambda : print("Completed!")
)
sub2 = source.subscribe(
lambda v : print("Value published to observer 2: {0}".format(v)),
lambda e : print("Error! {0}".format(e)),
lambda : print("Completed!")
)
# As noted above, we have to dispose the subscriptions before the `numbers`
# streams runs out, or the program will get stuck listening to empty events
sleep(5)
sub1.dispose()
sub2.dispose()
# => Value published to observer 1: 5
# => Value published to observer 2: 5
# => Value published to observer 1: 6
# => Value published to observer 2: 6
# => Value published to observer 2: 7
# => Value published to observer 1: 7
# => Value published to observer 2: 8
# => Value published to observer 1: 8<|fim▁end|> |
# Zip two streams together so it emits at the pace of the slowest stream
source = Observable.zip(
interval, |
<|file_name|>ItemListFragment.java<|end_file_name|><|fim▁begin|>package com.kimkha.finanvita.ui;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.util.SparseBooleanArray;
import android.view.*;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.ListView;
import com.kimkha.finanvita.R;
import com.kimkha.finanvita.adapters.AbstractCursorAdapter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
public abstract class ItemListFragment extends BaseFragment implements AdapterView.OnItemClickListener, LoaderManager.LoaderCallbacks<Cursor>
{
public static final String RESULT_EXTRA_ITEM_ID = ItemListFragment.class.getName() + ".RESULT_EXTRA_ITEM_ID";
public static final String RESULT_EXTRA_ITEM_IDS = ItemListFragment.class.getName() + ".RESULT_EXTRA_ITEM_IDS";
// -----------------------------------------------------------------------------------------------------------------
public static final int SELECTION_TYPE_NONE = 0;
public static final int SELECTION_TYPE_SINGLE = 1;
public static final int SELECTION_TYPE_MULTI = 2;
// -----------------------------------------------------------------------------------------------------------------
protected static final String ARG_SELECTION_TYPE = "ARG_SELECTION_TYPE";
protected static final String ARG_ITEM_IDS = "ARG_ITEM_IDS";
protected static final String ARG_IS_OPEN_DRAWER_LAYOUT = "ARG_IS_OPEN_DRAWER_LAYOUT";
// -----------------------------------------------------------------------------------------------------------------
protected static final String STATE_SELECTED_POSITIONS = "STATE_SELECTED_POSITIONS";
// -----------------------------------------------------------------------------------------------------------------
protected static final int LOADER_ITEMS = 1468;
// -----------------------------------------------------------------------------------------------------------------
protected ListView list_V;
protected View create_V;
// -----------------------------------------------------------------------------------------------------------------
protected AbstractCursorAdapter adapter;
protected int selectionType;
public static Bundle makeArgs(int selectionType, long[] itemIDs)
{
return makeArgs(selectionType, itemIDs, false);
}
public static Bundle makeArgs(int selectionType, long[] itemIDs, boolean isOpenDrawerLayout)
{
final Bundle args = new Bundle();
args.putInt(ARG_SELECTION_TYPE, selectionType);
args.putLongArray(ARG_ITEM_IDS, itemIDs);
args.putBoolean(ARG_IS_OPEN_DRAWER_LAYOUT, isOpenDrawerLayout);
return args;
}
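    // Hypothetical usage sketch: a concrete subclass would receive these
    // arguments before being shown, e.g.
    //   fragment.setArguments(ItemListFragment.makeArgs(SELECTION_TYPE_MULTI, selectedItemIds));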
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setHasOptionsMenu(true);
// Get arguments
final Bundle args = getArguments();
selectionType = args != null ? args.getInt(ARG_SELECTION_TYPE, SELECTION_TYPE_NONE) : SELECTION_TYPE_NONE;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
{
return inflater.inflate(R.layout.fragment_items_list, container, false);
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState)
{
super.onViewCreated(view, savedInstanceState);
// Get views
list_V = (ListView) view.findViewById(R.id.list_V);
}
<|fim▁hole|>
// Setup
if (selectionType == SELECTION_TYPE_NONE)
{
create_V = LayoutInflater.from(getActivity()).inflate(R.layout.li_create_new, list_V, false);
list_V.addFooterView(create_V);
}
adapter = createAdapter(getActivity());
list_V.setAdapter(adapter);
list_V.setOnItemClickListener(this);
if (getArguments().getBoolean(ARG_IS_OPEN_DRAWER_LAYOUT, false))
{
final int paddingHorizontal = getResources().getDimensionPixelSize(R.dimen.dynamic_margin_drawer_narrow_horizontal);
list_V.setPadding(paddingHorizontal, list_V.getPaddingTop(), paddingHorizontal, list_V.getPaddingBottom());
}
if (selectionType == SELECTION_TYPE_MULTI)
{
list_V.setChoiceMode(AbsListView.CHOICE_MODE_MULTIPLE);
if (savedInstanceState != null)
{
final ArrayList<Integer> selectedPositions = savedInstanceState.getIntegerArrayList(STATE_SELECTED_POSITIONS);
list_V.setTag(selectedPositions);
}
else
{
final long[] selectedIDs = getArguments().getLongArray(ARG_ITEM_IDS);
list_V.setTag(selectedIDs);
}
}
// Loader
getLoaderManager().initLoader(LOADER_ITEMS, null, this);
}
@Override
public void onSaveInstanceState(Bundle outState)
{
super.onSaveInstanceState(outState);
if (selectionType == SELECTION_TYPE_MULTI)
{
final ArrayList<Integer> selectedPositions = new ArrayList<Integer>();
final SparseBooleanArray listPositions = list_V.getCheckedItemPositions();
if (listPositions != null)
{
for (int i = 0; i < listPositions.size(); i++)
{
if (listPositions.get(listPositions.keyAt(i)))
selectedPositions.add(listPositions.keyAt(i));
}
}
outState.putIntegerArrayList(STATE_SELECTED_POSITIONS, selectedPositions);
}
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater)
{
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.items_list, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item)
{
switch (item.getItemId())
{
case R.id.action_create:
startItemCreate(getActivity(), item.getActionView());
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle bundle)
{
switch (id)
{
case LOADER_ITEMS:
return createItemsLoader();
}
return null;
}
@Override
public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor)
{
switch (cursorLoader.getId())
{
case LOADER_ITEMS:
bindItems(cursor);
break;
}
}
@Override
public void onLoaderReset(Loader<Cursor> cursorLoader)
{
switch (cursorLoader.getId())
{
case LOADER_ITEMS:
bindItems(null);
break;
}
}
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int position, long id)
{
switch (selectionType)
{
case SELECTION_TYPE_NONE:
if (position == adapter.getCount())
startItemCreate(getActivity(), view);
else
startItemDetails(getActivity(), id, position, adapter, adapter.getCursor(), view);
break;
case SELECTION_TYPE_SINGLE:
// Prepare extras
final Bundle extras = new Bundle();
onItemSelected(id, adapter, adapter.getCursor(), extras);
Intent data = new Intent();
data.putExtra(RESULT_EXTRA_ITEM_ID, id);
data.putExtras(extras);
getActivity().setResult(Activity.RESULT_OK, data);
getActivity().finish();
break;
case SELECTION_TYPE_MULTI:
adapter.setSelectedIDs(list_V.getCheckedItemIds());
break;
}
}
protected abstract AbstractCursorAdapter createAdapter(Context context);
protected abstract Loader<Cursor> createItemsLoader();
/**
* Called when item id along with extras should be returned to another activity. If only item id is necessary, you don't need to do anything. Just extra values should be put in outExtras.
*
* @param itemId Id of selected item. You don't need to put it to extras. This will be done automatically.
* @param adapter Adapter for convenience.
* @param c Cursor.
* @param outExtras Put all additional data in here.
*/
protected abstract void onItemSelected(long itemId, AbstractCursorAdapter adapter, Cursor c, Bundle outExtras);
/**
* Called when you should start item detail activity
*
* @param context Context.
* @param itemId Id of selected item.
* @param position Selected position.
* @param adapter Adapter for convenience.
* @param c Cursor.
* @param view
*/
protected abstract void startItemDetails(Context context, long itemId, int position, AbstractCursorAdapter adapter, Cursor c, View view);
/**
* Start item create activity here.
*/
protected abstract void startItemCreate(Context context, View view);
public long[] getSelectedItemIDs()
{
return list_V.getCheckedItemIds();
}
protected void bindItems(Cursor c)
{
boolean needUpdateSelectedIDs = adapter.getCount() == 0 && selectionType == SELECTION_TYPE_MULTI;
adapter.swapCursor(c);
if (needUpdateSelectedIDs && list_V.getTag() != null)
{
final Object tag = list_V.getTag();
if (tag instanceof ArrayList)
{
//noinspection unchecked
final ArrayList<Integer> selectedPositions = (ArrayList<Integer>) tag;
list_V.setTag(selectedPositions);
//noinspection ForLoopReplaceableByForEach
for (int i = 0; i < selectedPositions.size(); i++)
list_V.setItemChecked(selectedPositions.get(i), true);
}
else if (tag instanceof long[])
{
final long[] selectedIDs = (long[]) tag;
final Set<Long> selectedIDsSet = new HashSet<Long>();
//noinspection ForLoopReplaceableByForEach
for (int i = 0; i < selectedIDs.length; i++)
selectedIDsSet.add(selectedIDs[i]);
long itemId;
for (int i = 0; i < adapter.getCount(); i++)
{
itemId = list_V.getItemIdAtPosition(i);
if (selectedIDsSet.contains(itemId))
{
selectedIDsSet.remove(itemId);
list_V.setItemChecked(i, true);
if (selectedIDsSet.size() == 0)
break;
}
}
}
adapter.setSelectedIDs(list_V.getCheckedItemIds());
}
}
}<|fim▁end|> | @Override
public void onActivityCreated(Bundle savedInstanceState)
{
super.onActivityCreated(savedInstanceState); |
<|file_name|>Welcome.py<|end_file_name|><|fim▁begin|>from flask.ext import restful
from . import api
<|fim▁hole|> return api.send_static_file('index.html')<|fim▁end|> | class Welcome(restful.Resource):
def get(self): |
<|file_name|>landmarks_file.py<|end_file_name|><|fim▁begin|># ID-Fits
# Copyright (c) 2015 Institut National de l'Audiovisuel, INA, All rights reserved.
# <|fim▁hole|># License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import numpy as np
def readPtsLandmarkFile(filename, landmarks_number):
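    """Read landmarks from a pts-style file.

    Assumes the conventional pts layout: a 3-line header followed by one
    whitespace-separated x/y pair per line.
    """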
    with open(filename) as f:
        # Skip the 3-line header
        for _ in range(3):
            f.readline()
        # Read landmarks position
        landmarks = np.empty((landmarks_number, 2), dtype=np.float)
        for i in range(landmarks_number):
            landmarks[i] = np.array([float(x) for x in f.readline().split()])
return landmarks<|fim▁end|> | # This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public |
<|file_name|>field_html_tests.js<|end_file_name|><|fim▁begin|>odoo.define('web_editor.field_html_tests', function (require) {
"use strict";
var ajax = require('web.ajax');
var FormView = require('web.FormView');
var testUtils = require('web.test_utils');
var weTestUtils = require('web_editor.test_utils');
var core = require('web.core');
var Wysiwyg = require('web_editor.wysiwyg');
var MediaDialog = require('wysiwyg.widgets.MediaDialog');
var _t = core._t;
QUnit.module('web_editor', {}, function () {
QUnit.module('field html', {
beforeEach: function () {
this.data = weTestUtils.wysiwygData({
'note.note': {
fields: {
display_name: {
string: "Displayed name",
type: "char"
},
header: {
string: "Header",
type: "html",
required: true,
},
body: {
string: "Message",
type: "html"
},
},
records: [{
id: 1,
display_name: "first record",
header: "<p> <br> </p>",
body: "<p>toto toto toto</p><p>tata</p>",
}],
},
'mass.mailing': {
fields: {
display_name: {
string: "Displayed name",
type: "char"
},
body_html: {
string: "Message Body inline (to send)",
type: "html"
},
body_arch: {
string: "Message Body for edition",
type: "html"
},
},
records: [{
id: 1,
display_name: "first record",
body_html: "<div class='field_body' style='background-color: red;'>yep</div>",
body_arch: "<div class='field_body'>yep</div>",
}],
},
"ir.translation": {
fields: {
lang_code: {type: "char"},
value: {type: "char"},
res_id: {type: "integer"}
},
records: [{
id: 99,
res_id: 12,
value: '',
lang_code: 'en_US'
}]
},
});
testUtils.mock.patch(ajax, {
loadAsset: function (xmlId) {
if (xmlId === 'template.assets') {
return Promise.resolve({
cssLibs: [],
cssContents: ['body {background-color: red;}']
});
}
if (xmlId === 'template.assets_all_style') {
return Promise.resolve({
cssLibs: $('link[href]:not([type="image/x-icon"])').map(function () {
return $(this).attr('href');
}).get(),
cssContents: ['body {background-color: red;}']
});
}
throw 'Wrong template';
},
});
},
afterEach: function () {
testUtils.mock.unpatch(ajax);
},
}, function () {
QUnit.module('basic');
QUnit.test('simple rendering', async function (assert) {
assert.expect(3);
var form = await testUtils.createView({
View: FormView,
model: 'note.note',
data: this.data,
arch: '<form>' +
'<field name="body" widget="html" style="height: 100px"/>' +
'</form>',
res_id: 1,
});
var $field = form.$('.oe_form_field[name="body"]');
assert.strictEqual($field.children('.o_readonly').html(),
'<p>toto toto toto</p><p>tata</p>',
"should have rendered a div with correct content in readonly");
assert.strictEqual($field.attr('style'), 'height: 100px',
"should have applied the style correctly");
await testUtils.form.clickEdit(form);
await testUtils.nextTick();
$field = form.$('.oe_form_field[name="body"]');
assert.strictEqual($field.find('.note-editable').html(),
'<p>toto toto toto</p><p>tata</p>',
"should have rendered the field correctly in edit");
form.destroy();
});
QUnit.test('check if required field is set', async function (assert) {
assert.expect(1);
var form = await testUtils.createView({
View: FormView,
model: 'note.note',
data: this.data,
arch: '<form>' +
'<field name="header" widget="html" style="height: 100px" />' +
'</form>',
res_id: 1,
});
testUtils.mock.intercept(form, 'call_service', function (ev) {<|fim▁hole|> "sticky": undefined,
"title": "Invalid fields:",
"type": "danger"
});
}
}, true);
await testUtils.form.clickEdit(form);
await testUtils.nextTick();
await testUtils.dom.click(form.$('.o_form_button_save'));
form.destroy();
});
QUnit.test('colorpicker', async function (assert) {
assert.expect(6);
var form = await testUtils.createView({
View: FormView,
model: 'note.note',
data: this.data,
arch: '<form>' +
'<field name="body" widget="html" style="height: 100px"/>' +
'</form>',
res_id: 1,
});
// Summernote needs a RootWidget to set as parent of the ColorPaletteWidget. In the
// tests, there is no RootWidget, so we set it here to the parent of the form view, which
// can act as RootWidget, as it will honor rpc requests correctly (to the MockServer).
const rootWidget = odoo.__DEBUG__.services['root.widget'];
odoo.__DEBUG__.services['root.widget'] = form.getParent();
await testUtils.form.clickEdit(form);
var $field = form.$('.oe_form_field[name="body"]');
// select the text
var pText = $field.find('.note-editable p').first().contents()[0];
Wysiwyg.setRange(pText, 1, pText, 10);
// text is selected
var range = Wysiwyg.getRange($field[0]);
assert.strictEqual(range.sc, pText,
"should select the text");
async function openColorpicker(selector) {
const $colorpicker = $field.find(selector);
const openingProm = new Promise(resolve => {
$colorpicker.one('shown.bs.dropdown', () => resolve());
});
await testUtils.dom.click($colorpicker.find('button:first'));
return openingProm;
}
await openColorpicker('.note-toolbar .note-back-color-preview');
assert.ok($field.find('.note-back-color-preview').hasClass('show'),
"should display the color picker");
await testUtils.dom.click($field.find('.note-toolbar .note-back-color-preview .o_we_color_btn[style="background-color:#00FFFF;"]'));
assert.ok(!$field.find('.note-back-color-preview').hasClass('show'),
"should close the color picker");
assert.strictEqual($field.find('.note-editable').html(),
'<p>t<font style="background-color: rgb(0, 255, 255);">oto toto </font>toto</p><p>tata</p>',
"should have rendered the field correctly in edit");
var fontContent = $field.find('.note-editable font').contents()[0];
var rangeControl = {
sc: fontContent,
so: 0,
ec: fontContent,
eo: fontContent.length,
};
range = Wysiwyg.getRange($field[0]);
assert.deepEqual(_.pick(range, 'sc', 'so', 'ec', 'eo'), rangeControl,
"should select the text after color change");
// select the text
pText = $field.find('.note-editable p').first().contents()[2];
Wysiwyg.setRange(fontContent, 5, pText, 2);
// text is selected
await openColorpicker('.note-toolbar .note-back-color-preview');
await testUtils.dom.click($field.find('.note-toolbar .note-back-color-preview .o_we_color_btn.bg-o-color-3'));
assert.strictEqual($field.find('.note-editable').html(),
'<p>t<font style="background-color: rgb(0, 255, 255);">oto t</font><font style="" class="bg-o-color-3">oto </font><font class="bg-o-color-3" style="">to</font>to</p><p>tata</p>',
"should have rendered the field correctly in edit");
odoo.__DEBUG__.services['root.widget'] = rootWidget;
form.destroy();
});
QUnit.test('media dialog: image', async function (assert) {
assert.expect(1);
var form = await testUtils.createView({
View: FormView,
model: 'note.note',
data: this.data,
arch: '<form>' +
'<field name="body" widget="html" style="height: 100px"/>' +
'</form>',
res_id: 1,
mockRPC: function (route, args) {
if (args.model === 'ir.attachment') {
if (args.method === "generate_access_token") {
return Promise.resolve();
}
}
if (route.indexOf('/web/image/123/transparent.png') === 0) {
return Promise.resolve();
}
if (route.indexOf('/web_unsplash/fetch_images') === 0) {
return Promise.resolve();
}
if (route.indexOf('/web_editor/media_library_search') === 0) {
return Promise.resolve();
}
return this._super(route, args);
},
});
await testUtils.form.clickEdit(form);
var $field = form.$('.oe_form_field[name="body"]');
// the dialog load some xml assets
var defMediaDialog = testUtils.makeTestPromise();
testUtils.mock.patch(MediaDialog, {
init: function () {
this._super.apply(this, arguments);
this.opened(defMediaDialog.resolve.bind(defMediaDialog));
}
});
var pText = $field.find('.note-editable p').first().contents()[0];
Wysiwyg.setRange(pText, 1);
await testUtils.dom.click($field.find('.note-toolbar .note-insert button:has(.fa-file-image-o)'));
// load static xml file (dialog, media dialog, unsplash image widget)
await defMediaDialog;
await testUtils.dom.click($('.modal #editor-media-image .o_existing_attachment_cell:first').removeClass('d-none'));
var $editable = form.$('.oe_form_field[name="body"] .note-editable');
assert.ok($editable.find('img')[0].dataset.src.includes('/web/image/123/transparent.png'),
"should have the image in the dom");
testUtils.mock.unpatch(MediaDialog);
form.destroy();
});
QUnit.test('media dialog: icon', async function (assert) {
assert.expect(1);
var form = await testUtils.createView({
View: FormView,
model: 'note.note',
data: this.data,
arch: '<form>' +
'<field name="body" widget="html" style="height: 100px"/>' +
'</form>',
res_id: 1,
mockRPC: function (route, args) {
if (args.model === 'ir.attachment') {
return Promise.resolve([]);
}
if (route.indexOf('/web_unsplash/fetch_images') === 0) {
return Promise.resolve();
}
return this._super(route, args);
},
});
await testUtils.form.clickEdit(form);
var $field = form.$('.oe_form_field[name="body"]');
// the dialog load some xml assets
var defMediaDialog = testUtils.makeTestPromise();
testUtils.mock.patch(MediaDialog, {
init: function () {
this._super.apply(this, arguments);
this.opened(defMediaDialog.resolve.bind(defMediaDialog));
}
});
var pText = $field.find('.note-editable p').first().contents()[0];
Wysiwyg.setRange(pText, 1);
await testUtils.dom.click($field.find('.note-toolbar .note-insert button:has(.fa-file-image-o)'));
// load static xml file (dialog, media dialog, unsplash image widget)
await defMediaDialog;
$('.modal .tab-content .tab-pane').removeClass('fade'); // to be sync in test
await testUtils.dom.click($('.modal a[aria-controls="editor-media-icon"]'));
await testUtils.dom.click($('.modal #editor-media-icon .font-icons-icon.fa-glass'));
var $editable = form.$('.oe_form_field[name="body"] .note-editable');
assert.strictEqual($editable.data('wysiwyg').getValue(),
'<p>t<span class="fa fa-glass"></span>oto toto toto</p><p>tata</p>',
"should have the image in the dom");
testUtils.mock.unpatch(MediaDialog);
form.destroy();
});
QUnit.test('save', async function (assert) {
assert.expect(1);
var form = await testUtils.createView({
View: FormView,
model: 'note.note',
data: this.data,
arch: '<form>' +
'<field name="body" widget="html" style="height: 100px"/>' +
'</form>',
res_id: 1,
mockRPC: function (route, args) {
if (args.method === "write") {
assert.strictEqual(args.args[1].body,
'<p>t<font class="bg-o-color-3">oto toto </font>toto</p><p>tata</p>',
"should save the content");
}
return this._super.apply(this, arguments);
},
});
await testUtils.form.clickEdit(form);
var $field = form.$('.oe_form_field[name="body"]');
// select the text
var pText = $field.find('.note-editable p').first().contents()[0];
Wysiwyg.setRange(pText, 1, pText, 10);
// text is selected
async function openColorpicker(selector) {
const $colorpicker = $field.find(selector);
const openingProm = new Promise(resolve => {
$colorpicker.one('shown.bs.dropdown', () => resolve());
});
await testUtils.dom.click($colorpicker.find('button:first'));
return openingProm;
}
await openColorpicker('.note-toolbar .note-back-color-preview');
await testUtils.dom.click($field.find('.note-toolbar .note-back-color-preview .o_we_color_btn.bg-o-color-3'));
await testUtils.form.clickSave(form);
form.destroy();
});
QUnit.module('cssReadonly');
QUnit.test('rendering with iframe for readonly mode', async function (assert) {
assert.expect(3);
var form = await testUtils.createView({
View: FormView,
model: 'note.note',
data: this.data,
arch: '<form>' +
'<field name="body" widget="html" style="height: 100px" options="{\'cssReadonly\': \'template.assets\'}"/>' +
'</form>',
res_id: 1,
});
var $field = form.$('.oe_form_field[name="body"]');
var $iframe = $field.find('iframe.o_readonly');
await $iframe.data('loadDef');
var doc = $iframe.contents()[0];
assert.strictEqual($(doc).find('#iframe_target').html(),
'<p>toto toto toto</p><p>tata</p>',
"should have rendered a div with correct content in readonly");
assert.strictEqual(doc.defaultView.getComputedStyle(doc.body).backgroundColor,
'rgb(255, 0, 0)',
"should load the asset css");
await testUtils.form.clickEdit(form);
$field = form.$('.oe_form_field[name="body"]');
assert.strictEqual($field.find('.note-editable').html(),
'<p>toto toto toto</p><p>tata</p>',
"should have rendered the field correctly in edit");
form.destroy();
});
QUnit.module('translation');
QUnit.test('field html translatable', async function (assert) {
assert.expect(4);
var multiLang = _t.database.multi_lang;
_t.database.multi_lang = true;
this.data['note.note'].fields.body.translate = true;
var form = await testUtils.createView({
View: FormView,
model: 'note.note',
data: this.data,
arch: '<form string="Partners">' +
'<field name="body" widget="html"/>' +
'</form>',
res_id: 1,
mockRPC: function (route, args) {
if (route === '/web/dataset/call_button' && args.method === 'translate_fields') {
assert.deepEqual(args.args, ['note.note', 1, 'body'], "should call 'call_button' route");
return Promise.resolve({
domain: [],
context: {search_default_name: 'partnes,foo'},
});
}
if (route === "/web/dataset/call_kw/res.lang/get_installed") {
return Promise.resolve([["en_US"], ["fr_BE"]]);
}
return this._super.apply(this, arguments);
},
});
assert.strictEqual(form.$('.oe_form_field_html .o_field_translate').length, 0,
"should not have a translate button in readonly mode");
await testUtils.form.clickEdit(form);
var $button = form.$('.oe_form_field_html .o_field_translate');
assert.strictEqual($button.length, 1, "should have a translate button");
await testUtils.dom.click($button);
assert.containsOnce($(document), '.o_translation_dialog', 'should have a modal to translate');
form.destroy();
_t.database.multi_lang = multiLang;
});
});
});
});<|fim▁end|> | if (ev.data.service === 'notification') {
assert.deepEqual(ev.data.args[0], {
"className": undefined,
"message": "<ul><li>Header</li></ul>", |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import django.utils.timezone
import model_utils.fields
from django.db import migrations, models
from opaque_keys.edx.django.models import CourseKeyField, UsageKeyField
from lms.djangoapps.courseware.fields import UnsignedBigIntAutoField
<|fim▁hole|>class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='PersistentSubsectionGrade',
fields=[
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
('id', UnsignedBigIntAutoField(serialize=False, primary_key=True)),
('user_id', models.IntegerField()),
('course_id', CourseKeyField(max_length=255)),
('usage_key', UsageKeyField(max_length=255)),
('subtree_edited_date', models.DateTimeField(verbose_name=b'last content edit timestamp')),
('course_version', models.CharField(max_length=255, verbose_name=b'guid of latest course version', blank=True)),
('earned_all', models.FloatField()),
('possible_all', models.FloatField()),
('earned_graded', models.FloatField()),
('possible_graded', models.FloatField()),
],
),
migrations.CreateModel(
name='VisibleBlocks',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('blocks_json', models.TextField()),
('hashed', models.CharField(unique=True, max_length=100)),
],
),
migrations.AddField(
model_name='persistentsubsectiongrade',
name='visible_blocks',
field=models.ForeignKey(to='grades.VisibleBlocks', db_column=b'visible_blocks_hash', to_field=b'hashed', on_delete=models.CASCADE),
),
migrations.AlterUniqueTogether(
name='persistentsubsectiongrade',
unique_together=set([('course_id', 'user_id', 'usage_key')]),
),
]<|fim▁end|> | |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-06 20:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
<|fim▁hole|> migrations.CreateModel(
name='CodeRun',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('run_id', models.PositiveIntegerField()),
('run_status', models.CharField(max_length=255)),
('output', models.TextField()),
],
),
]<|fim▁end|> | operations = [ |
<|file_name|>p01_remove_doctype_mappers.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# License: GNU General Public License v3. See license.txt
import webnotes
def execute():
webnotes.conn.sql("""drop table if exists `tabDocType Mapper`""")
webnotes.conn.sql("""drop table if exists `tabTable Mapper Detail`""")<|fim▁hole|> webnotes.conn.sql("""drop table if exists `tabField Mapper Detail`""")
webnotes.delete_doc("DocType", "DocType Mapper")
webnotes.delete_doc("DocType", "Table Mapper Detail")
webnotes.delete_doc("DocType", "Field Mapper Detail")<|fim▁end|> | |
<|file_name|>universe.rs<|end_file_name|><|fim▁begin|>extern crate rand;
use std::vec::Vec;
use celestial::bodies::{Star, Planet};
use celestial::starsystem::{OrbitData, StarSystem};
pub struct Universe {
systems: Vec<StarSystem>
}
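// Hypothetical usage sketch: `generate` builds the universe eagerly, so the
// expected system count follows directly from its loop bound.
//   let universe = Universe::generate();
//   assert_eq!(universe.numSystems(), 99);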
impl Universe {
pub fn generate() -> Universe {
let mut u = Universe{
systems: vec![]
};
        // generate 99 star systems
        for _ in 1..100 {
            let s = Star::new();
// generate random number of planets
let r = rand::random::<u16>() % 10;
let mut ps = vec![];
            for _ in 1..r {
let p = Planet::generate_for(&s);
let o = OrbitData::generate_for(&s, &p);
ps.push((p,o));
}
let mut sys = StarSystem::new(s);
for (p, o) in ps {
sys.getRootOrbit().addBody(p,o);
}
u.systems.push(sys);
}
<|fim▁hole|> pub fn numSystems(&self) -> usize {
self.systems.len()
}
pub fn getSystem(&mut self, i: usize) -> &mut StarSystem {
self.systems.get_mut(i).unwrap()
}
}<|fim▁end|> | // return Universe
u
}
|
<|file_name|>ExtensionType.py<|end_file_name|><|fim▁begin|># Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.model.xs import *
from scap.model.xs.AnnotatedType import AnnotatedType
logger = logging.getLogger(__name__)
class ExtensionType(AnnotatedType):<|fim▁hole|> {'tag_name': 'all', 'list': 'tags', 'class': 'AllType', 'min': 0},
{'tag_name': 'choice', 'list': 'tags', 'class': 'ChoiceElement', 'min': 0},
{'tag_name': 'sequence', 'list': 'tags', 'class': 'GroupType', 'min': 0},
{'tag_name': 'attribute', 'list': 'tags', 'class': 'AttributeType', 'min': 0, 'max': None},
{'tag_name': 'attributeGroup', 'list': 'tags', 'class': 'AttributeGroupType', 'min': 0, 'max': None},
{'tag_name': 'anyAttribute', 'list': 'tags', 'class': 'WildcardType', 'min': 0},
],
'attributes': {
'base': {'type': 'QNameType', 'required': True},
}
}
def get_defs(self, schema, top_level):
logger.debug('Base: ' + self.base)
# TODO unable to map xmlns because ET doesn't retain it
base_ns, base_name = [self.base.partition(':')[i] for i in [0,2]]
top_level.set_super_module(base_ns)
top_level.set_super_class(base_name)
return super(ExtensionType, self).get_defs(schema, top_level)<|fim▁end|> | MODEL_MAP = {
'elements': [
{'tag_name': 'group', 'list': 'tags', 'class': 'GroupType', 'min': 0}, |
<|file_name|>test_queue_worker.py<|end_file_name|><|fim▁begin|>import os
import time
import ujson
<|fim▁hole|>from django.test import TestCase
from mock import patch
from typing import Any, Callable, Dict, List, Mapping, Tuple
from zerver.lib.test_helpers import simulated_queue_client
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import get_client, UserActivity
from zerver.worker import queue_processors
class WorkerTest(ZulipTestCase):
class FakeClient(object):
def __init__(self):
# type: () -> None
self.consumers = {} # type: Dict[str, Callable]
self.queue = [] # type: List[Tuple[str, Dict[str, Any]]]
def register_json_consumer(self, queue_name, callback):
# type: (str, Callable) -> None
self.consumers[queue_name] = callback
def start_consuming(self):
# type: () -> None
for queue_name, data in self.queue:
callback = self.consumers[queue_name]
callback(data)
def test_mirror_worker(self):
# type: () -> None
fake_client = self.FakeClient()
data = [
dict(
message=u'\xf3test',
time=time.time(),
rcpt_to=self.example_email('hamlet'),
),
dict(
message='\xf3test',
time=time.time(),
rcpt_to=self.example_email('hamlet'),
),
dict(
message='test',
time=time.time(),
rcpt_to=self.example_email('hamlet'),
),
]
for element in data:
fake_client.queue.append(('email_mirror', element))
with patch('zerver.worker.queue_processors.mirror_email'):
with simulated_queue_client(lambda: fake_client):
worker = queue_processors.MirrorWorker()
worker.setup()
worker.start()
def test_UserActivityWorker(self):
# type: () -> None
fake_client = self.FakeClient()
user = self.example_user('hamlet')
UserActivity.objects.filter(
user_profile = user.id,
client = get_client('ios')
).delete()
data = dict(
user_profile_id = user.id,
client = 'ios',
time = time.time(),
query = 'send_message'
)
fake_client.queue.append(('user_activity', data))
with simulated_queue_client(lambda: fake_client):
worker = queue_processors.UserActivityWorker()
worker.setup()
worker.start()
activity_records = UserActivity.objects.filter(
user_profile = user.id,
client = get_client('ios')
)
        self.assertEqual(len(activity_records), 1)
        self.assertEqual(activity_records[0].count, 1)
def test_error_handling(self):
# type: () -> None
processed = []
@queue_processors.assign_queue('unreliable_worker')
class UnreliableWorker(queue_processors.QueueProcessingWorker):
def consume(self, data):
# type: (Mapping[str, Any]) -> None
if data["type"] == 'unexpected behaviour':
raise Exception('Worker task not performing as expected!')
processed.append(data["type"])
def _log_problem(self):
# type: () -> None
# keep the tests quiet
pass
fake_client = self.FakeClient()
for msg in ['good', 'fine', 'unexpected behaviour', 'back to normal']:
fake_client.queue.append(('unreliable_worker', {'type': msg}))
fn = os.path.join(settings.QUEUE_ERROR_DIR, 'unreliable_worker.errors')
try:
os.remove(fn)
except OSError: # nocoverage # error handling for the directory not existing
pass
with simulated_queue_client(lambda: fake_client):
worker = UnreliableWorker()
worker.setup()
worker.start()
self.assertEqual(processed, ['good', 'fine', 'back to normal'])
line = open(fn).readline().strip()
event = ujson.loads(line.split('\t')[1])
self.assertEqual(event["type"], 'unexpected behaviour')
def test_worker_noname(self):
# type: () -> None
class TestWorker(queue_processors.QueueProcessingWorker):
def __init__(self):
# type: () -> None
super(TestWorker, self).__init__()
def consume(self, data):
# type: (Mapping[str, Any]) -> None
pass # nocoverage # this is intentionally not called
with self.assertRaises(queue_processors.WorkerDeclarationException):
TestWorker()
def test_worker_noconsume(self):
# type: () -> None
@queue_processors.assign_queue('test_worker')
class TestWorker(queue_processors.QueueProcessingWorker):
def __init__(self):
# type: () -> None
super(TestWorker, self).__init__()
with self.assertRaises(queue_processors.WorkerDeclarationException):
worker = TestWorker()
worker.consume({})<|fim▁end|> | from django.conf import settings
from django.http import HttpResponse |
<|file_name|>datacite.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Utilities for working with DataCite metadata."""
from __future__ import absolute_import
import re
import urllib2
from invenio_utils.xmlDict import ElementTree, XmlDictConfig
__all__ = (
'DataciteMetadata',
)
class DataciteMetadata(object):
"""Helper class for working with DataCite metadata."""
def __init__(self, doi):
"""Initialize object."""
self.url = "http://data.datacite.org/application/x-datacite+xml/"
self.error = False
try:
data = urllib2.urlopen(self.url + doi).read()
except urllib2.HTTPError:
self.error = True
if not self.error:
# Clean the xml for parsing
            data = re.sub(r'<\?xml.*\?>', '', data, count=1)
            # Remove the resource tags
            data = re.sub(r'<resource .*xsd">', '', data)
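            # Re-wrap in a <datacite> root; the slice below drops the
            # trailing </resource> tag (11 characters).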
self.data = '<?xml version="1.0"?><datacite>' + \
data[0:len(data) - 11] + '</datacite>'
self.root = ElementTree.XML(self.data)
self.xml = XmlDictConfig(self.root)
def get_creators(self, attribute='creatorName'):
"""Get DataCite creators."""
if 'creators' in self.xml:
if isinstance(self.xml['creators']['creator'], list):
return [c[attribute] for c in self.xml['creators']['creator']]
else:
return self.xml['creators']['creator'][attribute]
return None
def get_titles(self):
"""Get DataCite titles."""
if 'titles' in self.xml:
return self.xml['titles']['title']
return None
def get_publisher(self):
"""Get DataCite publisher."""
if 'publisher' in self.xml:
return self.xml['publisher']
return None
def get_dates(self):
"""Get DataCite dates."""
if 'dates' in self.xml:
if isinstance(self.xml['dates']['date'], dict):
return self.xml['dates']['date'].values()[0]
return self.xml['dates']['date']
return None
def get_publication_year(self):
"""Get DataCite publication year."""
if 'publicationYear' in self.xml:
return self.xml['publicationYear']
return None
def get_language(self):
"""Get DataCite language."""
if 'language' in self.xml:
return self.xml['language']
return None
def get_related_identifiers(self):
"""Get DataCite related identifiers."""
pass
def get_description(self, description_type='Abstract'):
"""Get DataCite description."""<|fim▁hole|> for description in self.xml['descriptions']['description']:
if description_type in description:
return description[description_type]
elif isinstance(self.xml['descriptions']['description'], dict):
description = self.xml['descriptions']['description']
if description_type in description:
return description[description_type]
elif len(description) == 1:
# return the only description
return description.values()[0]
return None
def get_rights(self):
"""Get DataCite rights."""
        if 'rights' in self.xml:
return self.xml['rights']
return None<|fim▁end|> | if 'descriptions' in self.xml:
if isinstance(self.xml['descriptions']['description'], list): |
<|file_name|>cleanup_addon_premium.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand
import amo
from mkt.webapps.models import AddonPremium
class Command(BaseCommand):
help = 'Clean up existing AddonPremium objects for free apps.'
def handle(self, *args, **options):
(AddonPremium.objects.filter(addon__premium_type__in=amo.ADDON_FREES)<|fim▁hole|><|fim▁end|> | .delete()) |
<|file_name|>aixc++.py<|end_file_name|><|fim▁begin|>"""SCons.Tool.aixc++
Tool-specific initialization for IBM xlC / Visual Age C++ compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#<|fim▁hole|># KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/aixc++.py rel_2.3.5:3329:275e75118ad4 2015/06/20 11:18:26 bdbaddog"
import os.path
import SCons.Platform.aix
cplusplus = __import__('c++', globals(), locals(), [])
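# AIX filesets probed to locate the Visual Age / xlC installation.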
packages = ['vacpp.cmp.core', 'vacpp.cmp.batch', 'vacpp.cmp.C', 'ibmcxx.cmp']
def get_xlc(env):
xlc = env.get('CXX', 'xlC')
return SCons.Platform.aix.get_xlc(env, xlc, packages)
def generate(env):
"""Add Builders and construction variables for xlC / Visual Age
suite to an Environment."""
path, _cxx, version = get_xlc(env)
if path and _cxx:
_cxx = os.path.join(path, _cxx)
if 'CXX' not in env:
env['CXX'] = _cxx
cplusplus.generate(env)
if version:
env['CXXVERSION'] = version
def exists(env):
path, _cxx, version = get_xlc(env)
if path and _cxx:
xlc = os.path.join(path, _cxx)
if os.path.exists(xlc):
return xlc
return None
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|> | # The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY |
<|file_name|>RepoUserHeatmap.js<|end_file_name|><|fim▁begin|>import React, {PropTypes} from 'react';
import L from 'leaflet';
import gh from '../api/GitHubApi';
import RaisedButton from 'material-ui/RaisedButton';
const REPO_TIMESPAN = {
ALLTIME: 0,
THIRTYDAYS: 1,
SIXTYDAYS: 2,
ONEYEAR: 3
};
const defaultMapConfig = {
options: {
center: [
39.7589, -84.1916
],
zoomControl: false,
zoom: 4,
maxZoom: 20,
minZoom: 2,
scrollwheel: false,
infoControl: false,
attributionControl: false
},
tileLayer: {
uri: 'http://{s}.tiles.wmflabs.org/bw-mapnik/{z}/{x}/{y}.png',
options: {
maxZoom: 18,
id: ''
}
}
};
class RepoUserHeatmap extends React.Component {
constructor(props, context) {
super(props, context);
this.state = {
timespan: REPO_TIMESPAN.THIRTYDAYS,
data: []
};
this.initializeMap = this.initializeMap.bind(this);
}
componentDidMount() {
this.initializeMap();
this.updateData();
gh.getTopRepos().then(data => {
console.log('=== REPOS ===');
console.log(data);
return gh.getContributors(data.data[0].full_name);
}).then(contribs => {
console.log('=== CONTRIBS ===');
console.log(contribs);
return gh.getUser(contribs.data[0].login);
}).then(user => {
console.log('=== USER ===');
console.log(user);
return gh.getRateLimit();
}).then(limit => {
console.log('=== RATE LIMIT ===');
console.log(limit);
}).catch(err => {
console.log('ERROR:');
console.log(err);
});
}
componentWillUnmount() {
this.map = null;
}
initializeMap() {
if (this.map) {
return;
}
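    // Fall back to the bundled OSM-style defaults when no map options
    // or layers are passed in via props.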
this.map = L.map(this.mapDiv, this.props.mapOptions || defaultMapConfig.options);
if (this.props.mapLayers && this.props.mapLayers.length > 0) {
for (let i=0; i < this.props.mapLayers.length; i++) {
this.props.mapLayers[i].addTo(this.map);
}
}
else {
L.tileLayer(defaultMapConfig.tileLayer.uri, defaultMapConfig.tileLayer.options).addTo(this.map);
}
}
updateData() {
}
<|fim▁hole|> <RaisedButton label="Default" />
</div>
);
}
}
RepoUserHeatmap.propTypes = {
mapOptions: PropTypes.object,
mapLayers: PropTypes.array
};
export default RepoUserHeatmap;<|fim▁end|> | render() {
return (
<div className="map-container">
<div className="os-map" ref={(div) => { this.mapDiv = div; }}></div> |
<|file_name|>editprefixers.js<|end_file_name|><|fim▁begin|><|fim▁hole|>describe('Controller: EditprefixersCtrl', function () {
// load the controller's module
beforeEach(module('grafterizerApp'));
var EditprefixersCtrl,
scope;
// Initialize the controller and a mock scope
beforeEach(inject(function ($controller, $rootScope) {
scope = $rootScope.$new();
EditprefixersCtrl = $controller('EditprefixersCtrl', {
$scope: scope
});
}));
it('should attach a list of awesomeThings to the scope', function () {
expect(scope.awesomeThings.length).toBe(3);
});
});<|fim▁end|> | 'use strict';
|
<|file_name|>overviewpage.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2011-2014 The Bitcoin developers
// Copyright (c) 2014-2015 The Gamblr developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "overviewpage.h"
#include "ui_overviewpage.h"
#include "bitcoinunits.h"
#include "clientmodel.h"
#include "darksend.h"
#include "darksendconfig.h"
#include "guiconstants.h"
#include "guiutil.h"
#include "optionsmodel.h"
#include "transactionfilterproxy.h"
#include "transactiontablemodel.h"
#include "walletmodel.h"
#include "init.h"
#include <QAbstractItemDelegate>
#include <QPainter>
#include <QTimer>
#define DECORATION_SIZE 48
#define ICON_OFFSET 16
#define NUM_ITEMS 5
class TxViewDelegate : public QAbstractItemDelegate
{
Q_OBJECT
public:
TxViewDelegate(): QAbstractItemDelegate(), unit(BitcoinUnits::CHIP)
{
}
inline void paint(QPainter *painter, const QStyleOptionViewItem &option,
const QModelIndex &index ) const
{
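        // Draw the type icon, the address line and the amount/date line;
        // the amount color reflects sign and confirmation state.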
painter->save();
QIcon icon = qvariant_cast<QIcon>(index.data(Qt::DecorationRole));
QRect mainRect = option.rect;
mainRect.moveLeft(ICON_OFFSET);
QRect decorationRect(mainRect.topLeft(), QSize(DECORATION_SIZE, DECORATION_SIZE));
int xspace = DECORATION_SIZE + 8;
int ypad = 6;
int halfheight = (mainRect.height() - 2*ypad)/2;
QRect amountRect(mainRect.left() + xspace, mainRect.top()+ypad, mainRect.width() - xspace - ICON_OFFSET, halfheight);
QRect addressRect(mainRect.left() + xspace, mainRect.top()+ypad+halfheight, mainRect.width() - xspace, halfheight);
icon.paint(painter, decorationRect);
QDateTime date = index.data(TransactionTableModel::DateRole).toDateTime();
QString address = index.data(Qt::DisplayRole).toString();
qint64 amount = index.data(TransactionTableModel::AmountRole).toLongLong();
bool confirmed = index.data(TransactionTableModel::ConfirmedRole).toBool();
QVariant value = index.data(Qt::ForegroundRole);
QColor foreground = option.palette.color(QPalette::Text);
if(value.canConvert<QBrush>())
{
QBrush brush = qvariant_cast<QBrush>(value);
foreground = brush.color();
}
painter->setPen(foreground);
painter->drawText(addressRect, Qt::AlignLeft|Qt::AlignVCenter, address);
if(amount < 0)
{
foreground = COLOR_NEGATIVE;
}
else if(!confirmed)
{
foreground = COLOR_UNCONFIRMED;
}
else
{
foreground = option.palette.color(QPalette::Text);
}
painter->setPen(foreground);
QString amountText = BitcoinUnits::formatWithUnit(unit, amount, true);
if(!confirmed)
{
amountText = QString("[") + amountText + QString("]");
}
painter->drawText(amountRect, Qt::AlignRight|Qt::AlignVCenter, amountText);
painter->setPen(option.palette.color(QPalette::Text));
painter->drawText(amountRect, Qt::AlignLeft|Qt::AlignVCenter, GUIUtil::dateTimeStr(date));
painter->restore();
}
inline QSize sizeHint(const QStyleOptionViewItem &option, const QModelIndex &index) const
{
return QSize(DECORATION_SIZE, DECORATION_SIZE);
}
int unit;
};
#include "overviewpage.moc"
OverviewPage::OverviewPage(QWidget *parent) :
QWidget(parent),
ui(new Ui::OverviewPage),
clientModel(0),
walletModel(0),
currentBalance(-1),
currentUnconfirmedBalance(-1),
currentImmatureBalance(-1),
txdelegate(new TxViewDelegate()),
filter(0)
{
ui->setupUi(this);
// Recent transactions
ui->listTransactions->setItemDelegate(txdelegate);
ui->listTransactions->setIconSize(QSize(DECORATION_SIZE, DECORATION_SIZE));
ui->listTransactions->setMinimumHeight(NUM_ITEMS * (DECORATION_SIZE + 2));
ui->listTransactions->setAttribute(Qt::WA_MacShowFocusRect, false);
connect(ui->listTransactions, SIGNAL(clicked(QModelIndex)), this, SLOT(handleTransactionClicked(QModelIndex)));
// init "out of sync" warning labels
ui->labelWalletStatus->setText("(" + tr("out of sync") + ")");
ui->labelDarksendSyncStatus->setText("(" + tr("out of sync") + ")");
ui->labelTransactionsStatus->setText("(" + tr("out of sync") + ")");
showingDarkSendMessage = 0;
darksendActionCheck = 0;
lastNewBlock = 0;
if(fLiteMode){
ui->frameDarksend->setVisible(false);
} else if(!fMasterNode) {
timer = new QTimer(this);
connect(timer, SIGNAL(timeout()), this, SLOT(darkSendStatus()));
timer->start(333);
}
if(fMasterNode){
ui->toggleDarksend->setText("(" + tr("Disabled") + ")");
ui->darksendAuto->setText("(" + tr("Disabled") + ")");
ui->darksendReset->setText("(" + tr("Disabled") + ")");
ui->frameDarksend->setEnabled(false);
}else if(!fEnableDarksend){
ui->toggleDarksend->setText(tr("Start Darksend Mixing"));
} else {
ui->toggleDarksend->setText(tr("Stop Darksend Mixing"));
}
// start with displaying the "out of sync" warnings
showOutOfSyncWarning(true);
}
void OverviewPage::handleTransactionClicked(const QModelIndex &index)
{
if(filter)
emit transactionClicked(filter->mapToSource(index));
}
OverviewPage::~OverviewPage()
{
if(!fLiteMode && !fMasterNode) disconnect(timer, SIGNAL(timeout()), this, SLOT(darkSendStatus()));
delete ui;
}
void OverviewPage::setBalance(qint64 balance, qint64 unconfirmedBalance, qint64 immatureBalance, qint64 anonymizedBalance)
{
int unit = walletModel->getOptionsModel()->getDisplayUnit();
currentBalance = balance;
currentUnconfirmedBalance = unconfirmedBalance;
currentImmatureBalance = immatureBalance;
currentAnonymizedBalance = anonymizedBalance;
ui->labelBalance->setText(BitcoinUnits::formatWithUnit(unit, balance));
ui->labelUnconfirmed->setText(BitcoinUnits::formatWithUnit(unit, unconfirmedBalance));
ui->labelImmature->setText(BitcoinUnits::formatWithUnit(unit, immatureBalance));
ui->labelAnonymized->setText(BitcoinUnits::formatWithUnit(unit, anonymizedBalance));
ui->labelTotal->setText(BitcoinUnits::formatWithUnit(unit, balance + unconfirmedBalance + immatureBalance));
// only show immature (newly mined) balance if it's non-zero, so as not to complicate things
// for the non-mining users
bool showImmature = immatureBalance != 0;
ui->labelImmature->setVisible(showImmature);
ui->labelImmatureText->setVisible(showImmature);
if(cachedTxLocks != nCompleteTXLocks){
cachedTxLocks = nCompleteTXLocks;
ui->listTransactions->update();
}
}
void OverviewPage::setClientModel(ClientModel *model)
{
this->clientModel = model;
if(model)
{
// Show warning if this is a prerelease version
connect(model, SIGNAL(alertsChanged(QString)), this, SLOT(updateAlerts(QString)));
updateAlerts(model->getStatusBarWarnings());
}
}
void OverviewPage::setWalletModel(WalletModel *model)
{
this->walletModel = model;
if(model && model->getOptionsModel())
{
// Set up transaction list
filter = new TransactionFilterProxy();
filter->setSourceModel(model->getTransactionTableModel());
filter->setLimit(NUM_ITEMS);
filter->setDynamicSortFilter(true);
filter->setSortRole(Qt::EditRole);
filter->setShowInactive(false);
filter->sort(TransactionTableModel::Status, Qt::DescendingOrder);
ui->listTransactions->setModel(filter);
ui->listTransactions->setModelColumn(TransactionTableModel::ToAddress);
// Keep up to date with wallet
setBalance(model->getBalance(), model->getUnconfirmedBalance(), model->getImmatureBalance(), model->getAnonymizedBalance());
connect(model, SIGNAL(balanceChanged(qint64, qint64, qint64, qint64)), this, SLOT(setBalance(qint64, qint64, qint64, qint64)));
connect(model->getOptionsModel(), SIGNAL(displayUnitChanged(int)), this, SLOT(updateDisplayUnit()));
connect(ui->darksendAuto, SIGNAL(clicked()), this, SLOT(darksendAuto()));
connect(ui->darksendReset, SIGNAL(clicked()), this, SLOT(darksendReset()));
connect(ui->toggleDarksend, SIGNAL(clicked()), this, SLOT(toggleDarksend()));
}
// update the display unit, to not use the default ("CHIP")
updateDisplayUnit();
}
void OverviewPage::updateDisplayUnit()
{
if(walletModel && walletModel->getOptionsModel())
{
if(currentBalance != -1)
setBalance(currentBalance, currentUnconfirmedBalance, currentImmatureBalance, currentAnonymizedBalance);
// Update txdelegate->unit with the current unit
txdelegate->unit = walletModel->getOptionsModel()->getDisplayUnit();<|fim▁hole|>
ui->listTransactions->update();
}
}
void OverviewPage::updateAlerts(const QString &warnings)
{
this->ui->labelAlerts->setVisible(!warnings.isEmpty());
this->ui->labelAlerts->setText(warnings);
}
void OverviewPage::showOutOfSyncWarning(bool fShow)
{
ui->labelWalletStatus->setVisible(fShow);
ui->labelDarksendSyncStatus->setVisible(fShow);
ui->labelTransactionsStatus->setVisible(fShow);
}
void OverviewPage::updateDarksendProgress()
{
if(IsInitialBlockDownload()) return;
int64_t nBalance = pwalletMain->GetBalance();
if(nBalance == 0)
{
ui->darksendProgress->setValue(0);
QString s(tr("No inputs detected"));
ui->darksendProgress->setToolTip(s);
return;
}
//get denominated unconfirmed inputs
if(pwalletMain->GetDenominatedBalance(true, true) > 0)
{
QString s(tr("Found unconfirmed denominated outputs, will wait till they confirm to recalculate."));
ui->darksendProgress->setToolTip(s);
return;
}
//Get the anon threshold
int64_t nMaxToAnonymize = nAnonymizeGamblrAmount*COIN;
// If it's more than the wallet amount, limit to that.
if(nMaxToAnonymize > nBalance) nMaxToAnonymize = nBalance;
if(nMaxToAnonymize == 0) return;
// calculate parts of the progress, each of them shouldn't be higher than 1:
// mixing progress of denominated balance
int64_t denominatedBalance = pwalletMain->GetDenominatedBalance();
float denomPart = 0;
if(denominatedBalance > 0)
{
denomPart = (float)pwalletMain->GetNormalizedAnonymizedBalance() / denominatedBalance;
denomPart = denomPart > 1 ? 1 : denomPart;
if(denomPart == 1 && nMaxToAnonymize > denominatedBalance) nMaxToAnonymize = denominatedBalance;
}
// % of fully anonymized balance
float anonPart = 0;
if(nMaxToAnonymize > 0)
{
anonPart = (float)pwalletMain->GetAnonymizedBalance() / nMaxToAnonymize;
// if anonPart is > 1 then we are done, make denomPart equal 1 too
anonPart = anonPart > 1 ? (denomPart = 1, 1) : anonPart;
}
// apply some weights to them (sum should be <=100) and calculate the whole progress
int progress = 80 * denomPart + 20 * anonPart;
if(progress >= 100) progress = 100;
ui->darksendProgress->setValue(progress);
std::ostringstream convert;
convert << "Progress: " << progress << "%, inputs have an average of " << pwalletMain->GetAverageAnonymizedRounds() << " of " << nDarksendRounds << " rounds";
QString s(convert.str().c_str());
ui->darksendProgress->setToolTip(s);
}
void OverviewPage::darkSendStatus()
{
int nBestHeight = chainActive.Tip()->nHeight;
if(nBestHeight != darkSendPool.cachedNumBlocks)
{
//we we're processing lots of blocks, we'll just leave
if(GetTime() - lastNewBlock < 10) return;
lastNewBlock = GetTime();
updateDarksendProgress();
QString strSettings(" " + tr("Rounds"));
strSettings.prepend(QString::number(nDarksendRounds)).prepend(" / ");
strSettings.prepend(BitcoinUnits::formatWithUnit(
walletModel->getOptionsModel()->getDisplayUnit(),
nAnonymizeGamblrAmount * COIN)
);
ui->labelAmountRounds->setText(strSettings);
}
if(!fEnableDarksend) {
if(nBestHeight != darkSendPool.cachedNumBlocks)
{
darkSendPool.cachedNumBlocks = nBestHeight;
ui->darksendEnabled->setText(tr("Disabled"));
ui->darksendStatus->setText("");
ui->toggleDarksend->setText(tr("Start Darksend Mixing"));
}
return;
}
// check darksend status and unlock if needed
if(nBestHeight != darkSendPool.cachedNumBlocks)
{
// Balance and number of transactions might have changed
darkSendPool.cachedNumBlocks = nBestHeight;
/* *******************************************************/
ui->darksendEnabled->setText(tr("Enabled"));
}
int state = darkSendPool.GetState();
int entries = darkSendPool.GetEntriesCount();
int accepted = darkSendPool.GetLastEntryAccepted();
/* ** @TODO this string creation really needs some clean ups ---vertoe ** */
std::ostringstream convert;
if(state == POOL_STATUS_IDLE) {
convert << tr("Darksend is idle.").toStdString();
} else if(state == POOL_STATUS_ACCEPTING_ENTRIES) {
if(entries == 0) {
if(darkSendPool.strAutoDenomResult.size() == 0){
convert << tr("Mixing in progress...").toStdString();
} else {
convert << darkSendPool.strAutoDenomResult;
}
showingDarkSendMessage = 0;
} else if (accepted == 1) {
convert << tr("Darksend request complete: Your transaction was accepted into the pool!").toStdString();
if(showingDarkSendMessage % 10 > 8) {
darkSendPool.lastEntryAccepted = 0;
showingDarkSendMessage = 0;
}
} else {
if(showingDarkSendMessage % 70 <= 40) convert << tr("Submitted following entries to masternode:").toStdString() << " " << entries << "/" << darkSendPool.GetMaxPoolTransactions();
else if(showingDarkSendMessage % 70 <= 50) convert << tr("Submitted to masternode, waiting for more entries").toStdString() << " (" << entries << "/" << darkSendPool.GetMaxPoolTransactions() << " ) .";
else if(showingDarkSendMessage % 70 <= 60) convert << tr("Submitted to masternode, waiting for more entries").toStdString() << " (" << entries << "/" << darkSendPool.GetMaxPoolTransactions() << " ) ..";
else if(showingDarkSendMessage % 70 <= 70) convert << tr("Submitted to masternode, waiting for more entries").toStdString() << " (" << entries << "/" << darkSendPool.GetMaxPoolTransactions() << " ) ...";
}
} else if(state == POOL_STATUS_SIGNING) {
if(showingDarkSendMessage % 70 <= 10) convert << tr("Found enough users, signing ...").toStdString();
else if(showingDarkSendMessage % 70 <= 20) convert << tr("Found enough users, signing ( waiting").toStdString() << ". )";
else if(showingDarkSendMessage % 70 <= 30) convert << tr("Found enough users, signing ( waiting").toStdString() << ".. )";
else if(showingDarkSendMessage % 70 <= 40) convert << tr("Found enough users, signing ( waiting").toStdString() << "... )";
} else if(state == POOL_STATUS_TRANSMISSION) {
convert << tr("Transmitting final transaction.").toStdString();
} else if (state == POOL_STATUS_FINALIZE_TRANSACTION) {
convert << tr("Finalizing transaction.").toStdString();
} else if(state == POOL_STATUS_ERROR) {
convert << tr("Darksend request incomplete:").toStdString() << " " << darkSendPool.lastMessage << ". " << tr("Will retry...").toStdString();
} else if(state == POOL_STATUS_SUCCESS) {
convert << tr("Darksend request complete:").toStdString() << " " << darkSendPool.lastMessage;
} else if(state == POOL_STATUS_QUEUE) {
if(showingDarkSendMessage % 70 <= 50) convert << tr("Submitted to masternode, waiting in queue").toStdString() << " .";
else if(showingDarkSendMessage % 70 <= 60) convert << tr("Submitted to masternode, waiting in queue").toStdString() << " ..";
else if(showingDarkSendMessage % 70 <= 70) convert << tr("Submitted to masternode, waiting in queue").toStdString() << " ...";
} else {
convert << tr("Unknown state:").toStdString() << " id = " << state;
}
if(state == POOL_STATUS_ERROR || state == POOL_STATUS_SUCCESS) darkSendPool.Check();
QString s(convert.str().c_str());
s = tr("Last Darksend message:\n") + s;
if(s != ui->darksendStatus->text())
LogPrintf("Last Darksend message: %s\n", convert.str().c_str());
ui->darksendStatus->setText(s);
if(darkSendPool.sessionDenom == 0){
ui->labelSubmittedDenom->setText(tr("N/A"));
} else {
std::string out;
darkSendPool.GetDenominationsToString(darkSendPool.sessionDenom, out);
QString s2(out.c_str());
ui->labelSubmittedDenom->setText(s2);
}
showingDarkSendMessage++;
darksendActionCheck++;
// Get DarkSend Denomination Status
}
void OverviewPage::darksendAuto(){
darkSendPool.DoAutomaticDenominating();
}
void OverviewPage::darksendReset(){
darkSendPool.Reset();
QMessageBox::warning(this, tr("Darksend"),
tr("Darksend was successfully reset."),
QMessageBox::Ok, QMessageBox::Ok);
}
void OverviewPage::toggleDarksend(){
if(!fEnableDarksend){
int64_t balance = pwalletMain->GetBalance();
float minAmount = 1.49 * COIN;
if(balance < minAmount){
QString strMinAmount(
BitcoinUnits::formatWithUnit(
walletModel->getOptionsModel()->getDisplayUnit(),
minAmount));
QMessageBox::warning(this, tr("Darksend"),
tr("Darksend requires at least %1 to use.").arg(strMinAmount),
QMessageBox::Ok, QMessageBox::Ok);
return;
}
// if wallet is locked, ask for a passphrase
if (walletModel->getEncryptionStatus() == WalletModel::Locked)
{
WalletModel::UnlockContext ctx(walletModel->requestUnlock(false));
if(!ctx.isValid())
{
//unlock was cancelled
darkSendPool.cachedNumBlocks = 0;
QMessageBox::warning(this, tr("Darksend"),
tr("Wallet is locked and user declined to unlock. Disabling Darksend."),
QMessageBox::Ok, QMessageBox::Ok);
if (fDebug) LogPrintf("Wallet is locked and user declined to unlock. Disabling Darksend.\n");
return;
}
}
}
darkSendPool.cachedNumBlocks = 0;
fEnableDarksend = !fEnableDarksend;
if(!fEnableDarksend){
ui->toggleDarksend->setText(tr("Start Darksend Mixing"));
} else {
ui->toggleDarksend->setText(tr("Stop Darksend Mixing"));
/* show darksend configuration if client has defaults set */
if(nAnonymizeGamblrAmount == 0){
DarksendConfig dlg(this);
dlg.setModel(walletModel);
dlg.exec();
}
darkSendPool.DoAutomaticDenominating();
}
}<|fim▁end|> | |
<|file_name|>NodeBackedModelSet.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.model.internal.core;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import groovy.lang.Closure;
import org.gradle.api.Action;
import org.gradle.api.internal.ClosureBackedAction;
import org.gradle.api.specs.Specs;
import org.gradle.model.ModelSet;
import org.gradle.model.internal.core.rule.describe.ModelRuleDescriptor;
import org.gradle.model.internal.manage.instance.ManagedInstance;
import org.gradle.model.internal.type.ModelType;
import java.util.Collection;
import java.util.Iterator;
import static org.gradle.model.internal.core.NodePredicate.allLinks;
public class NodeBackedModelSet<T> implements ModelSet<T>, ManagedInstance {
private final String toString;
private final ModelType<T> elementType;
private final ModelRuleDescriptor descriptor;
private final MutableModelNode modelNode;
private final ModelViewState state;
private final ChildNodeInitializerStrategy<T> creatorStrategy;
private final ModelReference<T> elementTypeReference;
private Collection<T> elements;
public NodeBackedModelSet(String toString, ModelType<T> elementType, ModelRuleDescriptor descriptor, MutableModelNode modelNode, ModelViewState state, ChildNodeInitializerStrategy<T> creatorStrategy) {
this.toString = toString;
this.elementType = elementType;
this.elementTypeReference = ModelReference.of(elementType);
this.descriptor = descriptor;
this.modelNode = modelNode;
this.state = state;
this.creatorStrategy = creatorStrategy;
}
@Override
public MutableModelNode getBackingNode() {
return modelNode;
}
@Override
public ModelType<?> getManagedType() {
return ModelType.of(this.getClass());
}
@Override
public String toString() {
return toString;
}
@Override
public void create(final Action<? super T> action) {
state.assertCanMutate();
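        // Name the new element after the current count of typed child links,
        // producing sequential names "0", "1", "2", ...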
String name = String.valueOf(modelNode.getLinkCount(ModelNodes.withType(elementType)));
ModelPath childPath = modelNode.getPath().child(name);
final ModelRuleDescriptor descriptor = this.descriptor.append("create()");
NodeInitializer nodeInitializer = creatorStrategy.initializer(elementType, Specs.<ModelType<?>>satisfyAll());
ModelRegistration registration = ModelRegistrations.of(childPath, nodeInitializer)
.descriptor(descriptor)
.action(ModelActionRole.Initialize, NoInputsModelAction.of(ModelReference.of(childPath, elementType), descriptor, action))
.build();
modelNode.addLink(registration);
}
@Override
public void afterEach(Action<? super T> configAction) {
state.assertCanMutate();
modelNode.applyTo(allLinks(), ModelActionRole.Finalize, NoInputsModelAction.of(elementTypeReference, descriptor.append("afterEach()"), configAction));
}
@Override
public void beforeEach(Action<? super T> configAction) {
state.assertCanMutate();
modelNode.applyTo(allLinks(), ModelActionRole.Defaults, NoInputsModelAction.of(elementTypeReference, descriptor.append("afterEach()"), configAction));
}
@Override
public int size() {
state.assertCanReadChildren();
return modelNode.getLinkCount(ModelNodes.withType(elementType));
}
@Override
public boolean isEmpty() {
return size() == 0;
}
@Override
public boolean contains(Object o) {
return getElements().contains(o);
}
@Override
public Iterator<T> iterator() {
return getElements().iterator();
}
@Override
public Object[] toArray() {
return getElements().toArray();
}
@Override
public <T> T[] toArray(T[] a) {
return getElements().toArray(a);
}
@Override
public boolean add(T e) {
throw new UnsupportedOperationException();
}
@Override
public boolean remove(Object o) {
throw new UnsupportedOperationException();
}
@Override
public boolean containsAll(Collection<?> c) {
return getElements().containsAll(c);
}
@Override
public boolean addAll(Collection<? extends T> c) {
throw new UnsupportedOperationException();
}
<|fim▁hole|> }
@Override
public boolean removeAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
throw new UnsupportedOperationException();
}
// TODO - mix this in using decoration. Also validate closure parameter types, if declared
public void create(Closure<?> closure) {
create(ClosureBackedAction.of(closure));
}
public void afterEach(Closure<?> closure) {
afterEach(ClosureBackedAction.of(closure));
}
public void beforeEach(Closure<?> closure) {
beforeEach(ClosureBackedAction.of(closure));
}
private Collection<T> getElements() {
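        // Lazily build immutable views of the typed child nodes and cache
        // them for subsequent reads.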
state.assertCanReadChildren();
if (elements == null) {
elements = Lists.newArrayList(
Iterables.transform(modelNode.getLinks(ModelNodes.withType(elementType)), new Function<MutableModelNode, T>() {
@Override
public T apply(MutableModelNode input) {
return input.asImmutable(elementType, descriptor).getInstance();
}
})
);
}
return elements;
}
}<|fim▁end|> | @Override
public boolean retainAll(Collection<?> c) {
throw new UnsupportedOperationException(); |
<|file_name|>Instructions.js<|end_file_name|><|fim▁begin|>var NN = NN || {};
NN.InstructionsState = NN.InstructionsState || {};
NN.InstructionsState.init = function(levelnum) {
this.game.stage.backgroundColor = '#00f';
this.levelnum = levelnum;
this.GAMEX = this.game.world.width;
this.GAMEY = this.game.world.height;
};
NN.InstructionsState.preload = function() {
};
NN.InstructionsState.create = function() {
var background = this.game.add.sprite(0,0,'instructions');
background.inputEnabled = true;
background.x = 0;
background.y = 0;
background.height = this.GAMEY;
background.width = this.GAMEX;
// background.scaleX = (0.5);
// background.scaleY = (0.2);
// background.scaleX = (this.GAMEX / background.width);
// background.scaleY = (this.GAMEY / background.height);
var style = {font: 'bold 24pt Arial', fill: '#0f0'};
var words1 = this.game.add.text(this.GAMEX/2, this.GAMEY / 3, 'Text', style);
var words2 = this.game.add.text(this.GAMEX/2, this.GAMEY / 2, 'Text', style);
var words3 = this.game.add.text(this.GAMEX/2, this.GAMEY * 2 / 3, 'Text', style);
words1.anchor.setTo(0.5);
words2.anchor.setTo(0.5);
words3.anchor.setTo(0.5);
if ( this.levelnum == 1 ) {
words1.text = 'Swipe to move';
words2.text = 'and tap to nab';
words3.text = 'all the answers';
}
<|fim▁hole|> words3.text = 'current level';
}
if ( this.levelnum == 3 ) {
words1.text = 'Enjoy your last';
words2.text = 'level without';
words3.text = 'enemies!';
}
background.events.onInputDown.add(function() {
this.startGameState();
}, this);
};
NN.InstructionsState.startGameState = function(button) {
this.state.start('GameState', true, false, this.levelnum, true);
};<|fim▁end|> | if ( this.levelnum == 2 ) {
words1.text = 'The answers are';
words2.text = 'multiples of the'; |
<|file_name|>zlib.cpp<|end_file_name|><|fim▁begin|>/*
* The Mana Server
* Copyright (C) 2006-2010 The Mana World Development Team
*
* This file is part of The Mana Server.
*
* The Mana Server is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* any later version.
*
* The Mana Server is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with The Mana Server. If not, see <http://www.gnu.org/licenses/>.
*/
#include <cstdlib>
#include <zlib.h>
#include "utils/zlib.h"
#include "utils/logger.h"
static void logZlibError(int error)
{
switch (error)
{
case Z_MEM_ERROR:
LOG_ERROR("Out of memory while decompressing data!");
break;
case Z_VERSION_ERROR:
LOG_ERROR("Incompatible zlib version!");
break;
case Z_DATA_ERROR:
LOG_ERROR("Incorrect zlib compressed data!");
break;
default:
LOG_ERROR("Unknown error while decompressing data!");
}
}
bool inflateMemory(char *in, unsigned inLength,
char *&out, unsigned &outLength)
{
int bufferSize = 256 * 1024;
int ret;
z_stream strm;
out = (char *)malloc(bufferSize);
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
strm.opaque = Z_NULL;
strm.next_in = (Bytef *)in;
strm.avail_in = inLength;
strm.next_out = (Bytef *)out;
strm.avail_out = bufferSize;
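    // windowBits of 15 + 32 tells zlib to auto-detect zlib or gzip framing.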
ret = inflateInit2(&strm, 15 + 32);
if (ret != Z_OK)
{
logZlibError(ret);
free(out);
return false;
}
do
{
ret = inflate(&strm, Z_SYNC_FLUSH);<|fim▁hole|> case Z_NEED_DICT:
case Z_STREAM_ERROR:
ret = Z_DATA_ERROR;
case Z_DATA_ERROR:
case Z_MEM_ERROR:
inflateEnd(&strm);
logZlibError(ret);
free(out);
return false;
}
if (ret != Z_STREAM_END)
{
            // Use a temporary pointer so the original buffer can still be
            // freed if realloc fails.
            char *newOut = (char *)realloc(out, bufferSize * 2);
            if (!newOut)
            {
                inflateEnd(&strm);
                logZlibError(Z_MEM_ERROR);
                free(out);
                return false;
            }
            out = newOut;
strm.next_out = (Bytef *)(out + bufferSize);
strm.avail_out = bufferSize;
bufferSize *= 2;
}
}
while (ret != Z_STREAM_END);
if (strm.avail_in != 0)
{
logZlibError(Z_DATA_ERROR);
free(out);
return false;
}
outLength = bufferSize - strm.avail_out;
inflateEnd(&strm);
return true;
}<|fim▁end|> |
switch (ret) { |
<|file_name|>ProjOpen.js<|end_file_name|><|fim▁begin|>/**
* File: app/project/ProjOpen.js
* Author: liusha
*/
Ext.define('xdfn.project.ProjOpen', {
extend: 'xdfn.project.ui.ProjOpen',
grid: null,
initComponent: function() {
var me = this;
me.openStore = Ext.create('xdfn.project.store.ProjOpenJsonStore');
me.rowEditing = Ext.create('Ext.grid.plugin.RowEditing', {
errorSummary: false
});
me.callParent(arguments);
me.down('button[text="增加记录"]').on('click', me.OnAddProjOpenBtnClick, me);
me.down('button[text="删除记录"]').on('click', me.OnDeleteProjOpenBtnClick, me);
me.down('button[text="导出"]').on('click', me.OnExportProjOpenBtnClick, me);
me.rowEditing.on('edit', me.OnGridEdit, me);
me.rowEditing.on('beforeedit', me.OnGridBeforeEdit, me);
},
OnGridBeforeEdit: function(editor, e, epts) {
xdfn.user.Rights.noRights('XMGL-XMZL-31', function() {
editor.cancelEdit();
});
},
OnGridEdit: function(editor, e) {
var me = this;
if (!e.record.dirty) return;
var url = './proExec.do?method=modifyKbjl';
if (Ext.isEmpty(e.record.get('ID_VIEW'))) {
var rows = me.grid.getSelectionModel().getSelection();
e.record.set('ID_VIEW', rows[0].get('ID_VIEW'));
url = './proExec.do?method=addProKbjl';
}
e.record.commit();
Ext.Ajax.request({
url: url,
method: 'post',
params: {
ID: e.record.get('ID_VIEW'),
V_MANU: e.record.get('V_MANU_VIEW'),
V_MACHINE: e.record.get('V_MACHINE_VIEW'),
N_CAP: e.record.get('N_CAP_VIEW'),
N_SUM_NUM: e.record.get('N_SUM_NUM_VIEW'),
N_SUM_MONEY: e.record.get('N_SUM_MONEY_VIEW'),
V_MEMO: e.record.get('V_MEMO_VIEW')
},
success: function(response, opts) {
                var result = Ext.JSON.decode(response.responseText); // the server returns the ID of the new record
e.record.set(result.data);
e.record.commit();
},
failure: function(response, opts) {
Ext.Msg.alert('提示','提交失败!');
}
});
},
OnAddProjOpenBtnClick: function(self, e, options) {
var me = this,
sm = me.grid.getSelectionModel(),
rows = sm.getSelection();
xdfn.user.Rights.hasRights('XMGL-XMZL-30', function() {
if (rows.length > 0) {
me.rowEditing.cancelEdit();
me.openStore.insert(0, {});
me.rowEditing.startEdit(0, 0);
} else {
Ext.Msg.alert('提示','请先选择相应的项目!');
}
});
},
OnDeleteProjOpenBtnClick: function(self, e, options) {
var me = this,
grid = self.up('gridpanel'),
store = grid.getStore(),
sm = grid.getSelectionModel(),
rows = sm.getSelection();
xdfn.user.Rights.hasRights('XMGL-XMZL-32', function() {
if (rows.length > 0) {
if (Ext.isEmpty(rows[0].get('ID_VIEW'))) {
me.rowEditing.cancelEdit();
var i = store.indexOf(rows[0]);
store.remove(rows);
var count = store.getCount();
if (count > 0) {
sm.select((i == count)? --i : i);
}
return;
}
Ext.MessageBox.confirm('提示', '确定删除该记录吗?', function(id) {
if (id == 'yes') {
//TODO 删除记录
Ext.Ajax.request({
url: './proExec.do?method=deleteKbjl', //改为实际的删除请求url
method: 'get',
params: {
ID: rows[0].get('ID_VIEW')
},
success: function(response, opts) {
me.rowEditing.cancelEdit();
var i = store.indexOf(rows[0]);
store.remove(rows);
var count = store.getCount();
if (count > 0) {
sm.select((i == count)? --i : i);
}
},
failure: function(response, opts) {
Ext.Msg.alert('提示','删除失败!');
}
});
}
});
} else {
Ext.Msg.alert('提示','请选择要删除的记录!');
}
});
},
OnExportProjOpenBtnClick: function(self, e, options) {
var me = this;
//导出为excel文件
xdfn.user.Rights.hasRights('XMGL-XMZL-33', function() {
me.openStore.load({
limit: me.openStore.getTotalCount(),
scope: this,
callback: function(records, operation, success) {
var excelXml = Ext.ux.exporter.Exporter.exportGrid(self.up('gridpanel'), 'excel', {title: '项目开标记录'});
document.location = 'data:application/vnd.ms-excel;base64,' + Ext.ux.exporter.Base64.encode(excelXml);
}
});
});
}<|fim▁hole|>});<|fim▁end|> | |
<|file_name|>config_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.<|fim▁hole|>// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
import (
"encoding/json"
"io/ioutil"
"net/url"
"reflect"
"strings"
"testing"
"time"
"github.com/prometheus/common/model"
"gopkg.in/yaml.v2"
)
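// expectedConf is the fully parsed form of testdata/conf.good.yml that
// TestLoadConfig compares against via YAML round-tripping.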
var expectedConf = &Config{
GlobalConfig: GlobalConfig{
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
EvaluationInterval: model.Duration(30 * time.Second),
ExternalLabels: model.LabelSet{
"monitor": "codelab",
"foo": "bar",
},
},
RuleFiles: []string{
"testdata/first.rules",
"/absolute/second.rules",
"testdata/my/*.rules",
},
ScrapeConfigs: []*ScrapeConfig{
{
JobName: "prometheus",
HonorLabels: true,
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
BearerTokenFile: "testdata/valid_token_file",
StaticConfigs: []*TargetGroup{
{
Targets: []model.LabelSet{
{model.AddressLabel: "localhost:9090"},
{model.AddressLabel: "localhost:9191"},
},
Labels: model.LabelSet{
"my": "label",
"your": "label",
},
},
},
FileSDConfigs: []*FileSDConfig{
{
Files: []string{"foo/*.slow.json", "foo/*.slow.yml", "single/file.yml"},
RefreshInterval: model.Duration(10 * time.Minute),
},
{
Files: []string{"bar/*.yaml"},
RefreshInterval: model.Duration(5 * time.Minute),
},
},
RelabelConfigs: []*RelabelConfig{
{
SourceLabels: model.LabelNames{"job", "__meta_dns_name"},
TargetLabel: "job",
Separator: ";",
Regex: MustNewRegexp("(.*)some-[regex]"),
Replacement: "foo-${1}",
Action: RelabelReplace,
}, {
SourceLabels: model.LabelNames{"abc"},
TargetLabel: "cde",
Separator: ";",
Regex: DefaultRelabelConfig.Regex,
Replacement: DefaultRelabelConfig.Replacement,
Action: RelabelReplace,
}, {
TargetLabel: "abc",
Separator: ";",
Regex: DefaultRelabelConfig.Regex,
Replacement: "static",
Action: RelabelReplace,
},
},
},
{
JobName: "service-x",
ScrapeInterval: model.Duration(50 * time.Second),
ScrapeTimeout: model.Duration(5 * time.Second),
BasicAuth: &BasicAuth{
Username: "admin_name",
Password: "admin_password",
},
MetricsPath: "/my_path",
Scheme: "https",
DNSSDConfigs: []*DNSSDConfig{
{
Names: []string{
"first.dns.address.domain.com",
"second.dns.address.domain.com",
},
RefreshInterval: model.Duration(15 * time.Second),
Type: "SRV",
},
{
Names: []string{
"first.dns.address.domain.com",
},
RefreshInterval: model.Duration(30 * time.Second),
Type: "SRV",
},
},
RelabelConfigs: []*RelabelConfig{
{
SourceLabels: model.LabelNames{"job"},
Regex: MustNewRegexp("(.*)some-[regex]"),
Separator: ";",
Replacement: DefaultRelabelConfig.Replacement,
Action: RelabelDrop,
},
{
SourceLabels: model.LabelNames{"__address__"},
TargetLabel: "__tmp_hash",
Regex: DefaultRelabelConfig.Regex,
Replacement: DefaultRelabelConfig.Replacement,
Modulus: 8,
Separator: ";",
Action: RelabelHashMod,
},
{
SourceLabels: model.LabelNames{"__tmp_hash"},
Regex: MustNewRegexp("1"),
Separator: ";",
Replacement: DefaultRelabelConfig.Replacement,
Action: RelabelKeep,
},
{
Regex: MustNewRegexp("1"),
Separator: ";",
Replacement: DefaultRelabelConfig.Replacement,
Action: RelabelLabelMap,
},
},
MetricRelabelConfigs: []*RelabelConfig{
{
SourceLabels: model.LabelNames{"__name__"},
Regex: MustNewRegexp("expensive_metric.*"),
Separator: ";",
Replacement: DefaultRelabelConfig.Replacement,
Action: RelabelDrop,
},
},
},
{
JobName: "service-y",
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
ConsulSDConfigs: []*ConsulSDConfig{
{
Server: "localhost:1234",
Services: []string{"nginx", "cache", "mysql"},
TagSeparator: DefaultConsulSDConfig.TagSeparator,
Scheme: DefaultConsulSDConfig.Scheme,
},
},
},
{
JobName: "service-z",
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: model.Duration(10 * time.Second),
MetricsPath: "/metrics",
Scheme: "http",
TLSConfig: TLSConfig{
CertFile: "testdata/valid_cert_file",
KeyFile: "testdata/valid_key_file",
},
BearerToken: "avalidtoken",
},
{
JobName: "service-kubernetes",
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
KubernetesSDConfigs: []*KubernetesSDConfig{
{
APIServers: []URL{kubernetesSDHostURL()},
Role: KubernetesRoleEndpoint,
BasicAuth: &BasicAuth{
Username: "myusername",
Password: "mypassword",
},
RequestTimeout: model.Duration(10 * time.Second),
RetryInterval: model.Duration(1 * time.Second),
},
},
},
{
JobName: "service-marathon",
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
MarathonSDConfigs: []*MarathonSDConfig{
{
Servers: []string{
"http://marathon.example.com:8080",
},
RefreshInterval: model.Duration(30 * time.Second),
},
},
},
{
JobName: "service-ec2",
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
EC2SDConfigs: []*EC2SDConfig{
{
Region: "us-east-1",
AccessKey: "access",
SecretKey: "secret",
RefreshInterval: model.Duration(60 * time.Second),
Port: 80,
},
},
},
{
JobName: "service-azure",
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
AzureSDConfigs: []*AzureSDConfig{
{
SubscriptionID: "11AAAA11-A11A-111A-A111-1111A1111A11",
TenantID: "BBBB222B-B2B2-2B22-B222-2BB2222BB2B2",
ClientID: "333333CC-3C33-3333-CCC3-33C3CCCCC33C",
ClientSecret: "nAdvAK2oBuVym4IXix",
RefreshInterval: model.Duration(5 * time.Minute),
Port: 9100,
},
},
},
{
JobName: "service-nerve",
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
NerveSDConfigs: []*NerveSDConfig{
{
Servers: []string{"localhost"},
Paths: []string{"/monitoring"},
Timeout: model.Duration(10 * time.Second),
},
},
},
},
original: "",
}
func TestLoadConfig(t *testing.T) {
// Parse a valid file that sets a global scrape timeout. This tests whether parsing
// an overwritten default field in the global config permanently changes the default.
if _, err := LoadFile("testdata/global_timeout.good.yml"); err != nil {
t.Errorf("Error parsing %s: %s", "testdata/conf.good.yml", err)
}
c, err := LoadFile("testdata/conf.good.yml")
if err != nil {
t.Fatalf("Error parsing %s: %s", "testdata/conf.good.yml", err)
}
bgot, err := yaml.Marshal(c)
if err != nil {
t.Fatalf("%s", err)
}
bexp, err := yaml.Marshal(expectedConf)
if err != nil {
t.Fatalf("%s", err)
}
expectedConf.original = c.original
if !reflect.DeepEqual(c, expectedConf) {
t.Fatalf("%s: unexpected config result: \n\n%s\n expected\n\n%s", "testdata/conf.good.yml", bgot, bexp)
}
// String method must not reveal authentication credentials.
s := c.String()
if strings.Contains(s, "admin_password") {
t.Fatalf("config's String method reveals authentication credentials.")
}
}
var expectedErrors = []struct {
filename string
errMsg string
}{
{
filename: "jobname.bad.yml",
errMsg: `"prom^etheus" is not a valid job name`,
}, {
filename: "jobname_dup.bad.yml",
errMsg: `found multiple scrape configs with job name "prometheus"`,
}, {
filename: "scrape_interval.bad.yml",
errMsg: `scrape timeout greater than scrape interval`,
}, {
filename: "labelname.bad.yml",
errMsg: `"not$allowed" is not a valid label name`,
}, {
filename: "labelname2.bad.yml",
errMsg: `"not:allowed" is not a valid label name`,
}, {
filename: "regex.bad.yml",
errMsg: "error parsing regexp",
}, {
filename: "modulus_missing.bad.yml",
errMsg: "relabel configuration for hashmod requires non-zero modulus",
}, {
filename: "rules.bad.yml",
errMsg: "invalid rule file path",
}, {
filename: "unknown_attr.bad.yml",
errMsg: "unknown fields in scrape_config: consult_sd_configs",
}, {
filename: "bearertoken.bad.yml",
errMsg: "at most one of bearer_token & bearer_token_file must be configured",
}, {
filename: "bearertoken_basicauth.bad.yml",
errMsg: "at most one of basic_auth, bearer_token & bearer_token_file must be configured",
}, {
filename: "kubernetes_bearertoken.bad.yml",
errMsg: "at most one of bearer_token & bearer_token_file must be configured",
}, {
filename: "kubernetes_role.bad.yml",
errMsg: "role",
}, {
filename: "kubernetes_bearertoken_basicauth.bad.yml",
errMsg: "at most one of basic_auth, bearer_token & bearer_token_file must be configured",
}, {
filename: "marathon_no_servers.bad.yml",
errMsg: "Marathon SD config must contain at least one Marathon server",
}, {
filename: "url_in_targetgroup.bad.yml",
errMsg: "\"http://bad\" is not a valid hostname",
},
}
func TestBadConfigs(t *testing.T) {
for _, ee := range expectedErrors {
_, err := LoadFile("testdata/" + ee.filename)
if err == nil {
t.Errorf("Expected error parsing %s but got none", ee.filename)
continue
}
if !strings.Contains(err.Error(), ee.errMsg) {
t.Errorf("Expected error for %s to contain %q but got: %s", ee.filename, ee.errMsg, err)
}
}
}
func TestBadStaticConfigs(t *testing.T) {
content, err := ioutil.ReadFile("testdata/static_config.bad.json")
if err != nil {
t.Fatal(err)
}
var tg TargetGroup
err = json.Unmarshal(content, &tg)
if err == nil {
t.Errorf("Expected unmarshal error but got none.")
}
}
func TestEmptyConfig(t *testing.T) {
c, err := Load("")
if err != nil {
t.Fatalf("Unexpected error parsing empty config file: %s", err)
}
exp := DefaultConfig
if !reflect.DeepEqual(*c, exp) {
t.Fatalf("want %v, got %v", exp, c)
}
}
func TestEmptyGlobalBlock(t *testing.T) {
c, err := Load("global:\n")
if err != nil {
t.Fatalf("Unexpected error parsing empty config file: %s", err)
}
exp := DefaultConfig
exp.original = "global:\n"
if !reflect.DeepEqual(*c, exp) {
t.Fatalf("want %v, got %v", exp, c)
}
}
func kubernetesSDHostURL() URL {
tURL, _ := url.Parse("https://localhost:1234")
return URL{URL: tURL}
}<|fim▁end|> | |
<|file_name|>beta_answer_the_students_questions.py<|end_file_name|><|fim▁begin|>from collections import Counter
def answer(q,inf):
s = Counter(q.split(' ')); r = [-1,-1]
for i,j in enumerate(inf):
check = sum(s.get(w,0) for w in j.split(' '))<|fim▁hole|> return None if r == [-1,-1] else inf[r[0]]<|fim▁end|> | if check != 0 and check > r[1]: r = [i,check]
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django.forms import ModelForm
from bug_reporting.models import Feedback
from CoralNet.forms import FormHelper
class FeedbackForm(ModelForm):
class Meta:
model = Feedback
fields = ('type', 'comment') # Other fields are auto-set
#error_css_class = ...
#required_css_class = ...
def clean(self):
"""
1. Strip spaces from character fields.
2. Call the parent's clean() to finish up with the default behavior.
"""
<|fim▁hole|> self.cleaned_data, self.fields)
self.cleaned_data = data
return super(FeedbackForm, self).clean()<|fim▁end|> | data = FormHelper.stripSpacesFromFields( |
<|file_name|>moderator.py<|end_file_name|><|fim▁begin|>#from moderation import moderation
#from .models import SuccessCase
<|fim▁hole|>
#moderation.register(SuccessCase)<|fim▁end|> | |
<|file_name|>test_arp_lib.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014 VA Linux Systems Japan K.K.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Fumihiko Kakuma, VA Linux Systems Japan K.K.
import collections
import contextlib
import mock
from neutron.openstack.common import importutils
import neutron.plugins.ofagent.agent.metadata as meta
from neutron.tests.unit.ofagent import ofa_test_base
_OFALIB_NAME = 'neutron.plugins.ofagent.agent.arp_lib'
class OFAAgentTestCase(ofa_test_base.OFAAgentTestBase):
def setUp(self):
super(OFAAgentTestCase, self).setUp()
Net = collections.namedtuple('Net', 'net, mac, ip')
self.nets = [Net(net=10, mac='11:11:11:44:55:66', ip='10.1.2.20'),
Net(net=10, mac='11:11:11:44:55:67', ip='10.1.2.21'),
Net(net=20, mac='22:22:22:44:55:66', ip='10.2.2.20')]
self.packet_mod = mock.Mock()
self.proto_ethernet_mod = mock.Mock()
self.proto_vlan_mod = mock.Mock()
self.proto_vlan_mod.vid = 999
self.proto_arp_mod = mock.Mock()
self.fake_get_protocol = mock.Mock(return_value=self.proto_vlan_mod)
self.packet_mod.get_protocol = self.fake_get_protocol
self.fake_add_protocol = mock.Mock()
self.packet_mod.add_protocol = self.fake_add_protocol
self.arp = importutils.import_module('ryu.lib.packet.arp')
self.ethernet = importutils.import_module('ryu.lib.packet.ethernet')
self.vlan = importutils.import_module('ryu.lib.packet.vlan')
mock.patch('ryu.lib.packet.packet.Packet',
return_value=self.packet_mod).start()
self.ryuapp = 'ryuapp'
self.inport = '1'
self.ev = mock.Mock()
self.datapath = self._mk_test_dp('tun_br')
self.ofproto = importutils.import_module('ryu.ofproto.ofproto_v1_3')
self.ofpp = mock.Mock()
self.datapath.ofproto = self.ofproto
self.datapath.ofproto_parser = self.ofpp
self.OFPActionOutput = mock.Mock()
self.OFPActionOutput.return_value = 'OFPActionOutput'
self.ofpp.OFPActionOutput = self.OFPActionOutput
self.msg = mock.Mock()
self.msg.datapath = self.datapath
self.msg.buffer_id = self.ofproto.OFP_NO_BUFFER
self.msg_data = 'test_message_data'
self.msg.data = self.msg_data
self.ev.msg = self.msg
self.msg.match = {'in_port': self.inport,
'metadata': meta.LOCAL | self.nets[0].net}
class TestArpLib(OFAAgentTestCase):
def setUp(self):
super(TestArpLib, self).setUp()
self.mod_arplib = importutils.import_module(_OFALIB_NAME)
self.arplib = self.mod_arplib.ArpLib(self.ryuapp)
self.packet_mod.get_protocol = self._fake_get_protocol
self._fake_get_protocol_ethernet = True
self._fake_get_protocol_vlan = True
self._fake_get_protocol_arp = True
self.br = mock.Mock(datapath=self.datapath)
self.arplib.set_bridge(self.br)
def test__send_unknown_packet_no_buffer(self):
in_port = 3
out_port = self.ofproto.OFPP_TABLE
self.msg.buffer_id = self.ofproto.OFP_NO_BUFFER
self.arplib._send_unknown_packet(self.msg, in_port, out_port)
actions = [self.ofpp.OFPActionOutput(self.ofproto.OFPP_TABLE, 0)]
self.ofpp.OFPPacketOut.assert_called_once_with(
datapath=self.datapath,
buffer_id=self.msg.buffer_id,
in_port=in_port,
actions=actions,
data=self.msg_data)
def test__send_unknown_packet_existence_buffer(self):
in_port = 3
out_port = self.ofproto.OFPP_TABLE
self.msg.buffer_id = 256
self.arplib._send_unknown_packet(self.msg, in_port, out_port)
actions = [self.ofpp.OFPActionOutput(self.ofproto.OFPP_TABLE, 0)]
self.ofpp.OFPPacketOut.assert_called_once_with(
datapath=self.datapath,
buffer_id=self.msg.buffer_id,
in_port=in_port,
actions=actions,
data=None)
def test__respond_arp(self):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}}
port = 3
arptbl = self.arplib._arp_tbl[self.nets[0].net]
pkt_ethernet = self.ethernet
pkt_vlan = self.vlan
pkt_arp = self.arp
pkt_arp.opcode = self.arp.ARP_REQUEST
pkt_arp.dst_ip = self.nets[0].ip
with mock.patch.object(
self.arplib, '_send_arp_reply'
) as send_arp_rep_fn:
self.assertTrue(
self.arplib._respond_arp(self.datapath, port, arptbl,
pkt_ethernet, pkt_vlan, pkt_arp))
ethernet_ethernet = self.ethernet.ethernet(
ethertype=pkt_ethernet.ethertype,
dst=pkt_ethernet.src,
src=self.nets[0].mac)
vlan_vlan = self.vlan.vlan(cfi=pkt_vlan.cfi,
ethertype=pkt_vlan.ethertype,
pcp=pkt_vlan.pcp,
vid=pkt_vlan.vid)
arp_arp = self.arp.arp(opcode=self.arp.ARP_REPLY,
src_mac=self.nets[0].mac,
src_ip=pkt_arp.dst_ip,
dst_mac=pkt_arp.src_mac,
dst_ip=pkt_arp.src_ip)
self.fake_add_protocol.assert_has_calls([mock.call(ethernet_ethernet),
mock.call(vlan_vlan),
mock.call(arp_arp)])
send_arp_rep_fn.assert_called_once_with(
self.datapath, port, self.packet_mod)
def _test__respond_arp(self, pkt_arp):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}}
port = 3
arptbl = self.arplib._arp_tbl[self.nets[0].net]
pkt_ethernet = mock.Mock()
pkt_vlan = mock.Mock()
self.assertFalse(
self.arplib._respond_arp(self.datapath, port, arptbl,
pkt_ethernet, pkt_vlan, pkt_arp))
def test__respond_arp_non_arp_req(self):
pkt_arp = mock.Mock()
pkt_arp.opcode = self.arp.ARP_REPLY
self._test__respond_arp(pkt_arp)
def test__respond_arp_ip_not_found_in_arptable(self):
pkt_arp = mock.Mock()
pkt_arp.opcode = self.arp.ARP_REQUEST
pkt_arp.dst_ip = self.nets[1].ip
self._test__respond_arp(pkt_arp)
def test_add_arp_table_entry(self):
self.arplib.add_arp_table_entry(self.nets[0].net,
self.nets[0].ip, self.nets[0].mac)
self.assertEqual(
self.arplib._arp_tbl,
{self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}})
def test_add_arp_table_entry_multiple_net(self):
self.arplib.add_arp_table_entry(self.nets[0].net,
self.nets[0].ip, self.nets[0].mac)
self.arplib.add_arp_table_entry(self.nets[2].net,
self.nets[2].ip, self.nets[2].mac)
self.assertEqual(
self.arplib._arp_tbl,
{self.nets[0].net: {self.nets[0].ip: self.nets[0].mac},
self.nets[2].net: {self.nets[2].ip: self.nets[2].mac}})
def test_add_arp_table_entry_multiple_ip(self):
self.arplib.add_arp_table_entry(self.nets[0].net,
self.nets[0].ip, self.nets[0].mac)
self.arplib.add_arp_table_entry(self.nets[0].net,
self.nets[1].ip, self.nets[1].mac)
self.assertEqual(
self.arplib._arp_tbl,
{self.nets[0].net: {self.nets[0].ip: self.nets[0].mac,
self.nets[1].ip: self.nets[1].mac}})
def test_del_arp_table_entry(self):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}}
self.arplib.del_arp_table_entry(self.nets[0].net, self.nets[0].ip)
self.assertEqual(self.arplib._arp_tbl, {})
def test_del_arp_table_entry_multiple_net(self):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac},
self.nets[2].net: {self.nets[2].ip: self.nets[2].mac}}
self.arplib.del_arp_table_entry(self.nets[0].net, self.nets[0].ip)
self.assertEqual(
self.arplib._arp_tbl,
{self.nets[2].net: {self.nets[2].ip: self.nets[2].mac}})
def test_del_arp_table_entry_multiple_ip(self):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac,
self.nets[1].ip: self.nets[1].mac}}
self.arplib.del_arp_table_entry(self.nets[0].net, self.nets[1].ip)
self.assertEqual(
self.arplib._arp_tbl,
{self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}})
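    # Stand-in for packet.get_protocol(): returns the canned protocol mock
    # only while the matching _fake_get_protocol_* flag is True, otherwise
    # None, mimicking the real parser when a layer is absent.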
def _fake_get_protocol(self, net_type):
if net_type == self.ethernet.ethernet:
if self._fake_get_protocol_ethernet:
return self.proto_ethernet_mod
else:
return
if net_type == self.vlan.vlan:
if self._fake_get_protocol_vlan:
return self.proto_vlan_mod
else:
return
if net_type == self.arp.arp:
if self._fake_get_protocol_arp:
return self.proto_arp_mod
else:
return
def _test_packet_in_handler(self):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}}
with contextlib.nested(
mock.patch.object(self.arplib, '_respond_arp',
return_value=True),
mock.patch.object(self.br,
'arp_passthrough'),
mock.patch.object(self.arplib,
'_send_unknown_packet'),
) as (res_arp_fn, add_flow_fn, send_unknown_pk_fn):
self.arplib.packet_in_handler(self.ev)
self.assertFalse(add_flow_fn.call_count)
self.assertFalse(send_unknown_pk_fn.call_count)
res_arp_fn.assert_called_once_with(
self.datapath, self.inport,
self.arplib._arp_tbl[self.nets[0].net],
self.proto_ethernet_mod,
self.proto_vlan_mod if self._fake_get_protocol_vlan else None,
self.proto_arp_mod)
def _test_packet_in_handler_drop(self):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}}
with contextlib.nested(
mock.patch.object(self.arplib, '_respond_arp',
return_value=True),
mock.patch.object(self.br, 'arp_passthrough'),
mock.patch.object(self.arplib,
'_send_unknown_packet'),
) as (res_arp_fn, add_flow_fn, send_unknown_pk_fn):
self.arplib.packet_in_handler(self.ev)
self.assertFalse(add_flow_fn.call_count)
self.assertFalse(send_unknown_pk_fn.call_count)
self.assertFalse(res_arp_fn.call_count)
<|fim▁hole|>
def test_packet_in_handler_non_ethernet(self):
self._fake_get_protocol_ethernet = False
self._test_packet_in_handler_drop()
def test_packet_in_handler_non_vlan(self):
self._fake_get_protocol_vlan = False
self._test_packet_in_handler()
def test_packet_in_handler_non_arp(self):
self._fake_get_protocol_arp = False
self._test_packet_in_handler_drop()
def test_packet_in_handler_unknown_network(self):
self.arplib._arp_tbl = {
self.nets[0].net: {self.nets[0].ip: self.nets[0].mac}}
with contextlib.nested(
mock.patch.object(self.arplib, '_respond_arp',
return_value=False),
mock.patch.object(self.br, 'arp_passthrough'),
mock.patch.object(self.arplib,
'_send_unknown_packet'),
) as (res_arp_fn, add_flow_fn, send_unknown_pk_fn):
self.arplib.packet_in_handler(self.ev)
add_flow_fn.assert_called_once_with(
network=self.nets[0].net,
tpa=self.proto_arp_mod.dst_ip)
send_unknown_pk_fn.assert_called_once_with(
self.ev.msg, self.msg.match['in_port'],
self.datapath.ofproto.OFPP_TABLE)
res_arp_fn.assert_called_once_with(
self.datapath, self.inport,
self.arplib._arp_tbl[self.nets[0].net],
self.proto_ethernet_mod, self.proto_vlan_mod, self.proto_arp_mod)<|fim▁end|> | def test_packet_in_handler(self):
self._test_packet_in_handler() |
<|file_name|>handler.py<|end_file_name|><|fim▁begin|># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Metadata request handler."""
import hashlib
import hmac
import os
from oslo_config import cfg
from oslo_log import log as logging
import six
import webob.dec
import webob.exc
from nova.api.metadata import base
from nova import conductor
from nova import exception
from nova.i18n import _
from nova.i18n import _LE
from nova.i18n import _LW
from nova.openstack.common import memorycache
from nova import utils
from nova import wsgi
CACHE_EXPIRATION = 15 # in seconds
CONF = cfg.CONF
CONF.import_opt('use_forwarded_for', 'nova.api.auth')
metadata_proxy_opts = [
cfg.BoolOpt(
'service_metadata_proxy',
default=False,
help='Set flag to indicate Neutron will proxy metadata requests and '
'resolve instance ids.'),
cfg.StrOpt(
'metadata_proxy_shared_secret',
default='', secret=True,
        help='Shared secret used to validate proxied Neutron metadata requests'),
]
CONF.register_opts(metadata_proxy_opts, 'neutron')
LOG = logging.getLogger(__name__)
class MetadataRequestHandler(wsgi.Application):
"""Serve metadata."""
def __init__(self):
self._cache = memorycache.get_client()
self.conductor_api = conductor.API()
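    # Both lookup helpers below cache their result for CACHE_EXPIRATION
    # seconds, keyed by requester address or instance id, so repeated
    # requests within that window skip the database lookup.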
def get_metadata_by_remote_address(self, address):
if not address:
raise exception.FixedIpNotFoundForAddress(address=address)
cache_key = 'metadata-%s' % address
data = self._cache.get(cache_key)
if data:
return data
try:
data = base.get_metadata_by_address(self.conductor_api, address)
except exception.NotFound:
return None
self._cache.set(cache_key, data, CACHE_EXPIRATION)
return data
def get_metadata_by_instance_id(self, instance_id, address):
cache_key = 'metadata-%s' % instance_id
data = self._cache.get(cache_key)
if data:
return data
try:
data = base.get_metadata_by_instance_id(self.conductor_api,
instance_id, address)
except exception.NotFound:
return None
self._cache.set(cache_key, data, CACHE_EXPIRATION)
return data
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
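        # A bare "/" request is version discovery: return the list of
        # supported metadata API versions.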
if os.path.normpath(req.path_info) == "/":
resp = base.ec2_md_print(base.VERSIONS + ["latest"])
req.response.body = resp
req.response.content_type = base.MIME_TYPE_TEXT_PLAIN
return req.response
if CONF.neutron.service_metadata_proxy:
meta_data = self._handle_instance_id_request(req)
else:
if req.headers.get('X-Instance-ID'):
LOG.warning(
_LW("X-Instance-ID present in request headers. The "
"'service_metadata_proxy' option must be "
"enabled to process this header."))
meta_data = self._handle_remote_ip_request(req)
if meta_data is None:
raise webob.exc.HTTPNotFound()
try:
data = meta_data.lookup(req.path_info)
except base.InvalidMetadataPath:
raise webob.exc.HTTPNotFound()
if callable(data):
return data(req, meta_data)
resp = base.ec2_md_print(data)
if isinstance(resp, six.text_type):
req.response.text = resp
else:
req.response.body = resp
req.response.content_type = meta_data.get_mimetype()<|fim▁hole|>
def _handle_remote_ip_request(self, req):
remote_address = req.remote_addr
if CONF.use_forwarded_for:
remote_address = req.headers.get('X-Forwarded-For', remote_address)
try:
meta_data = self.get_metadata_by_remote_address(remote_address)
except Exception:
LOG.exception(_LE('Failed to get metadata for ip: %s'),
remote_address)
msg = _('An unknown error has occurred. '
'Please try your request again.')
raise webob.exc.HTTPInternalServerError(
explanation=six.text_type(msg))
if meta_data is None:
LOG.error(_LE('Failed to get metadata for ip: %s'),
remote_address)
return meta_data
def _handle_instance_id_request(self, req):
instance_id = req.headers.get('X-Instance-ID')
tenant_id = req.headers.get('X-Tenant-ID')
signature = req.headers.get('X-Instance-ID-Signature')
remote_address = req.headers.get('X-Forwarded-For')
        # Ensure that each required header is present and appears exactly once
if instance_id is None:
msg = _('X-Instance-ID header is missing from request.')
elif signature is None:
msg = _('X-Instance-ID-Signature header is missing from request.')
elif tenant_id is None:
msg = _('X-Tenant-ID header is missing from request.')
elif not isinstance(instance_id, six.string_types):
msg = _('Multiple X-Instance-ID headers found within request.')
elif not isinstance(tenant_id, six.string_types):
msg = _('Multiple X-Tenant-ID headers found within request.')
else:
msg = None
if msg:
raise webob.exc.HTTPBadRequest(explanation=msg)
expected_signature = hmac.new(
CONF.neutron.metadata_proxy_shared_secret,
instance_id,
hashlib.sha256).hexdigest()
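        # Compare digests in constant time so response timing does not leak
        # how much of the signature was correct.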
if not utils.constant_time_compare(expected_signature, signature):
if instance_id:
LOG.warning(_LW('X-Instance-ID-Signature: %(signature)s does '
'not match the expected value: '
'%(expected_signature)s for id: '
'%(instance_id)s. Request From: '
'%(remote_address)s'),
{'signature': signature,
'expected_signature': expected_signature,
'instance_id': instance_id,
'remote_address': remote_address})
msg = _('Invalid proxy request signature.')
raise webob.exc.HTTPForbidden(explanation=msg)
try:
meta_data = self.get_metadata_by_instance_id(instance_id,
remote_address)
except Exception:
LOG.exception(_LE('Failed to get metadata for instance id: %s'),
instance_id)
msg = _('An unknown error has occurred. '
'Please try your request again.')
raise webob.exc.HTTPInternalServerError(
explanation=six.text_type(msg))
if meta_data is None:
LOG.error(_LE('Failed to get metadata for instance id: %s'),
instance_id)
elif meta_data.instance.project_id != tenant_id:
LOG.warning(_LW("Tenant_id %(tenant_id)s does not match tenant_id "
"of instance %(instance_id)s."),
{'tenant_id': tenant_id, 'instance_id': instance_id})
# causes a 404 to be raised
meta_data = None
return meta_data<|fim▁end|> | return req.response |
<|file_name|>robotFactory.py<|end_file_name|><|fim▁begin|>from .stt import ApiRobot
from .stt import BingRobot
from .stt import WatsonRobot
from .stt import WitaiRobot
from .stt import GoogleRobot<|fim▁hole|> configSTT = config['stt']
if configSTT == 'bing':
return BingRobot(config['bing'], speaker, actions)
if configSTT == 'watson':
return WatsonRobot(config['watson-stt'], speaker, actions)
if configSTT == 'witai':
return WitaiRobot(config['witai-stt'], speaker, actions)
if configSTT == 'google':
return GoogleRobot(config['google-stt'], speaker, actions)
return ApiRobot(config['apiai'], speaker, actions)<|fim▁end|> |
class RobotFactory:
@staticmethod
def produce(config, speaker, actions): |
<|file_name|>gl_interface.cpp<|end_file_name|><|fim▁begin|>//
//---------------------------------------------------------------------------
//
// Copyright(C) 2005-2016 Christoph Oelckers
// All rights reserved.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see http://www.gnu.org/licenses/
//
//--------------------------------------------------------------------------
//
/*
** r_opengl.cpp
**
** OpenGL system interface
**
*/
#include "gl/system/gl_system.h"
#include "tarray.h"
#include "doomtype.h"
#include "m_argv.h"
#include "zstring.h"
#include "version.h"
#include "i_system.h"
#include "v_text.h"
#include "r_data/r_translate.h"
#include "gl/system/gl_interface.h"
#include "gl/system/gl_cvars.h"
void gl_PatchMenu();
static TArray<FString> m_Extensions;
RenderContext gl;
//==========================================================================
//
//
//
//==========================================================================
static void CollectExtensions()
{
const char *extension;
int max = 0;
glGetIntegerv(GL_NUM_EXTENSIONS, &max);
if (0 == max)
{
// Try old method to collect extensions
const char *supported = (char *)glGetString(GL_EXTENSIONS);
if (nullptr != supported)
{
char *extensions = new char[strlen(supported) + 1];
strcpy(extensions, supported);
char *extension = strtok(extensions, " ");
while (extension)
{
m_Extensions.Push(FString(extension));
extension = strtok(nullptr, " ");
}
delete [] extensions;
}
}
else
{
// Use modern method to collect extensions
for (int i = 0; i < max; i++)
{
extension = (const char*)glGetStringi(GL_EXTENSIONS, i);
m_Extensions.Push(FString(extension));
}
}
}
//==========================================================================
//
//
//
//==========================================================================
static bool CheckExtension(const char *ext)
{
for (unsigned int i = 0; i < m_Extensions.Size(); ++i)
{
if (m_Extensions[i].CompareNoCase(ext) == 0) return true;
}
return false;
}
//==========================================================================
//
//
//
//==========================================================================
static void InitContext()
{
gl.flags=0;
}
//==========================================================================
//
//
//
//==========================================================================
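// If a core GL entry point was not loaded, fall back to its EXT-suffixed
// variant so the legacy code path can use a single name for both.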
#define FUDGE_FUNC(name, ext) if (_ptrc_##name == NULL) _ptrc_##name = _ptrc_##name##ext;
void gl_LoadExtensions()
{
InitContext();
CollectExtensions();
const char *version = Args->CheckValue("-glversion");
const char *glversion = (const char*)glGetString(GL_VERSION);
if (version == NULL)
{
version = glversion;
}
else
{
double v1 = strtod(version, NULL);
double v2 = strtod(glversion, NULL);
if (v2 < v1) version = glversion;
else Printf("Emulating OpenGL v %s\n", version);
}
float gl_version = (float)strtod(version, NULL) + 0.01f;
	// Don't even start if it's lower than 2.0 or no framebuffers are available (The framebuffer extension is needed for glGenerateMipmapEXT!)
if ((gl_version < 2.0f || !CheckExtension("GL_EXT_framebuffer_object")) && gl_version < 3.0f)
{
I_FatalError("Unsupported OpenGL version.\nAt least OpenGL 2.0 with framebuffer support is required to run " GAMENAME ".\n");
}
// add 0.01 to account for roundoff errors making the number a tad smaller than the actual version
gl.glslversion = strtod((char*)glGetString(GL_SHADING_LANGUAGE_VERSION), NULL) + 0.01f;
gl.vendorstring = (char*)glGetString(GL_VENDOR);
// first test for optional features
if (CheckExtension("GL_ARB_texture_compression")) gl.flags |= RFL_TEXTURE_COMPRESSION;
if (CheckExtension("GL_EXT_texture_compression_s3tc")) gl.flags |= RFL_TEXTURE_COMPRESSION_S3TC;
if ((gl_version >= 3.3f || CheckExtension("GL_ARB_sampler_objects")) && !Args->CheckParm("-nosampler"))
{
gl.flags |= RFL_SAMPLER_OBJECTS;
}
// The minimum requirement for the modern render path are GL 3.0 + uniform buffers. Also exclude the Linux Mesa driver at GL 3.0 because it errors out on shader compilation.
if (gl_version < 3.0f || (gl_version < 3.1f && (!CheckExtension("GL_ARB_uniform_buffer_object") || strstr(gl.vendorstring, "X.Org") != nullptr)))
{
gl.legacyMode = true;
gl.lightmethod = LM_LEGACY;
gl.buffermethod = BM_LEGACY;
gl.glslversion = 0;
gl.flags |= RFL_NO_CLIP_PLANES;
}
else
{
gl.legacyMode = false;
gl.lightmethod = LM_DEFERRED;
gl.buffermethod = BM_DEFERRED;
if (gl_version < 4.f)
{
#ifdef _WIN32
if (strstr(gl.vendorstring, "ATI Tech"))
{
gl.flags |= RFL_NO_CLIP_PLANES; // gl_ClipDistance is horribly broken on ATI GL3 drivers for Windows.
}
#endif
}
else if (gl_version < 4.5f)
{
// don't use GL 4.x features when running a GL 3.x context.
if (CheckExtension("GL_ARB_buffer_storage"))
{
// work around a problem with older AMD drivers: Their implementation of shader storage buffer objects is piss-poor and does not match uniform buffers even closely.
// Recent drivers, GL 4.4 don't have this problem, these can easily be recognized by also supporting the GL_ARB_buffer_storage extension.
if (CheckExtension("GL_ARB_shader_storage_buffer_object"))
{
// Shader storage buffer objects are broken on current Intel drivers.
if (strstr(gl.vendorstring, "Intel") == NULL)
{
gl.flags |= RFL_SHADER_STORAGE_BUFFER;
}
}
gl.flags |= RFL_BUFFER_STORAGE;
gl.lightmethod = LM_DIRECT;
gl.buffermethod = BM_PERSISTENT;
}
}
else
{
// Assume that everything works without problems on GL 4.5 drivers where these things are core features.
gl.flags |= RFL_SHADER_STORAGE_BUFFER | RFL_BUFFER_STORAGE;
gl.lightmethod = LM_DIRECT;
gl.buffermethod = BM_PERSISTENT;
}
if (gl_version >= 4.3f || CheckExtension("GL_ARB_invalidate_subdata")) gl.flags |= RFL_INVALIDATE_BUFFER;
if (gl_version >= 4.3f || CheckExtension("GL_KHR_debug")) gl.flags |= RFL_DEBUG;
const char *lm = Args->CheckValue("-lightmethod");
if (lm != NULL)
{
if (!stricmp(lm, "deferred") && gl.lightmethod == LM_DIRECT) gl.lightmethod = LM_DEFERRED;
}
lm = Args->CheckValue("-buffermethod");
if (lm != NULL)
{
if (!stricmp(lm, "deferred") && gl.buffermethod == BM_PERSISTENT) gl.buffermethod = BM_DEFERRED;
}
}
int v;
if (!gl.legacyMode && !(gl.flags & RFL_SHADER_STORAGE_BUFFER))
{
glGetIntegerv(GL_MAX_FRAGMENT_UNIFORM_COMPONENTS, &v);<|fim▁hole|> gl.uniformblockalignment = v;
}
else
{
gl.maxuniforms = 0;
gl.maxuniformblock = 0;
gl.uniformblockalignment = 0;
}
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &gl.max_texturesize);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
if (gl.legacyMode)
{
// fudge a bit with the framebuffer stuff to avoid redundancies in the main code. Some of the older cards do not have the ARB stuff but the calls are nearly identical.
FUDGE_FUNC(glGenerateMipmap, EXT);
FUDGE_FUNC(glGenFramebuffers, EXT);
FUDGE_FUNC(glBindFramebuffer, EXT);
FUDGE_FUNC(glDeleteFramebuffers, EXT);
FUDGE_FUNC(glFramebufferTexture2D, EXT);
FUDGE_FUNC(glFramebufferRenderbuffer, EXT);
FUDGE_FUNC(glGenRenderbuffers, EXT);
FUDGE_FUNC(glDeleteRenderbuffers, EXT);
FUDGE_FUNC(glRenderbufferStorage, EXT);
FUDGE_FUNC(glBindRenderbuffer, EXT);
FUDGE_FUNC(glCheckFramebufferStatus, EXT);
gl_PatchMenu();
}
}
//==========================================================================
//
//
//
//==========================================================================
void gl_PrintStartupLog()
{
int v = 0;
if (!gl.legacyMode) glGetIntegerv(GL_CONTEXT_PROFILE_MASK, &v);
Printf ("GL_VENDOR: %s\n", glGetString(GL_VENDOR));
Printf ("GL_RENDERER: %s\n", glGetString(GL_RENDERER));
Printf ("GL_VERSION: %s (%s profile)\n", glGetString(GL_VERSION), (v & GL_CONTEXT_CORE_PROFILE_BIT)? "Core" : "Compatibility");
Printf ("GL_SHADING_LANGUAGE_VERSION: %s\n", glGetString(GL_SHADING_LANGUAGE_VERSION));
Printf (PRINT_LOG, "GL_EXTENSIONS:");
for (unsigned i = 0; i < m_Extensions.Size(); i++)
{
Printf(PRINT_LOG, " %s", m_Extensions[i].GetChars());
}
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &v);
Printf("\nMax. texture size: %d\n", v);
glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &v);
Printf ("Max. texture units: %d\n", v);
glGetIntegerv(GL_MAX_VARYING_FLOATS, &v);
Printf ("Max. varying: %d\n", v);
if (!gl.legacyMode && !(gl.flags & RFL_SHADER_STORAGE_BUFFER))
{
glGetIntegerv(GL_MAX_UNIFORM_BLOCK_SIZE, &v);
Printf ("Max. uniform block size: %d\n", v);
glGetIntegerv(GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT, &v);
Printf ("Uniform block alignment: %d\n", v);
}
if (gl.flags & RFL_SHADER_STORAGE_BUFFER)
{
glGetIntegerv(GL_MAX_COMBINED_SHADER_STORAGE_BLOCKS, &v);
Printf("Max. combined shader storage blocks: %d\n", v);
glGetIntegerv(GL_MAX_VERTEX_SHADER_STORAGE_BLOCKS, &v);
Printf("Max. vertex shader storage blocks: %d\n", v);
}
// For shader-less, the special alphatexture translation must be changed to actually set the alpha, because it won't get translated by a shader.
if (gl.legacyMode)
{
FRemapTable *remap = translationtables[TRANSLATION_Standard][8];
for (int i = 0; i < 256; i++)
{
remap->Remap[i] = i;
remap->Palette[i] = PalEntry(i, 255, 255, 255);
}
}
}<|fim▁end|> | gl.maxuniforms = v;
glGetIntegerv(GL_MAX_UNIFORM_BLOCK_SIZE, &v);
gl.maxuniformblock = v;
glGetIntegerv(GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT, &v); |
<|file_name|>tableBodyClasses.js<|end_file_name|><|fim▁begin|>"use strict";
<|fim▁hole|>exports.default = void 0;
var _unstyled = require("@material-ui/unstyled");
function getTableBodyUtilityClass(slot) {
return (0, _unstyled.generateUtilityClass)('MuiTableBody', slot);
}
const tableBodyClasses = (0, _unstyled.generateUtilityClasses)('MuiTableBody', ['root']);
var _default = tableBodyClasses;
exports.default = _default;<|fim▁end|> | Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getTableBodyUtilityClass = getTableBodyUtilityClass; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- encoding: utf-8 -*-<|fim▁hole|># Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from py2neo.legacy.batch import *
from py2neo.legacy.core import *
from py2neo.legacy.index import *
__all__ = ["LegacyResource", "LegacyNode", "Index", "LegacyReadBatch", "LegacyWriteBatch"]<|fim▁end|> | |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate serde_codegen;
extern crate serde_json;
extern crate glob;
use std::env;
use std::fs;
use std::path::Path;
/// generate an enum of Events
fn main() {
for entry in glob::glob("src/**/*.rs.in").expect("Failed to read glob pattern") {
println!("cargo:rerun-if-changed={}", entry.unwrap().display());
}
let out_dir = env::var_os("OUT_DIR").unwrap();
// Switch to our `src` directory so that we have the right base for our
// globs, and so that we won't need to strip `src/` off every path.
env::set_current_dir("src").unwrap();
for entry in glob::glob("**/*.rs.in").expect("Failed to read glob pattern") {
match entry {
Ok(src) => {
let mut dst = Path::new(&out_dir).join(&src);
// Change ".rs.in" to ".rs".
dst.set_file_name(src.file_stem().expect("Failed to get file stem"));<|fim▁hole|> fs::create_dir_all(dst.parent().unwrap()).unwrap();
// Process our source file.
serde_codegen::expand(&src, &dst).unwrap();
}
Err(e) => {
panic!("Error globbing: {}", e);
}
}
}
}<|fim▁end|> | dst.set_extension("rs");
// Make sure our target directory exists. We only need
// this if there are extra nested sudirectories under src/. |
<|file_name|>check_data.py<|end_file_name|><|fim▁begin|>from data import *
from draw import *
img, hiden_x = get_img_class()
print img.shape
print img
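# Pick a random example index so a single generated sample can be inspected.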
d_idx = np.random.randint(0, 50)
<|fim▁hole|>x_x, obs_x, obs_y, obs_tfs, new_ob_x, new_ob_y, new_ob_tf, imgs = gen_data()
print show_dim(x_x)
print show_dim(obs_x)
print show_dim(obs_y)
print show_dim(obs_tfs)
print show_dim(new_ob_x)
print show_dim(new_ob_y)
print show_dim(new_ob_tf)
obss = zip([np.argmax(obx[d_idx]) for obx in obs_x],
[np.argmax(oby[d_idx]) for oby in obs_y],
[obtf[d_idx] for obtf in obs_tfs])
obss = [((x[0],x[1]), x[2]) for x in obss]
print "hidden number value ", np.argmax(x_x[d_idx])
draw_obs(obss, "test_obs.png")
img = imgs[d_idx]
draw(np.reshape(img, [L,L,1]), "test_orig.png")
print img<|fim▁end|> | |
<|file_name|>unsized-bare-typaram.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>fn foo<Sized? T>() { bar::<T>() } //~ ERROR the trait `core::kinds::Sized` is not implemented
fn main() { }<|fim▁end|> | // option. This file may not be copied, modified, or distributed
// except according to those terms.
fn bar<T: Sized>() { } |
<|file_name|>CopyShader.js<|end_file_name|><|fim▁begin|>/**
* @author alteredq / http://alteredqualia.com/
*
* Full-screen textured quad shader
*/
var THREE = window.THREE || require('three');
THREE.CopyShader = {
uniforms: {
"tDiffuse": { type: "t", value: null },
"opacity": { type: "f", value: 1.0 }
},
vertexShader: [
"varying vec2 vUv;",
"void main() {",
"vUv = uv;",
"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"<|fim▁hole|>
].join("\n"),
fragmentShader: [
"uniform float opacity;",
"uniform sampler2D tDiffuse;",
"varying vec2 vUv;",
"void main() {",
"vec4 texel = texture2D( tDiffuse, vUv );",
"gl_FragColor = opacity * texel;",
"}"
].join("\n")
};<|fim▁end|> | |
<|file_name|>test_graph.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import graph
class TestGraph(unittest.TestCase):
'''
Unit test for graph.py
'''
def setUp(self):
'''<|fim▁hole|> '''
test_graph_data = {'1': [], '2': ['1'], '3': ['1', '4'], '4': [],
'5': ['2', '3'], '6': ['3']}
self.g = graph.Graph()
self.g.construct_from_edgeset(test_graph_data)
def test_pred_nodes(self):
preds = set(self.g.pred_nodes('5'))
expected = set(['1', '4', '2', '3', '5'])
self.assertEqual(preds, expected)
def test_succ_nodes(self):
succs = set(self.g.succ_nodes('1'))
expected = set(['1', '2', '3', '5', '6'])
self.assertEqual(succs, expected)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | This method sets up the test graph data |
<|file_name|>run.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package main
import (
"fmt"
"net/rpc"
"os"
"path/filepath"
"launchpad.net/gnuflag"
"github.com/wallyworld/core/cmd"
"github.com/wallyworld/core/names"
"github.com/wallyworld/core/utils/exec"
"github.com/wallyworld/core/utils/fslock"
"github.com/wallyworld/core/worker/uniter"
)
var (
AgentDir = "/var/lib/juju/agents"
LockDir = "/var/lib/juju/locks"
)
type RunCommand struct {
cmd.CommandBase
unit string
commands string
showHelp bool
noContext bool
}
const runCommandDoc = `
Run the specified commands in the hook context for the unit.
unit-name can be either the unit tag:
e.g. unit-ubuntu-0
or the unit id:
e.g. ubuntu/0
If --no-context is specified, the <unit-name> positional
argument is not needed.
The commands are executed with '/bin/bash -s', and the output returned.
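For example (an illustrative invocation):
    juju-run ubuntu/0 "uname -a"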
`
// Info returns usage information for the command.
func (c *RunCommand) Info() *cmd.Info {
return &cmd.Info{
Name: "juju-run",
Args: "<unit-name> <commands>",
Purpose: "run commands in a unit's hook context",
Doc: runCommandDoc,
}
}
func (c *RunCommand) SetFlags(f *gnuflag.FlagSet) {
f.BoolVar(&c.showHelp, "h", false, "show help on juju-run")
f.BoolVar(&c.showHelp, "help", false, "")
f.BoolVar(&c.noContext, "no-context", false, "do not run the command in a unit context")
}
func (c *RunCommand) Init(args []string) error {
// make sure we aren't in an existing hook context
if contextId, err := getenv("JUJU_CONTEXT_ID"); err == nil && contextId != "" {
return fmt.Errorf("juju-run cannot be called from within a hook, have context %q", contextId)
}
if !c.noContext {
if len(args) < 1 {
return fmt.Errorf("missing unit-name")
}
c.unit, args = args[0], args[1:]
// If the command line param is a unit id (like service/2) we need to
// change it to the unit tag as that is the format of the agent directory
// on disk (unit-service-2).
if names.IsUnit(c.unit) {
c.unit = names.UnitTag(c.unit)
}
}
if len(args) < 1 {
return fmt.Errorf("missing commands")
}
c.commands, args = args[0], args[1:]
return cmd.CheckEmpty(args)
}
func (c *RunCommand) Run(ctx *cmd.Context) error {<|fim▁hole|> if c.showHelp {
return gnuflag.ErrHelp
}
var result *exec.ExecResponse
var err error
if c.noContext {
result, err = c.executeNoContext()
} else {
result, err = c.executeInUnitContext()
}
if err != nil {
return err
}
ctx.Stdout.Write(result.Stdout)
ctx.Stderr.Write(result.Stderr)
return cmd.NewRcPassthroughError(result.Code)
}
func (c *RunCommand) executeInUnitContext() (*exec.ExecResponse, error) {
unitDir := filepath.Join(AgentDir, c.unit)
logger.Debugf("looking for unit dir %s", unitDir)
// make sure the unit exists
_, err := os.Stat(unitDir)
if os.IsNotExist(err) {
return nil, fmt.Errorf("unit %q not found on this machine", c.unit)
} else if err != nil {
return nil, err
}
socketPath := filepath.Join(unitDir, uniter.RunListenerFile)
// make sure the socket exists
client, err := rpc.Dial("unix", socketPath)
if err != nil {
return nil, err
}
defer client.Close()
var result exec.ExecResponse
err = client.Call(uniter.JujuRunEndpoint, c.commands, &result)
return &result, err
}
func getLock() (*fslock.Lock, error) {
return fslock.NewLock(LockDir, "uniter-hook-execution")
}
func (c *RunCommand) executeNoContext() (*exec.ExecResponse, error) {
// Acquire the uniter hook execution lock to make sure we don't
// stomp on each other.
lock, err := getLock()
if err != nil {
return nil, err
}
err = lock.Lock("juju-run")
if err != nil {
return nil, err
}
defer lock.Unlock()
runCmd := `[ -f "/home/ubuntu/.juju-proxy" ] && . "/home/ubuntu/.juju-proxy"` + "\n" + c.commands
return exec.RunCommands(
exec.RunParams{
Commands: runCmd,
})
}<|fim▁end|> | |
<|file_name|>postgres_to_s3_transfer.py<|end_file_name|><|fim▁begin|>from urllib.parse import urlparse
import subprocess
import logging
import boto3
import airflow.hooks.base_hook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
import utils.helpers as helpers
class PostgresToS3Transfer(BaseOperator):
'''Dumps a Postgres database to a S3 key
:param url: URL to download. (templated)
:type url: str
:param postgres_conn_id: Postgres Connection's ID.
:type postgres_conn_id: str
:param tables: List of tables to export (optional, default exports all
tables).
:type tables: list of str
:param s3_conn_id: S3 Connection's ID. It needs a JSON in the `extra` field
with `aws_access_key_id` and `aws_secret_access_key`
:type s3_conn_id: str
:param s3_url: S3 url (e.g. `s3://my_bucket/my_key.zip`) (templated)
:type s3_url: str
'''
template_fields = ('s3_url',)
@apply_defaults
def __init__(self, postgres_conn_id, s3_conn_id, s3_url, tables=None, *args, **kwargs):
super(PostgresToS3Transfer, self).__init__(*args, **kwargs)
self.postgres_conn_id = postgres_conn_id
self.tables = tables
self.s3_conn_id = s3_conn_id
self.s3_url = s3_url
def execute(self, context):
s3 = self._load_s3_connection(self.s3_conn_id)
s3_bucket, s3_key = self._parse_s3_url(self.s3_url)
command = [
'pg_dump',
'-Fc',
]
if self.tables:
tables_params = ['--table={}'.format(table) for table in self.tables]
command.extend(tables_params)
logging.info('Dumping database "%s" into "%s"', self.postgres_conn_id, self.s3_url)
logging.info('Command: %s <POSTGRES_URI>', ' '.join(command))
command.append(helpers.get_postgres_uri(self.postgres_conn_id))
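        # Stream pg_dump's stdout straight into S3 so the dump never has to
        # fit on local disk.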
with subprocess.Popen(command, stdout=subprocess.PIPE).stdout as dump_file:
s3.Bucket(s3_bucket) \
.upload_fileobj(dump_file, s3_key)
@staticmethod<|fim▁hole|> if not parsed_url.netloc:
raise airflow.exceptions.AirflowException('Please provide a bucket_name')
else:
bucket_name = parsed_url.netloc
key = parsed_url.path.strip('/')
return (bucket_name, key)
def _load_s3_connection(self, conn_id):
'''
Parses the S3 connection and returns a Boto3 resource.
This should be implementing using the S3Hook, but it currently uses
boto (not boto3) which doesn't allow streaming.
:return: Boto3 resource
:rtype: boto3.resources.factory.s3.ServiceResource
'''
conn = airflow.hooks.base_hook.BaseHook.get_connection(conn_id)
extra_dejson = conn.extra_dejson
key_id = extra_dejson['aws_access_key_id']
access_key = extra_dejson['aws_secret_access_key']
s3 = boto3.resource(
's3',
aws_access_key_id=key_id,
aws_secret_access_key=access_key
)
return s3<|fim▁end|> | def _parse_s3_url(s3_url):
parsed_url = urlparse(s3_url) |
<|file_name|>fake_lcd.go<|end_file_name|><|fim▁begin|><|fim▁hole|>
import "github.com/augustoroman/serial_lcd"
type FakeLcd struct{}
func (f FakeLcd) SetBG(r, g, b uint8) error { return nil }
func (f FakeLcd) SetOn(On bool) error { return nil }
func (f FakeLcd) SetBrightness(b uint8) error { return nil }
func (f FakeLcd) SetContrast(c uint8) error { return nil }
func (f FakeLcd) SetAutoscroll(On bool) error { return nil }
func (f FakeLcd) SetSize(cols, rows uint8) error { return nil }
func (f FakeLcd) Clear() error { return nil }
func (f FakeLcd) Home() error { return nil }
func (f FakeLcd) MoveTo(col, row uint8) error { return nil }
func (f FakeLcd) MoveForward() error { return nil }
func (f FakeLcd) MoveBack() error { return nil }
func (f FakeLcd) Write(b []byte) (int, error) { return len(b), nil }
func (f FakeLcd) CreateCustomChar(spot uint8, c serial_lcd.Char) error { return nil }<|fim▁end|> | package main |
<|file_name|>feed_parse_extractEmergencyExitsReleaseBlog.py<|end_file_name|><|fim▁begin|><|fim▁hole|> vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol or frag) or 'preview' in item['title'].lower():
return None
return False<|fim▁end|> | def extractEmergencyExitsReleaseBlog(item):
"""
""" |
<|file_name|>PaiCard.tsx<|end_file_name|><|fim▁begin|>import { useBackend, useLocalState } from '../backend';
import { Box, Button, LabeledList, NoticeBox, Section, Stack, Tabs } from '../components';
import { Window } from '../layouts';
type PaiCardData = {
candidates: Candidate[];
pai: Pai;
};
type Candidate = {
comments: string;
description: string;
key: string;
name: string;
};
type Pai = {
can_holo: number;
dna: string;
emagged: number;
laws: string;
master: string;
name: string;
transmit: number;
receive: number;
};
export const PaiCard = (_, context) => {
const { data } = useBackend<PaiCardData>(context);
const { pai } = data;
return (
<Window width={400} height={400} title="pAI Options Menu">
<Window.Content>{!pai ? <PaiDownload /> : <PaiOptions />}</Window.Content>
</Window>
);
};
/** Gives a list of candidates as cards */
const PaiDownload = (_, context) => {
const { act, data } = useBackend<PaiCardData>(context);
const { candidates = [] } = data;
return (
<Section
buttons={
<Button
icon="concierge-bell"
onClick={() => act('request')}
tooltip="Request candidates.">
Request
</Button>
}
fill<|fim▁hole|> scrollable
title="Viewing pAI Candidates">
{!candidates.length ? (
<NoticeBox>None found!</NoticeBox>
) : (
<Stack fill vertical>
{candidates.map((candidate, index) => {
return (
<Stack.Item key={index}>
<CandidateDisplay candidate={candidate} />
</Stack.Item>
);
})}
</Stack>
)}
</Section>
);
};
/** Individual candidate card. Since this info refreshes, the comments
 * and description get their own tabs, which reads much better for
 * longer entries.
*/
const CandidateDisplay = (props, context) => {
const [tab, setTab] = useLocalState(context, 'tab', 'description');
const { candidate } = props;
const { comments, description, name } = candidate;
const onTabClickHandler = (tab: string) => {
setTab(tab);
};
return (
<Box
style={{
'background': '#111111',
'border': '1px solid #4972a1',
'border-radius': '5px',
'padding': '1rem',
}}>
<Section
buttons={
<CandidateTabs
candidate={candidate}
onTabClick={onTabClickHandler}
tab={tab}
/>
}
fill
height={12}
scrollable
title="Candidate">
<Box color="green" fontSize="16px">
Name: {name || 'Randomized Name'}
</Box>
{tab === 'description'
          ? `Description: ${description || 'None'}`
          : `OOC Comments: ${comments || 'None'}`}
</Section>
</Box>
);
};
/** Tabs for the candidate */
const CandidateTabs = (props, context) => {
const { act } = useBackend<PaiCardData>(context);
const { candidate, onTabClick, tab } = props;
const { key } = candidate;
return (
<Stack>
<Stack.Item>
<Tabs>
<Tabs.Tab
onClick={() => {
onTabClick('description');
}}
selected={tab === 'description'}>
Description
</Tabs.Tab>
<Tabs.Tab
onClick={() => {
onTabClick('comments');
}}
selected={tab === 'comments'}>
OOC
</Tabs.Tab>
</Tabs>
</Stack.Item>
<Stack.Item>
<Button
icon="download"
onClick={() => act('download', { key })}
tooltip="Accepts this pAI candidate.">
Download
</Button>
</Stack.Item>
</Stack>
);
};
/** Once a pAI has been loaded, you can alter its settings here */
const PaiOptions = (_, context) => {
const { act, data } = useBackend<PaiCardData>(context);
const { pai } = data;
const { can_holo, dna, emagged, laws, master, name, transmit, receive } = pai;
return (
<Section fill scrollable title={name}>
<LabeledList>
<LabeledList.Item label="Master">
{master || (
<Button icon="dna" onClick={() => act('set_dna')}>
Imprint
</Button>
)}
</LabeledList.Item>
{!!master && <LabeledList.Item label="DNA">{dna}</LabeledList.Item>}
<LabeledList.Item label="Laws">{laws}</LabeledList.Item>
<LabeledList.Item label="Holoform">
<Button
icon={can_holo ? 'toggle-on' : 'toggle-off'}
onClick={() => act('toggle_holo')}
selected={can_holo}>
Toggle
</Button>
</LabeledList.Item>
<LabeledList.Item label="Transmit">
<Button
icon={transmit ? 'toggle-on' : 'toggle-off'}
onClick={() => act('toggle_radio', { option: 'transmit' })}
selected={transmit}>
Toggle
</Button>
</LabeledList.Item>
<LabeledList.Item label="Receive">
<Button
icon={receive ? 'toggle-on' : 'toggle-off'}
onClick={() => act('toggle_radio', { option: 'receive' })}
selected={receive}>
Toggle
</Button>
</LabeledList.Item>
<LabeledList.Item label="Troubleshoot">
<Button icon="comment" onClick={() => act('fix_speech')}>
Fix Speech
</Button>
<Button icon="edit" onClick={() => act('set_laws')}>
Set Laws
</Button>
</LabeledList.Item>
<LabeledList.Item label="Personality">
<Button icon="trash" onClick={() => act('wipe_pai')}>
Erase
</Button>
</LabeledList.Item>
</LabeledList>
{!!emagged && (
<Button color="bad" disabled icon="bug" mt={1}>
Malicious Software Detected
</Button>
)}
</Section>
);
};<|fim▁end|> | |
<|file_name|>BuiltIn.py<|end_file_name|><|fim▁begin|># Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import time
import token
from tokenize import generate_tokens, untokenize
from StringIO import StringIO
from robotide.lib.robot.api import logger
from robotide.lib.robot.errors import (ContinueForLoop, DataError, ExecutionFailed,
ExecutionFailures, ExecutionPassed, ExitForLoop,
PassExecution, ReturnFromKeyword)
from robotide.lib.robot.running import Keyword, RUN_KW_REGISTER
from robotide.lib.robot.running.context import EXECUTION_CONTEXTS
from robotide.lib.robot.running.usererrorhandler import UserErrorHandler
from robotide.lib.robot.utils import (asserts, DotDict, escape, format_assign_message,
get_error_message, get_time, is_falsy, is_integer,
is_string, is_truthy, is_unicode, JYTHON, Matcher,
normalize, NormalizedDict, parse_time, prepr,
RERAISED_EXCEPTIONS, plural_or_not as s,
secs_to_timestr, seq2str, split_from_equals,
timestr_to_secs, type_name, unic)
from robotide.lib.robot.variables import (is_list_var, is_var, DictVariableTableValue,
VariableTableValue, VariableSplitter,
variable_not_found)
from robotide.lib.robot.version import get_version
if JYTHON:
from java.lang import String, Number
# TODO: The name of this decorator should be changed. It is used for avoiding
# arguments to be resolved by many other keywords than run keyword variants.
# Should also consider:
# - Exposing this functionality to external libraries. Would require doc
# enhancements and clean way to expose variables to make resolving them
# based on needs easier.
# - Removing the functionality that run keyword variants can be overridded
# by custom keywords without a warning.
def run_keyword_variant(resolve):
def decorator(method):
RUN_KW_REGISTER.register_run_keyword('BuiltIn', method.__name__, resolve)
return method
return decorator
class _BuiltInBase(object):
@property
def _context(self):
if EXECUTION_CONTEXTS.current is None:
raise RobotNotRunningError('Cannot access execution context')
return EXECUTION_CONTEXTS.current
@property
def _namespace(self):
return self._context.namespace
def _get_namespace(self, top=False):
ctx = EXECUTION_CONTEXTS.top if top else EXECUTION_CONTEXTS.current
return ctx.namespace
@property
def _variables(self):
return self._namespace.variables
def _matches(self, string, pattern):
# Must use this instead of fnmatch when string may contain newlines.
matcher = Matcher(pattern, caseless=False, spaceless=False)
return matcher.match(string)
def _is_true(self, condition):
if is_string(condition):
condition = self.evaluate(condition, modules='os,sys')
return bool(condition)
def _log_types(self, *args):
msg = ["Argument types are:"] + [self._get_type(a) for a in args]
self.log('\n'.join(msg), 'DEBUG')
def _get_type(self, arg):
# In IronPython type(u'x') is str. We want to report unicode anyway.
if is_unicode(arg):
return "<type 'unicode'>"
return str(type(arg))
class _Converter(_BuiltInBase):
def convert_to_integer(self, item, base=None):
"""Converts the given item to an integer number.
If the given item is a string, it is by default expected to be an
integer in base 10. There are two ways to convert from other bases:
- Give base explicitly to the keyword as ``base`` argument.
- Prefix the given string with the base so that ``0b`` means binary
(base 2), ``0o`` means octal (base 8), and ``0x`` means hex (base 16).
The prefix is considered only when ``base`` argument is not given and
may itself be prefixed with a plus or minus sign.
The syntax is case-insensitive and possible spaces are ignored.
Examples:
| ${result} = | Convert To Integer | 100 | | # Result is 100 |
| ${result} = | Convert To Integer | FF AA | 16 | # Result is 65450 |
| ${result} = | Convert To Integer | 100 | 8 | # Result is 64 |
| ${result} = | Convert To Integer | -100 | 2 | # Result is -4 |
| ${result} = | Convert To Integer | 0b100 | | # Result is 4 |
| ${result} = | Convert To Integer | -0x100 | | # Result is -256 |
See also `Convert To Number`, `Convert To Binary`, `Convert To Octal`,
`Convert To Hex`, and `Convert To Bytes`.
"""
self._log_types(item)
return self._convert_to_integer(item, base)
def _convert_to_integer(self, orig, base=None):
try:
item = self._handle_java_numbers(orig)
item, base = self._get_base(item, base)
if base:
return int(item, self._convert_to_integer(base))
return int(item)
except:
raise RuntimeError("'%s' cannot be converted to an integer: %s"
% (orig, get_error_message()))
def _handle_java_numbers(self, item):
if not JYTHON:
return item
if isinstance(item, String):
return unic(item)
if isinstance(item, Number):
return item.doubleValue()
return item
def _get_base(self, item, base):
if not is_string(item):
return item, base
item = normalize(item)
if item.startswith(('-', '+')):
sign = item[0]
item = item[1:]
else:
sign = ''
bases = {'0b': 2, '0o': 8, '0x': 16}
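        # An explicit base argument takes precedence; otherwise a recognized
        # 0b/0o/0x prefix is stripped and supplies the base.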
if base or not item.startswith(tuple(bases)):
return sign+item, base
return sign+item[2:], bases[item[:2]]
def convert_to_binary(self, item, base=None, prefix=None, length=None):
"""Converts the given item to a binary string.
The ``item``, with an optional ``base``, is first converted to an
integer using `Convert To Integer` internally. After that it
is converted to a binary number (base 2) represented as a
string such as ``1011``.
The returned value can contain an optional ``prefix`` and can be
required to be of minimum ``length`` (excluding the prefix and a
possible minus sign). If the value is initially shorter than
the required length, it is padded with zeros.
Examples:
| ${result} = | Convert To Binary | 10 | | | # Result is 1010 |
| ${result} = | Convert To Binary | F | base=16 | prefix=0b | # Result is 0b1111 |
| ${result} = | Convert To Binary | -2 | prefix=B | length=4 | # Result is -B0010 |
See also `Convert To Integer`, `Convert To Octal` and `Convert To Hex`.
"""
return self._convert_to_bin_oct_hex(bin, item, base, prefix, length)
def convert_to_octal(self, item, base=None, prefix=None, length=None):
"""Converts the given item to an octal string.
The ``item``, with an optional ``base``, is first converted to an
integer using `Convert To Integer` internally. After that it
is converted to an octal number (base 8) represented as a
string such as ``775``.
The returned value can contain an optional ``prefix`` and can be
required to be of minimum ``length`` (excluding the prefix and a
possible minus sign). If the value is initially shorter than
the required length, it is padded with zeros.
Examples:
| ${result} = | Convert To Octal | 10 | | | # Result is 12 |
| ${result} = | Convert To Octal | -F | base=16 | prefix=0 | # Result is -017 |
| ${result} = | Convert To Octal | 16 | prefix=oct | length=4 | # Result is oct0020 |
See also `Convert To Integer`, `Convert To Binary` and `Convert To Hex`.
"""
return self._convert_to_bin_oct_hex(oct, item, base, prefix, length)
def convert_to_hex(self, item, base=None, prefix=None, length=None,
lowercase=False):
"""Converts the given item to a hexadecimal string.
The ``item``, with an optional ``base``, is first converted to an
integer using `Convert To Integer` internally. After that it
is converted to a hexadecimal number (base 16) represented as
a string such as ``FF0A``.
The returned value can contain an optional ``prefix`` and can be
required to be of minimum ``length`` (excluding the prefix and a
possible minus sign). If the value is initially shorter than
the required length, it is padded with zeros.
By default the value is returned as an upper case string, but the
``lowercase`` argument a true value (see `Boolean arguments`) turns
the value (but not the given prefix) to lower case.
Examples:
| ${result} = | Convert To Hex | 255 | | | # Result is FF |
| ${result} = | Convert To Hex | -10 | prefix=0x | length=2 | # Result is -0x0A |
| ${result} = | Convert To Hex | 255 | prefix=X | lowercase=yes | # Result is Xff |
See also `Convert To Integer`, `Convert To Binary` and `Convert To Octal`.
"""
return self._convert_to_bin_oct_hex(hex, item, base, prefix, length,
lowercase)
def _convert_to_bin_oct_hex(self, method, item, base, prefix, length,
lowercase=False):
self._log_types(item)
ret = method(self._convert_to_integer(item, base)).upper().rstrip('L')
prefix = prefix or ''
if ret[0] == '-':
prefix = '-' + prefix
ret = ret[1:]
if len(ret) > 1: # oct(0) -> '0' (i.e. has no prefix)
prefix_length = {bin: 2, oct: 1, hex: 2}[method]
ret = ret[prefix_length:]
if length:
ret = ret.rjust(self._convert_to_integer(length), '0')
if is_truthy(lowercase):
ret = ret.lower()
return prefix + ret
def convert_to_number(self, item, precision=None):
"""Converts the given item to a floating point number.
If the optional ``precision`` is positive or zero, the returned number
is rounded to that number of decimal digits. Negative precision means
that the number is rounded to the closest multiple of 10 to the power
of the absolute precision.
Examples:
| ${result} = | Convert To Number | 42.512 | | # Result is 42.512 |
| ${result} = | Convert To Number | 42.512 | 1 | # Result is 42.5 |
| ${result} = | Convert To Number | 42.512 | 0 | # Result is 43.0 |
| ${result} = | Convert To Number | 42.512 | -1 | # Result is 40.0 |
Notice that machines generally cannot store floating point numbers
accurately. This may cause surprises with these numbers in general
and also when they are rounded. For more information see, for example,
these resources:
- http://docs.python.org/2/tutorial/floatingpoint.html
- http://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition
If you need an integer number, use `Convert To Integer` instead.
"""
self._log_types(item)
return self._convert_to_number(item, precision)
def _convert_to_number(self, item, precision=None):
number = self._convert_to_number_without_precision(item)
if precision:
number = round(number, self._convert_to_integer(precision))
return number
def _convert_to_number_without_precision(self, item):
try:
if JYTHON:
item = self._handle_java_numbers(item)
return float(item)
except:
error = get_error_message()
try:
return float(self._convert_to_integer(item))
except RuntimeError:
raise RuntimeError("'%s' cannot be converted to a floating "
"point number: %s" % (item, error))
def convert_to_string(self, item):
"""Converts the given item to a Unicode string.
Uses ``__unicode__`` or ``__str__`` method with Python objects and
``toString`` with Java objects.
Use `Encode String To Bytes` and `Decode Bytes To String` keywords
in ``String`` library if you need to convert between Unicode and byte
strings using different encodings. Use `Convert To Bytes` if you just
want to create byte strings.
"""
self._log_types(item)
return self._convert_to_string(item)
def _convert_to_string(self, item):
return unic(item)
def convert_to_boolean(self, item):
"""Converts the given item to Boolean true or false.
Handles strings ``True`` and ``False`` (case-insensitive) as expected,
otherwise returns item's
[http://docs.python.org/2/library/stdtypes.html#truth|truth value]
using Python's ``bool()`` method.
"""
self._log_types(item)
if is_string(item):
if item.upper() == 'TRUE':
return True
if item.upper() == 'FALSE':
return False
return bool(item)
def convert_to_bytes(self, input, input_type='text'):
u"""Converts the given ``input`` to bytes according to the ``input_type``.
Valid input types are listed below:
- ``text:`` Converts text to bytes character by character. All
characters with ordinal below 256 can be used and are converted to
bytes with same values. Many characters are easiest to represent
using escapes like ``\\x00`` or ``\\xff``.
- ``int:`` Converts integers separated by spaces to bytes. Similarly as
with `Convert To Integer`, it is possible to use binary, octal, or
hex values by prefixing the values with ``0b``, ``0o``, or ``0x``,
respectively.
- ``hex:`` Converts hexadecimal values to bytes. Single byte is always
two characters long (e.g. ``01`` or ``FF``). Spaces are ignored and
can be used freely as a visual separator.
- ``bin:`` Converts binary values to bytes. Single byte is always eight
characters long (e.g. ``00001010``). Spaces are ignored and can be
used freely as a visual separator.
In addition to giving the input as a string, it is possible to use
lists or other iterables containing individual characters or numbers.
In that case numbers do not need to be padded to certain length and
they cannot contain extra spaces.
Examples (last column shows returned bytes):
| ${bytes} = | Convert To Bytes | hyv\xe4 | | # hyv\\xe4 |
| ${bytes} = | Convert To Bytes | \\xff\\x07 | | # \\xff\\x07 |
| ${bytes} = | Convert To Bytes | 82 70 | int | # RF |
| ${bytes} = | Convert To Bytes | 0b10 0x10 | int | # \\x02\\x10 |
| ${bytes} = | Convert To Bytes | ff 00 07 | hex | # \\xff\\x00\\x07 |
| ${bytes} = | Convert To Bytes | 5246212121 | hex | # RF!!! |
| ${bytes} = | Convert To Bytes | 0000 1000 | bin | # \\x08 |
| ${input} = | Create List | 1 | 2 | 12 |
| ${bytes} = | Convert To Bytes | ${input} | int | # \\x01\\x02\\x0c |
| ${bytes} = | Convert To Bytes | ${input} | hex | # \\x01\\x02\\x12 |
Use `Encode String To Bytes` in ``String`` library if you need to
convert text to bytes using a certain encoding.
New in Robot Framework 2.8.2.
"""
try:
try:
ordinals = getattr(self, '_get_ordinals_from_%s' % input_type)
except AttributeError:
raise RuntimeError("Invalid input type '%s'." % input_type)
return ''.join(chr(o) for o in ordinals(input))
except:
raise RuntimeError("Creating bytes failed: %s" % get_error_message())
def _get_ordinals_from_text(self, input):
for char in input:
yield self._test_ordinal(ord(char), char, 'Character')
def _test_ordinal(self, ordinal, original, type):
if 0 <= ordinal <= 255:
return ordinal
raise RuntimeError("%s '%s' cannot be represented as a byte."
% (type, original))
def _get_ordinals_from_int(self, input):
if is_string(input):
input = input.split()
elif is_integer(input):
input = [input]
for integer in input:
ordinal = self._convert_to_integer(integer)
yield self._test_ordinal(ordinal, integer, 'Integer')
def _get_ordinals_from_hex(self, input):
for token in self._input_to_tokens(input, length=2):
ordinal = self._convert_to_integer(token, base=16)
yield self._test_ordinal(ordinal, token, 'Hex value')
def _get_ordinals_from_bin(self, input):
for token in self._input_to_tokens(input, length=8):
ordinal = self._convert_to_integer(token, base=2)
yield self._test_ordinal(ordinal, token, 'Binary value')
def _input_to_tokens(self, input, length):
if not is_string(input):
return input
input = ''.join(input.split())
if len(input) % length != 0:
raise RuntimeError('Expected input to be multiple of %d.' % length)
return (input[i:i+length] for i in xrange(0, len(input), length))
def create_list(self, *items):
"""Returns a list containing given items.
The returned list can be assigned both to ``${scalar}`` and ``@{list}``
variables.
Examples:
| @{list} = | Create List | a | b | c |
| ${scalar} = | Create List | a | b | c |
| ${ints} = | Create List | ${1} | ${2} | ${3} |
"""
return list(items)
@run_keyword_variant(resolve=0)
def create_dictionary(self, *items):
"""Creates and returns a dictionary based on given items.
Items are given using ``key=value`` syntax same way as ``&{dictionary}``
variables are created in the Variable table. Both keys and values
can contain variables, and possible equal sign in key can be escaped
with a backslash like ``escaped\\=key=value``. It is also possible to
get items from existing dictionaries by simply using them like
``&{dict}``.
If same key is used multiple times, the last value has precedence.
The returned dictionary is ordered, and values with strings as keys
can also be accessed using convenient dot-access syntax like
``${dict.key}``.
Examples:
| &{dict} = | Create Dictionary | key=value | foo=bar |
| Should Be True | ${dict} == {'key': 'value', 'foo': 'bar'} |
| &{dict} = | Create Dictionary | ${1}=${2} | &{dict} | foo=new |
| Should Be True | ${dict} == {1: 2, 'key': 'value', 'foo': 'new'} |
| Should Be Equal | ${dict.key} | value |
This keyword was changed in Robot Framework 2.9 in many ways:
- Moved from ``Collections`` library to ``BuiltIn``.
- Support also non-string keys in ``key=value`` syntax.
- Deprecated old syntax to give keys and values separately.
- Returned dictionary is ordered and dot-accessible.
"""
separate, combined = self._split_dict_items(items)
if separate:
self.log("Giving keys and values separately to 'Create Dictionary' "
"keyword is deprecated. Use 'key=value' syntax instead.",
level='WARN')
separate = self._format_separate_dict_items(separate)
combined = DictVariableTableValue(combined).resolve(self._variables)
result = DotDict(separate)
result.update(combined)
return result
def _split_dict_items(self, items):
separate = []
for item in items:
name, value = split_from_equals(item)
if value is not None or VariableSplitter(item).is_dict_variable():
break
separate.append(item)
return separate, items[len(separate):]
def _format_separate_dict_items(self, separate):
separate = self._variables.replace_list(separate)
if len(separate) % 2 != 0:
raise DataError('Expected even number of keys and values, got %d.'
% len(separate))
return [separate[i:i+2] for i in range(0, len(separate), 2)]
class _Verify(_BuiltInBase):
def _set_and_remove_tags(self, tags):
set_tags = [tag for tag in tags if not tag.startswith('-')]
remove_tags = [tag[1:] for tag in tags if tag.startswith('-')]
if remove_tags:
self.remove_tags(*remove_tags)
if set_tags:
self.set_tags(*set_tags)
def fail(self, msg=None, *tags):
"""Fails the test with the given message and optionally alters its tags.
The error message is specified using the ``msg`` argument.
It is possible to use HTML in the given error message, similarly
as with any other keyword accepting an error message, by prefixing
the error with ``*HTML*``.
It is possible to modify tags of the current test case by passing tags
after the message. Tags starting with a hyphen (e.g. ``-regression``)
are removed and others added. Tags are modified using `Set Tags` and
`Remove Tags` internally, and the semantics setting and removing them
are the same as with these keywords.
Examples:
| Fail | Test not ready | | | # Fails with the given message. |
| Fail | *HTML*<b>Test not ready</b> | | | # Fails using HTML in the message. |
| Fail | Test not ready | not-ready | | # Fails and adds 'not-ready' tag. |
| Fail | OS not supported | -regression | | # Removes tag 'regression'. |
| Fail | My message | tag | -t* | # Removes all tags starting with 't' except the newly added 'tag'. |
See `Fatal Error` if you need to stop the whole test execution.
Support for modifying tags was added in Robot Framework 2.7.4 and
HTML message support in 2.8.
"""
self._set_and_remove_tags(tags)
raise AssertionError(msg) if msg else AssertionError()
def fatal_error(self, msg=None):
"""Stops the whole test execution.
The test or suite where this keyword is used fails with the provided
message, and subsequent tests fail with a canned message.
Possible teardowns will nevertheless be executed.
See `Fail` if you only want to stop one test case unconditionally.
"""
error = AssertionError(msg) if msg else AssertionError()
error.ROBOT_EXIT_ON_FAILURE = True
raise error
def should_not_be_true(self, condition, msg=None):
"""Fails if the given condition is true.
See `Should Be True` for details about how ``condition`` is evaluated
and how ``msg`` can be used to override the default error message.
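Examples, mirroring those of `Should Be True`:
| Should Not Be True | ${rc} > 10 |
| Should Not Be True | '${status}' == 'FAIL' | # Strings must be quoted |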
"""
if not msg:
msg = "'%s' should not be true." % condition
asserts.fail_if(self._is_true(condition), msg)
def should_be_true(self, condition, msg=None):
"""Fails if the given condition is not true.
If ``condition`` is a string (e.g. ``${rc} < 10``), it is evaluated as
a Python expression as explained in `Evaluating expressions` and the
keyword status is decided based on the result. If a non-string item is
given, the status is taken directly from its
[http://docs.python.org/2/library/stdtypes.html#truth|truth value].
The default error message (``<condition> should be true``) is not very
informative, but it can be overridden with the ``msg`` argument.
Examples:
| Should Be True | ${rc} < 10 |
| Should Be True | '${status}' == 'PASS' | # Strings must be quoted |
| Should Be True | ${number} | # Passes if ${number} is not zero |
| Should Be True | ${list} | # Passes if ${list} is not empty |
Variables used like ``${variable}``, as in the examples above, are
replaced in the expression before evaluation. Variables are also
available in the evaluation namespace and can be accessed using special
syntax ``$variable``. This is a new feature in Robot Framework 2.9
and it is explained more thoroughly in `Evaluating expressions`.
Examples:
| Should Be True | $rc < 10 |
| Should Be True | $status == 'PASS' | # Expected string must be quoted |
Starting from Robot Framework 2.8, `Should Be True` automatically
imports Python's [http://docs.python.org/2/library/os.html|os] and
[http://docs.python.org/2/library/sys.html|sys] modules that contain
several useful attributes:
| Should Be True | os.linesep == '\\n' | # Unixy |
| Should Be True | os.linesep == '\\r\\n' | # Windows |
| Should Be True | sys.platform == 'darwin' | # OS X |
| Should Be True | sys.platform.startswith('java') | # Jython |
"""
if not msg:
msg = "'%s' should be true." % condition
asserts.fail_unless(self._is_true(condition), msg)
def should_be_equal(self, first, second, msg=None, values=True):
"""Fails if the given objects are unequal.
Optional ``msg`` and ``values`` arguments specify how to construct
the error message if this keyword fails:
- If ``msg`` is not given, the error message is ``<first> != <second>``.
- If ``msg`` is given and ``values`` gets a true value, the error
message is ``<msg>: <first> != <second>``.
- If ``msg`` is given and ``values`` gets a false value, the error
message is simply ``<msg>``.
``values`` is true by default, but can be turned to false by using,
for example, string ``false`` or ``no values``. See `Boolean arguments`
section for more details.
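Examples, illustrating the message variants described above
(``${x}`` is an arbitrary variable):
| Should Be Equal | ${x} | expected | | # Default '<x> != expected' message |
| Should Be Equal | ${x} | expected | Custom message | # 'Custom message: <x> != expected' |
| Should Be Equal | ${x} | expected | Custom message | values=False | # 'Custom message' only |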
"""
self._log_types(first, second)
self._should_be_equal(first, second, msg, values)
def _should_be_equal(self, first, second, msg, values):
asserts.fail_unless_equal(first, second, msg,
self._include_values(values))
def _include_values(self, values):
return is_truthy(values) and str(values).upper() != 'NO VALUES'
def should_not_be_equal(self, first, second, msg=None, values=True):
"""Fails if the given objects are equal.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
self._log_types(first, second)
self._should_not_be_equal(first, second, msg, values)
def _should_not_be_equal(self, first, second, msg, values):
asserts.fail_if_equal(first, second, msg, self._include_values(values))
def should_not_be_equal_as_integers(self, first, second, msg=None,
values=True, base=None):
"""Fails if objects are equal after converting them to integers.
See `Convert To Integer` for information how to convert integers from
other bases than 10 using ``base`` argument or ``0b/0o/0x`` prefixes.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
See `Should Be Equal As Integers` for some usage examples.
"""
self._log_types(first, second)
self._should_not_be_equal(self._convert_to_integer(first, base),
self._convert_to_integer(second, base),
msg, values)
def should_be_equal_as_integers(self, first, second, msg=None, values=True,
base=None):
"""Fails if objects are unequal after converting them to integers.
See `Convert To Integer` for information how to convert integers from
other bases than 10 using ``base`` argument or ``0b/0o/0x`` prefixes.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
Examples:
| Should Be Equal As Integers | 42 | ${42} | Error message |
| Should Be Equal As Integers | ABCD | abcd | base=16 |
| Should Be Equal As Integers | 0b1011 | 11 |
"""
self._log_types(first, second)
self._should_be_equal(self._convert_to_integer(first, base),
self._convert_to_integer(second, base),
msg, values)
def should_not_be_equal_as_numbers(self, first, second, msg=None,
values=True, precision=6):
"""Fails if objects are equal after converting them to real numbers.
The conversion is done with `Convert To Number` keyword using the
given ``precision``.
See `Should Be Equal As Numbers` for examples on how to use
``precision`` and why it does not always work as expected. See also
`Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
self._log_types(first, second)
first = self._convert_to_number(first, precision)
second = self._convert_to_number(second, precision)
self._should_not_be_equal(first, second, msg, values)
def should_be_equal_as_numbers(self, first, second, msg=None, values=True,
precision=6):
"""Fails if objects are unequal after converting them to real numbers.
The conversion is done with `Convert To Number` keyword using the
given ``precision``.
Examples:
| Should Be Equal As Numbers | ${x} | 1.1 | | # Passes if ${x} is 1.1 |
| Should Be Equal As Numbers | 1.123 | 1.1 | precision=1 | # Passes |
| Should Be Equal As Numbers | 1.123 | 1.4 | precision=0 | # Passes |
| Should Be Equal As Numbers | 112.3 | 75 | precision=-2 | # Passes |
As discussed in the documentation of `Convert To Number`, machines
generally cannot store floating point numbers accurately. Because of
this limitation, comparing floats for equality is problematic and
a correct approach to use depends on the context. This keyword uses
a very naive approach of rounding the numbers before comparing them,
which is both prone to rounding errors and does not work very well if
numbers are really big or small. For more information about comparing
floats, and ideas on how to implement your own context specific
comparison algorithm, see
http://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/.
See `Should Not Be Equal As Numbers` for a negative version of this
keyword and `Should Be Equal` for an explanation on how to override
the default error message with ``msg`` and ``values``.
"""
self._log_types(first, second)
first = self._convert_to_number(first, precision)
second = self._convert_to_number(second, precision)
self._should_be_equal(first, second, msg, values)
def should_not_be_equal_as_strings(self, first, second, msg=None, values=True):
"""Fails if objects are equal after converting them to strings.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
self._log_types(first, second)
first, second = [self._convert_to_string(i) for i in (first, second)]
self._should_not_be_equal(first, second, msg, values)
def should_be_equal_as_strings(self, first, second, msg=None, values=True):
"""Fails if objects are unequal after converting them to strings.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
self._log_types(first, second)
first, second = [self._convert_to_string(i) for i in (first, second)]
self._should_be_equal(first, second, msg, values)
def should_not_start_with(self, str1, str2, msg=None, values=True):
"""Fails if the string ``str1`` starts with the string ``str2``.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
msg = self._get_string_msg(str1, str2, msg, values, 'starts with')
asserts.fail_if(str1.startswith(str2), msg)
def should_start_with(self, str1, str2, msg=None, values=True):
"""Fails if the string ``str1`` does not start with the string ``str2``.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
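Example:
| Should Start With | ${output} | Hello |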
"""
msg = self._get_string_msg(str1, str2, msg, values, 'does not start with')
asserts.fail_unless(str1.startswith(str2), msg)
def should_not_end_with(self, str1, str2, msg=None, values=True):
"""Fails if the string ``str1`` ends with the string ``str2``.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
msg = self._get_string_msg(str1, str2, msg, values, 'ends with')
asserts.fail_if(str1.endswith(str2), msg)
def should_end_with(self, str1, str2, msg=None, values=True):
"""Fails if the string ``str1`` does not end with the string ``str2``.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
msg = self._get_string_msg(str1, str2, msg, values, 'does not end with')
asserts.fail_unless(str1.endswith(str2), msg)
def should_not_contain(self, item1, item2, msg=None, values=True):
"""Fails if ``item1`` contains ``item2`` one or more times.
Works with strings, lists, and anything that supports Python's ``in``
operator. See `Should Be Equal` for an explanation on how to override
the default error message with ``msg`` and ``values``.
Examples:
| Should Not Contain | ${output} | FAILED |
| Should Not Contain | ${some_list} | value |
"""
msg = self._get_string_msg(item1, item2, msg, values, 'contains')
asserts.fail_if(item2 in item1, msg)
def should_contain(self, item1, item2, msg=None, values=True):
"""Fails if ``item1`` does not contain ``item2`` one or more times.
Works with strings, lists, and anything that supports Python's ``in``
operator. See `Should Be Equal` for an explanation on how to override
the default error message with ``msg`` and ``values``.
Examples:
| Should Contain | ${output} | PASS |
| Should Contain | ${some_list} | value |
"""
msg = self._get_string_msg(item1, item2, msg, values, 'does not contain')
asserts.fail_unless(item2 in item1, msg)
def should_contain_x_times(self, item1, item2, count, msg=None):
"""Fails if ``item1`` does not contain ``item2`` ``count`` times.
Works with strings, lists and all objects that `Get Count` works
with. The default error message can be overridden with ``msg`` and
the actual count is always logged.
Examples:
| Should Contain X Times | ${output} | hello | 2 |
| Should Contain X Times | ${some list} | value | 3 |
"""
count = self._convert_to_integer(count)
x = self.get_count(item1, item2)
if not msg:
msg = "'%s' contains '%s' %d time%s, not %d time%s." \
% (unic(item1), unic(item2), x, s(x), count, s(count))
self.should_be_equal_as_integers(x, count, msg, values=False)
def get_count(self, item1, item2):
"""Returns and logs how many times ``item2`` is found from ``item1``.
This keyword works with Python strings and lists and all objects
that either have a ``count`` method or can be converted to Python lists.
Example:
| ${count} = | Get Count | ${some item} | interesting value |
| Should Be True | 5 < ${count} < 10 |
"""
if not hasattr(item1, 'count'):
try:
item1 = list(item1)
except:
raise RuntimeError("Converting '%s' to list failed: %s"
% (item1, get_error_message()))
count = item1.count(item2)
self.log('Item found from the first item %d time%s' % (count, s(count)))
return count
def should_not_match(self, string, pattern, msg=None, values=True):
"""Fails if the given ``string`` matches the given ``pattern``.
Pattern matching is similar as matching files in a shell, and it is
always case-sensitive. In the pattern ``*`` matches to anything and
``?`` matches to any single character.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
msg = self._get_string_msg(string, pattern, msg, values, 'matches')
asserts.fail_if(self._matches(string, pattern), msg)
def should_match(self, string, pattern, msg=None, values=True):
"""Fails unless the given ``string`` matches the given ``pattern``.
Pattern matching is similar as matching files in a shell, and it is
always case-sensitive. In the pattern, ``*`` matches to anything and
``?`` matches to any single character.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
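Examples (``${output}`` is an illustrative variable):
| Should Match | ${output} | ?ello* | # Passes e.g. with 'Hello world!' |
| Should Match | ${output} | hello* | # Fails with 'Hello world!' because matching is case-sensitive |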
"""
msg = self._get_string_msg(string, pattern, msg, values,
'does not match')
asserts.fail_unless(self._matches(string, pattern), msg)
def should_match_regexp(self, string, pattern, msg=None, values=True):
"""Fails if ``string`` does not match ``pattern`` as a regular expression.
Regular expression check is implemented using the Python
[http://docs.python.org/2/library/re.html|re module]. Python's regular
expression syntax is derived from Perl, and it is thus also very
similar to the syntax used, for example, in Java, Ruby and .NET.
Things to note about the regexp syntax in Robot Framework test data:
1) Backslash is an escape character in the test data, and possible
backslashes in the pattern must thus be escaped with another backslash
(e.g. ``\\\\d\\\\w+``).
2) Strings that may contain special characters, but should be handled
as literal strings, can be escaped with the `Regexp Escape` keyword.
3) The given pattern does not need to match the whole string. For
example, the pattern ``ello`` matches the string ``Hello world!``. If
a full match is needed, the ``^`` and ``$`` characters can be used to
denote the beginning and end of the string, respectively. For example,
``^ello$`` only matches the exact string ``ello``.
4) Possible flags altering how the expression is parsed (e.g.
``re.IGNORECASE``, ``re.MULTILINE``) can be set by prefixing the
pattern with the ``(?iLmsux)`` group like ``(?im)pattern``. The
available flags are ``i`` (case-insensitive), ``m`` (multiline mode),
``s`` (dotall mode), ``x`` (verbose), ``u`` (Unicode dependent) and
``L`` (locale dependent).
If this keyword passes, it returns the portion of the string that
matched the pattern. Additionally, the possible captured groups are
returned.
See the `Should Be Equal` keyword for an explanation on how to override
the default error message with the ``msg`` and ``values`` arguments.
Examples:
| Should Match Regexp | ${output} | \\\\d{6} | # Output contains six numbers |
| Should Match Regexp | ${output} | ^\\\\d{6}$ | # Six numbers and nothing more |
| ${ret} = | Should Match Regexp | Foo: 42 | (?i)foo: \\\\d+ |
| ${match} | ${group1} | ${group2} = |
| ... | Should Match Regexp | Bar: 43 | (Foo|Bar): (\\\\d+) |
=>
| ${ret} = 'Foo: 42'
| ${match} = 'Bar: 43'
| ${group1} = 'Bar'
| ${group2} = '43'
"""
msg = self._get_string_msg(string, pattern, msg, values, 'does not match')
res = re.search(pattern, string)
asserts.fail_if_none(res, msg, values=False)
match = res.group(0)
groups = res.groups()
if groups:
return [match] + list(groups)
return match
def should_not_match_regexp(self, string, pattern, msg=None, values=True):
"""Fails if ``string`` matches ``pattern`` as a regular expression.
See `Should Match Regexp` for more information about arguments.
"""
msg = self._get_string_msg(string, pattern, msg, values, 'matches')
asserts.fail_unless_none(re.search(pattern, string), msg, values=False)
def get_length(self, item):
"""Returns and logs the length of the given item as an integer.
The item can be anything that has a length, for example, a string,
a list, or a mapping. The keyword first tries to get the length with
the Python function ``len``, which calls the item's ``__len__`` method
internally. If that fails, the keyword tries to call the item's
possible ``length`` and ``size`` methods directly. The final attempt is
trying to get the value of the item's ``length`` attribute. If all
these attempts are unsuccessful, the keyword fails.
Examples:
| ${length} = | Get Length | Hello, world! | |
| Should Be Equal As Integers | ${length} | 13 |
| @{list} = | Create List | Hello, | world! |
| ${length} = | Get Length | ${list} | |
| Should Be Equal As Integers | ${length} | 2 |
See also `Length Should Be`, `Should Be Empty` and `Should Not Be
Empty`.
"""
length = self._get_length(item)
self.log('Length is %d' % length)
return length
def _get_length(self, item):
try:
return len(item)
except RERAISED_EXCEPTIONS:
raise
except:
try:
return item.length()
except RERAISED_EXCEPTIONS:
raise
except:
try:
return item.size()
except RERAISED_EXCEPTIONS:
raise
except:
try:
return item.length
except RERAISED_EXCEPTIONS:
raise
except:
raise RuntimeError("Could not get length of '%s'." % item)
def length_should_be(self, item, length, msg=None):
"""Verifies that the length of the given item is correct.
The length of the item is obtained using the `Get Length` keyword. The
default error message can be overridden with the ``msg`` argument.
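Example:
| Length Should Be | ${list} | 10 |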
"""
length = self._convert_to_integer(length)
actual = self.get_length(item)
if actual != length:
raise AssertionError(msg or "Length of '%s' should be %d but is %d."
% (item, length, actual))
def should_be_empty(self, item, msg=None):
"""Verifies that the given item is empty.
The length of the item is obtained using the `Get Length` keyword. The
default error message can be overridden with the ``msg`` argument.
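Examples:
| Should Be Empty | ${list} |
| Should Be Empty | ${output} | Expected no output. |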
"""
if self.get_length(item) > 0:
raise AssertionError(msg or "'%s' should be empty." % item)
def should_not_be_empty(self, item, msg=None):
"""Verifies that the given item is not empty.
The length of the item is obtained using the `Get Length` keyword. The
default error message can be overridden with the ``msg`` argument.
"""
if self.get_length(item) == 0:
raise AssertionError(msg or "'%s' should not be empty." % item)
def _get_string_msg(self, str1, str2, msg, values, delim):
default = "'%s' %s '%s'" % (unic(str1), delim, unic(str2))
if not msg:
msg = default
elif self._include_values(values):
msg = '%s: %s' % (msg, default)
return msg
class _Variables(_BuiltInBase):
def get_variables(self, no_decoration=False):
"""Returns a dictionary containing all variables in the current scope.
Variables are returned as a special dictionary that allows accessing
variables in space, case, and underscore insensitive manner similarly
as accessing variables in the test data. This dictionary supports all
same operations as normal Python dictionaries and, for example,
Collections library can be used to access or modify it. Modifying the
returned dictionary has no effect on the variables available in the
current scope.
By default variables are returned with ``${}``, ``@{}`` or ``&{}``
decoration based on variable types. Giving a true value (see `Boolean
arguments`) to the optional argument ``no_decoration`` will return
the variables without the decoration. This option is new in Robot
Framework 2.9.
Example:
| ${example_variable} = | Set Variable | example value |
| ${variables} = | Get Variables | |
| Dictionary Should Contain Key | ${variables} | \\${example_variable} |
| Dictionary Should Contain Key | ${variables} | \\${ExampleVariable} |
| Set To Dictionary | ${variables} | \\${name} | value |
| Variable Should Not Exist | \\${name} | | |
| ${no decoration} = | Get Variables | no_decoration=Yes |
| Dictionary Should Contain Key | ${no decoration} | example_variable |
Note: Prior to Robot Framework 2.7.4 variables were returned as
a custom object that did not support all dictionary methods.
"""
return self._variables.as_dict(decoration=is_falsy(no_decoration))
@run_keyword_variant(resolve=0)
def get_variable_value(self, name, default=None):
"""Returns variable value or ``default`` if the variable does not exist.
The name of the variable can be given either as a normal variable name
(e.g. ``${NAME}``) or in escaped format (e.g. ``\\${NAME}``). Notice
that the former has some limitations explained in `Set Suite Variable`.
Examples:
| ${x} = | Get Variable Value | ${a} | default |
| ${y} = | Get Variable Value | ${a} | ${b} |
| ${z} = | Get Variable Value | ${z} | |
=>
| ${x} gets value of ${a} if ${a} exists and string 'default' otherwise
| ${y} gets value of ${a} if ${a} exists and value of ${b} otherwise
| ${z} is set to Python None if it does not exist previously
See `Set Variable If` for another keyword to set variables dynamically.
"""
try:
return self._variables[self._get_var_name(name)]
except DataError:
return self._variables.replace_scalar(default)
def log_variables(self, level='INFO'):
"""Logs all variables in the current scope with given log level."""
variables = self.get_variables()
for name in sorted(variables, key=lambda s: s[2:-1].lower()):
msg = format_assign_message(name, variables[name], cut_long=False)
self.log(msg, level)
@run_keyword_variant(resolve=0)
def variable_should_exist(self, name, msg=None):
"""Fails unless the given variable exists within the current scope.
The name of the variable can be given either as a normal variable name
(e.g. ``${NAME}``) or in escaped format (e.g. ``\\${NAME}``). Notice
that the former has some limitations explained in `Set Suite Variable`.
The default error message can be overridden with the ``msg`` argument.
See also `Variable Should Not Exist` and `Keyword Should Exist`.
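Example, using the escaped format with an illustrative ``${HOST}`` variable:
| Variable Should Exist | \\${HOST} | \\${HOST} must be set before running this test. |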
"""
name = self._get_var_name(name)
msg = self._variables.replace_string(msg) if msg \
else "Variable %s does not exist." % name
try:
self._variables[name]
except DataError:
raise AssertionError(msg)
@run_keyword_variant(resolve=0)
def variable_should_not_exist(self, name, msg=None):
"""Fails if the given variable exists within the current scope.
The name of the variable can be given either as a normal variable name
(e.g. ``${NAME}``) or in escaped format (e.g. ``\\${NAME}``). Notice
that the former has some limitations explained in `Set Suite Variable`.
The default error message can be overridden with the ``msg`` argument.
See also `Variable Should Exist` and `Keyword Should Exist`.
"""
name = self._get_var_name(name)
msg = self._variables.replace_string(msg) if msg \
else "Variable %s exists." % name
try:
self._variables[name]
except DataError:
pass
else:
raise AssertionError(msg)
def replace_variables(self, text):
"""Replaces variables in the given text with their current values.
If the text contains undefined variables, this keyword fails.
If the given ``text`` contains only a single variable, its value is
returned as-is and it can be any object. Otherwise this keyword
always returns a string.
Example:
The file ``template.txt`` contains ``Hello ${NAME}!`` and variable
``${NAME}`` has the value ``Robot``.
| ${template} = | Get File | ${CURDIR}/template.txt |
| ${message} = | Replace Variables | ${template} |
| Should Be Equal | ${message} | Hello Robot! |
"""
return self._variables.replace_scalar(text)
def set_variable(self, *values):
"""Returns the given values which can then be assigned to a variables.
This keyword is mainly used for setting scalar variables.
Additionally it can be used for converting a scalar variable
containing a list to a list variable or to multiple scalar variables.
It is recommended to use `Create List` when creating new lists.
Examples:
| ${hi} = | Set Variable | Hello, world! |
| ${hi2} = | Set Variable | I said: ${hi} |
| ${var1} | ${var2} = | Set Variable | Hello | world |
| @{list} = | Set Variable | ${list with some items} |
| ${item1} | ${item2} = | Set Variable | ${list with 2 items} |
Variables created with this keyword are available only in the
scope where they are created. See `Set Global Variable`,
`Set Test Variable` and `Set Suite Variable` for information on how to
set variables so that they are available also in a larger scope.
"""
if len(values) == 0:
return ''
elif len(values) == 1:
return values[0]
else:
return list(values)
@run_keyword_variant(resolve=0)
def set_test_variable(self, name, *values):
"""Makes a variable available everywhere within the scope of the current test.
Variables set with this keyword are available everywhere within the
scope of the currently executed test case. For example, if you set a
variable in a user keyword, it is available both in the test case level
and also in all other user keywords used in the current test. Other
test cases will not see variables set with this keyword.
See `Set Suite Variable` for more information and examples.
"""
name = self._get_var_name(name)
value = self._get_var_value(name, values)
self._variables.set_test(name, value)
self._log_set_variable(name, value)
@run_keyword_variant(resolve=0)
def set_suite_variable(self, name, *values):
"""Makes a variable available everywhere within the scope of the current suite.
Variables set with this keyword are available everywhere within the
scope of the currently executed test suite. Setting variables with this
keyword thus has the same effect as creating them using the Variable
table in the test data file or importing them from variable files.
Possible child test suites do not see variables set with this keyword
by default. Starting from Robot Framework 2.9, that can be controlled
by using ``children=<option>`` as the last argument. If the specified
``<option>`` is a non-empty string or any other value considered true
in Python, the variable is set also to the child suites. Parent and
sibling suites will never see variables set with this keyword.
The name of the variable can be given either as a normal variable name
(e.g. ``${NAME}``) or in escaped format as ``\\${NAME}`` or ``$NAME``.
Variable value can be given using the same syntax as when variables
are created in the Variable table.
If a variable already exists within the new scope, its value will be
overwritten. Otherwise a new variable is created. If a variable already
exists within the current scope, the value can be left empty and the
variable within the new scope gets its value from the current scope.
Examples:
| Set Suite Variable | ${SCALAR} | Hello, world! |
| Set Suite Variable | ${SCALAR} | Hello, world! | children=true |
| Set Suite Variable | @{LIST} | First item | Second item |
| Set Suite Variable | &{DICT} | key=value | foo=bar |
| ${ID} = | Get ID |
| Set Suite Variable | ${ID} |
To override an existing value with an empty value, use built-in
variables ``${EMPTY}``, ``@{EMPTY}`` or ``&{EMPTY}``:
| Set Suite Variable | ${SCALAR} | ${EMPTY} |
| Set Suite Variable | @{LIST} | @{EMPTY} | # New in RF 2.7.4 |
| Set Suite Variable | &{DICT} | &{EMPTY} | # New in RF 2.9 |
*NOTE:* If the variable has value which itself is a variable (escaped
or not), you must always use the escaped format to set the variable:
Example:
| ${NAME} = | Set Variable | \\${var} |
| Set Suite Variable | ${NAME} | value | # Sets variable ${var} |
| Set Suite Variable | \\${NAME} | value | # Sets variable ${NAME} |
This limitation applies also to `Set Test Variable`, `Set Global
Variable`, `Variable Should Exist`, `Variable Should Not Exist` and
`Get Variable Value` keywords.
"""
name = self._get_var_name(name)
if (values and is_string(values[-1]) and
values[-1].startswith('children=')):
children = self._variables.replace_scalar(values[-1][9:])
children = is_truthy(children)
values = values[:-1]
else:
children = False
value = self._get_var_value(name, values)
self._variables.set_suite(name, value, children=children)
self._log_set_variable(name, value)
@run_keyword_variant(resolve=0)
def set_global_variable(self, name, *values):
"""Makes a variable available globally in all tests and suites.
Variables set with this keyword are globally available in all test
cases and suites executed after setting them. Setting variables with
this keyword thus has the same effect as creating them from the command line
using the options ``--variable`` or ``--variablefile``. Because this
keyword can change variables everywhere, it should be used with care.
See `Set Suite Variable` for more information and examples.
"""
name = self._get_var_name(name)
value = self._get_var_value(name, values)
self._variables.set_global(name, value)
self._log_set_variable(name, value)
# Helpers
def _get_var_name(self, orig):
name = self._resolve_possible_variable(orig)
try:
return self._unescape_variable_if_needed(name)
except ValueError:
raise RuntimeError("Invalid variable syntax '%s'." % orig)
def _resolve_possible_variable(self, name):
try:
resolved = self._variables.replace_string(name)
return self._unescape_variable_if_needed(resolved)
except (KeyError, ValueError, DataError):
return name
def _unescape_variable_if_needed(self, name):
if name.startswith('\\'):
name = name[1:]
if len(name) < 2:
raise ValueError
if name[0] in '$@&' and name[1] != '{':
name = '%s{%s}' % (name[0], name[1:])
if is_var(name):
return name
# Support for possible internal variables (issue 397)
name = '%s{%s}' % (name[0], self.replace_variables(name[2:-1]))
if is_var(name):
return name
raise ValueError
def _get_var_value(self, name, values):
if not values:
return self._variables[name]
# TODO: In RF 2.10/3.0 the if branch below can be removed and
# VariableTableValue used with all variables. See issue #1919.
if name[0] == '$':
if len(values) != 1 or VariableSplitter(values[0]).is_list_variable():
raise DataError("Setting list value to scalar variable '%s' "
"is not supported anymore. Create list "
"variable '@%s' instead." % (name, name[1:]))
return self._variables.replace_scalar(values[0])
return VariableTableValue(values, name).resolve(self._variables)
def _log_set_variable(self, name, value):
self.log(format_assign_message(name, value))
class _RunKeyword(_BuiltInBase):
# If you use any of these run keyword variants from another library, you
# should register those keywords with 'register_run_keyword' method. See
# the documentation of that method at the end of this file. There are also
# other run keyword variant keywords in BuiltIn which can also be seen
# at the end of this file.
def run_keyword(self, name, *args):
"""Executes the given keyword with the given arguments.
Because the name of the keyword to execute is given as an argument, it
can be a variable and thus set dynamically, e.g. from a return value of
another keyword or from the command line.
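Example, selecting the executed keyword via a variable:
| ${keyword} = | Set Variable | Log |
| Run Keyword | ${keyword} | Logged with a dynamically selected keyword. |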
"""
if not is_string(name):
raise RuntimeError('Keyword name must be a string.')
kw = Keyword(name, args=args)
return kw.run(self._context)
def run_keywords(self, *keywords):
"""Executes all the given keywords in a sequence.
This keyword is mainly useful in setups and teardowns when they need
to take care of multiple actions and creating a new higher level user
keyword would be an overkill.
By default all arguments are expected to be keywords to be executed.
Examples:
| Run Keywords | Initialize database | Start servers | Clear logs |
| Run Keywords | ${KW 1} | ${KW 2} |
| Run Keywords | @{KEYWORDS} |
Starting from Robot Framework 2.7.6, keywords can also be run with
arguments using upper case ``AND`` as a separator between keywords.
The keywords are executed so that the first argument is the first
keyword and proceeding arguments until the first ``AND`` are arguments
to it. First argument after the first ``AND`` is the second keyword and
proceeding arguments until the next ``AND`` are its arguments. And so on.
Examples:
| Run Keywords | Initialize database | db1 | AND | Start servers | server1 | server2 |
| Run Keywords | Initialize database | ${DB NAME} | AND | Start servers | @{SERVERS} | AND | Clear logs |
| Run Keywords | ${KW} | AND | @{KW WITH ARGS} |
Notice that the ``AND`` control argument must be used explicitly and
cannot itself come from a variable. If you need to use a literal ``AND``
string as an argument, you can either use variables or escape it with
a backslash like ``\\AND``.
"""
self._run_keywords(self._split_run_keywords(list(keywords)))
def _run_keywords(self, iterable):
errors = []
for kw, args in iterable:
try:
self.run_keyword(kw, *args)
except ExecutionPassed as err:
err.set_earlier_failures(errors)
raise err
except ExecutionFailed as err:
errors.extend(err.get_errors())
if not err.can_continue(self._context.in_teardown):
break
if errors:
raise ExecutionFailures(errors)
def _split_run_keywords(self, keywords):
if 'AND' not in keywords:
for name in self._variables.replace_list(keywords):
yield name, ()
else:
for name, args in self._split_run_keywords_from_and(keywords):
yield name, args
def _split_run_keywords_from_and(self, keywords):
while 'AND' in keywords:
index = keywords.index('AND')
yield self._resolve_run_keywords_name_and_args(keywords[:index])
keywords = keywords[index+1:]
yield self._resolve_run_keywords_name_and_args(keywords)
def _resolve_run_keywords_name_and_args(self, kw_call):
kw_call = self._variables.replace_list(kw_call, replace_until=1)
if not kw_call:
raise DataError('Incorrect use of AND')
return kw_call[0], kw_call[1:]
def run_keyword_if(self, condition, name, *args):
"""Runs the given keyword with the given arguments, if ``condition`` is true.
The given ``condition`` is evaluated in Python as explained in
`Evaluating expressions`, and ``name`` and ``*args`` have same
semantics as with `Run Keyword`.
Example, a simple if/else construct:
| ${status} | ${value} = | `Run Keyword And Ignore Error` | `My Keyword` |
| `Run Keyword If` | '${status}' == 'PASS' | `Some Action` | arg |
| `Run Keyword Unless` | '${status}' == 'PASS' | `Another Action` |
In this example, only either `Some Action` or `Another Action` is
executed, based on the status of `My Keyword`. Instead of `Run Keyword
And Ignore Error` you can also use `Run Keyword And Return Status`.
Variables used like ``${variable}``, as in the examples above, are
replaced in the expression before evaluation. Variables are also
available in the evaluation namespace and can be accessed using special
syntax ``$variable``. This is a new feature in Robot Framework 2.9
and it is explained more thoroughly in `Evaluating expressions`.
Example:
| `Run Keyword If` | $result is None or $result == 'FAIL' | `Keyword` |
Starting from Robot version 2.7.4, this keyword supports also optional
ELSE and ELSE IF branches. Both of these are defined in ``*args`` and
must use exactly format ``ELSE`` or ``ELSE IF``, respectively. ELSE
branches must contain first the name of the keyword to execute and then
its possible arguments. ELSE IF branches must first contain a condition,
like the first argument to this keyword, and then the keyword to execute
and its possible arguments. It is possible to have ELSE branch after
ELSE IF and to have multiple ELSE IF branches.
Given previous example, if/else construct can also be created like this:
| ${status} | ${value} = | `Run Keyword And Ignore Error` | My Keyword |
| `Run Keyword If` | '${status}' == 'PASS' | `Some Action` | arg | ELSE | `Another Action` |
The return value is the one of the keyword that was executed or None if
no keyword was executed (i.e. if ``condition`` was false). Hence, it is
recommended to use ELSE and/or ELSE IF branches to conditionally assign
return values from keyword to variables (to conditionally assign fixed
values to variables, see `Set Variable If`). This is illustrated by the
example below:
| ${var1} = | `Run Keyword If` | ${rc} == 0 | `Some keyword returning a value` |
| ... | ELSE IF | 0 < ${rc} < 42 | `Another keyword` |
| ... | ELSE IF | ${rc} < 0 | `Another keyword with args` | ${rc} | arg2 |
| ... | ELSE | `Final keyword to handle abnormal cases` | ${rc} |
| ${var2} = | `Run Keyword If` | ${condition} | `Some keyword` |
In this example, ${var2} will be set to None if ${condition} is false.
Notice that ``ELSE`` and ``ELSE IF`` control words must be used
explicitly and thus cannot come from variables. If you need to use
literal ``ELSE`` and ``ELSE IF`` strings as arguments, you can escape
them with a backslash like ``\\ELSE`` and ``\\ELSE IF``.
Starting from Robot Framework 2.8, Python's
[http://docs.python.org/2/library/os.html|os] and
[http://docs.python.org/2/library/sys.html|sys] modules are
automatically imported when evaluating the ``condition``.
Attributes they contain can thus be used in the condition:
| `Run Keyword If` | os.sep == '/' | `Unix Keyword` |
| ... | ELSE IF | sys.platform.startswith('java') | `Jython Keyword` |
| ... | ELSE | `Windows Keyword` |
"""
args, branch = self._split_elif_or_else_branch(args)
if self._is_true(condition):
return self.run_keyword(name, *args)
return branch()
def _split_elif_or_else_branch(self, args):
if 'ELSE IF' in args:
args, branch = self._split_branch(args, 'ELSE IF', 2,
'condition and keyword')
return args, lambda: self.run_keyword_if(*branch)
if 'ELSE' in args:
args, branch = self._split_branch(args, 'ELSE', 1, 'keyword')
return args, lambda: self.run_keyword(*branch)
return args, lambda: None
def _split_branch(self, args, control_word, required, required_error):
index = list(args).index(control_word)
branch = self._variables.replace_list(args[index+1:], required)
if len(branch) < required:
raise DataError('%s requires %s.' % (control_word, required_error))
return args[:index], branch
def run_keyword_unless(self, condition, name, *args):
"""Runs the given keyword with the given arguments, if ``condition`` is false.
See `Run Keyword If` for more information and an example.
"""
if not self._is_true(condition):
return self.run_keyword(name, *args)
def run_keyword_and_ignore_error(self, name, *args):
"""Runs the given keyword with the given arguments and ignores possible error.
This keyword returns two values, so that the first is either string
``PASS`` or ``FAIL``, depending on the status of the executed keyword.
The second value is either the return value of the keyword or the
received error message. See `Run Keyword And Return Status` if you are
only interested in the execution status.
The keyword name and arguments work as in `Run Keyword`. See
`Run Keyword If` for a usage example.
Errors caused by invalid syntax, timeouts, or fatal exceptions are not
caught by this keyword. Otherwise this keyword itself never fails.
Since Robot Framework 2.9, variable errors are caught by this keyword.
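Example (`My Keyword` is an arbitrary keyword):
| ${status} | ${value} = | Run Keyword And Ignore Error | My Keyword |
| Run Keyword If | '${status}' == 'FAIL' | Log | Error received: ${value} |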
"""
try:
return 'PASS', self.run_keyword(name, *args)
except ExecutionFailed as err:
if err.dont_continue:
raise
return 'FAIL', unicode(err)
def run_keyword_and_return_status(self, name, *args):
"""Runs the given keyword with given arguments and returns the status as a Boolean value.
This keyword returns Boolean ``True`` if the keyword that is executed
succeeds and ``False`` if it fails. This is useful, for example, in
combination with `Run Keyword If`. If you are interested in the error
message or return value, use `Run Keyword And Ignore Error` instead.
The keyword name and arguments work as in `Run Keyword`.
Example:
| ${passed} = | `Run Keyword And Return Status` | Keyword | args |
| `Run Keyword If` | ${passed} | Another keyword |
Errors caused by invalid syntax, timeouts, or fatal exceptions are not
caught by this keyword. Otherwise this keyword itself never fails.
New in Robot Framework 2.7.6.
"""
status, _ = self.run_keyword_and_ignore_error(name, *args)
return status == 'PASS'
def run_keyword_and_continue_on_failure(self, name, *args):
"""Runs the keyword and continues execution even if a failure occurs.
The keyword name and arguments work as with `Run Keyword`.
Example:
| Run Keyword And Continue On Failure | Fail | This is a stupid example |
| Log | This keyword is executed |
The execution is not continued if the failure is caused by invalid syntax,
timeout, or fatal exception.
Since Robot Framework 2.9, variable errors are caught by this keyword.
"""
try:
return self.run_keyword(name, *args)
except ExecutionFailed as err:
if not err.dont_continue:
err.continue_on_failure = True
raise err
def run_keyword_and_expect_error(self, expected_error, name, *args):
"""Runs the keyword and checks that the expected error occurred.
The expected error must be given in the same format as in
Robot Framework reports. It can be a pattern containing
characters ``?``, which matches to any single character and
``*``, which matches to any number of any characters. ``name`` and
``*args`` have same semantics as with `Run Keyword`.
If the expected error occurs, the error message is returned and it can
be further processed/tested, if needed. If there is no error, or the
error does not match the expected error, this keyword fails.
Examples:
| Run Keyword And Expect Error | My error | Some Keyword | arg1 | arg2 |
| ${msg} = | Run Keyword And Expect Error | * | My KW |
| Should Start With | ${msg} | Once upon a time in |
Errors caused by invalid syntax, timeouts, or fatal exceptions are not
caught by this keyword.
Since Robot Framework 2.9, variable errors are caught by this keyword.
"""
try:
self.run_keyword(name, *args)
except ExecutionFailed as err:
if err.dont_continue:
raise
else:
raise AssertionError("Expected error '%s' did not occur."
% expected_error)
if not self._matches(unicode(err), expected_error):
raise AssertionError("Expected error '%s' but got '%s'."
% (expected_error, err))
return unicode(err)
def repeat_keyword(self, times, name, *args):
"""Executes the specified keyword multiple times.
``name`` and ``args`` define the keyword that is executed
similarly as with `Run Keyword`, and ``times`` specifies how many times
the keyword should be executed. ``times`` can be given as an
integer or as a string that can be converted to an integer. If it is
a string, it can have postfix ``times`` or ``x`` (case and space
insensitive) to make the expression more explicit.
If ``times`` is zero or negative, the keyword is not executed at
all. This keyword fails immediately if any of the execution
rounds fails.
Examples:
| Repeat Keyword | 5 times | Go to Previous Page |
| Repeat Keyword | ${var} | Some Keyword | arg1 | arg2 |
"""
times = self._get_times_to_repeat(times)
self._run_keywords(self._yield_repeated_keywords(times, name, args))
def _get_times_to_repeat(self, times, require_postfix=False):
times = normalize(str(times))
if times.endswith('times'):
times = times[:-5]
elif times.endswith('x'):
times = times[:-1]
elif require_postfix:
raise ValueError
return self._convert_to_integer(times)
def _yield_repeated_keywords(self, times, name, args):
if times <= 0:
self.log("Keyword '%s' repeated zero times." % name)
for i in xrange(times):
self.log("Repeating keyword, round %d/%d." % (i+1, times))
yield name, args
def wait_until_keyword_succeeds(self, retry, retry_interval, name, *args):
"""Runs the specified keyword and retries if it fails.
``name`` and ``args`` define the keyword that is executed similarly
as with `Run Keyword`. How long to retry running the keyword is
defined using the ``retry`` argument either as a timeout or a count.
``retry_interval`` is the time to wait before trying to run the
keyword again after the previous run has failed.
If ``retry`` is given as timeout, it must be in Robot Framework's
time format (e.g. ``1 minute``, ``2 min 3 s``, ``4.5``) that is
explained in an appendix of Robot Framework User Guide. If it is
given as count, it must have ``times`` or ``x`` postfix (e.g.
``5 times``, ``10 x``). ``retry_interval`` must always be given in
Robot Framework's time format.
If the keyword does not succeed regardless of retries, this keyword
fails. If the executed keyword passes, its return value is returned.
Examples:
| Wait Until Keyword Succeeds | 2 min | 5 sec | My keyword | argument |
| ${result} = | Wait Until Keyword Succeeds | 3x | 200ms | My keyword |
All normal failures are caught by this keyword. Errors caused by
invalid syntax, test or keyword timeouts, or fatal exceptions (caused
e.g. by `Fatal Error`) are not caught.
Running the same keyword multiple times inside this keyword can create
lots of output and considerably increase the size of the generated
output files. Starting from Robot Framework 2.7, it is possible to
remove unnecessary keywords from the outputs using
``--RemoveKeywords WUKS`` command line option.
Support for specifying ``retry`` as a number of times to retry is
a new feature in Robot Framework 2.9.
Since Robot Framework 2.9, variable errors are caught by this keyword.
"""
maxtime = count = -1
try:
count = self._get_times_to_repeat(retry, require_postfix=True)
except ValueError:
timeout = timestr_to_secs(retry)
maxtime = time.time() + timeout
message = 'for %s' % secs_to_timestr(timeout)
else:
if count <= 0:
raise ValueError('Retry count %d is not positive.' % count)
message = '%d time%s' % (count, s(count))
retry_interval = timestr_to_secs(retry_interval)
while True:
try:
return self.run_keyword(name, *args)
except ExecutionFailed as err:
if err.dont_continue:
raise
count -= 1
if time.time() > maxtime > 0 or count == 0:
raise AssertionError("Keyword '%s' failed after retrying "
"%s. The last error was: %s"
% (name, message, err))
self._sleep_in_parts(retry_interval)
def set_variable_if(self, condition, *values):
"""Sets variable based on the given condition.
The basic usage is giving a condition and two values. The
given condition is first evaluated the same way as with the
`Should Be True` keyword. If the condition is true, then the
first value is returned, and otherwise the second value is
returned. The second value can also be omitted, in which case
it defaults to None. This usage is illustrated in the
examples below, where ``${rc}`` is assumed to be zero.
| ${var1} = | Set Variable If | ${rc} == 0 | zero | nonzero |
| ${var2} = | Set Variable If | ${rc} > 0 | value1 | value2 |
| ${var3} = | Set Variable If | ${rc} > 0 | whatever | |
=>
| ${var1} = 'zero'
| ${var2} = 'value2'
| ${var3} = None
It is also possible to have 'else if' support by replacing the
second value with another condition, and having two new values
after it. If the first condition is not true, the second is
evaluated and one of the values after it is returned based on
its truth value. This can be continued by adding more
conditions without a limit.
| ${var} = | Set Variable If | ${rc} == 0 | zero |
| ... | ${rc} > 0 | greater than zero | less than zero |
| |
| ${var} = | Set Variable If |
| ... | ${rc} == 0 | zero |
| ... | ${rc} == 1 | one |
| ... | ${rc} == 2 | two |
| ... | ${rc} > 2 | greater than two |
| ... | ${rc} < 0 | less than zero |
Use `Get Variable Value` if you need to set variables
dynamically based on whether a variable exist or not.
"""
values = self._verify_values_for_set_variable_if(list(values))
if self._is_true(condition):
return self._variables.replace_scalar(values[0])
values = self._verify_values_for_set_variable_if(values[1:], True)
if len(values) == 1:
return self._variables.replace_scalar(values[0])
return self.run_keyword('BuiltIn.Set Variable If', *values)
def _verify_values_for_set_variable_if(self, values, default=False):
if not values:
if default:
return [None]
raise RuntimeError('At least one value is required')
if is_list_var(values[0]):
values[:1] = [escape(item) for item in self._variables[values[0]]]
return self._verify_values_for_set_variable_if(values)
return values
def run_keyword_if_test_failed(self, name, *args):
"""Runs the given keyword with the given arguments, if the test failed.
This keyword can only be used in a test teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
Prior to Robot Framework 2.9 failures in test teardown itself were
not detected by this keyword.
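Example:
| [Teardown] | Run Keyword If Test Failed | Log | ${TEST NAME} failed. |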
"""
test = self._get_test_in_teardown('Run Keyword If Test Failed')
if not test.passed or self._context.failure_in_test_teardown:
return self.run_keyword(name, *args)
def run_keyword_if_test_passed(self, name, *args):
"""Runs the given keyword with the given arguments, if the test passed.
This keyword can only be used in a test teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
Prior to Robot Framework 2.9 failures in test teardown itself were
not detected by this keyword.
"""
test = self._get_test_in_teardown('Run Keyword If Test Passed')
if test.passed and not self._context.failure_in_test_teardown:
return self.run_keyword(name, *args)
def run_keyword_if_timeout_occurred(self, name, *args):
"""Runs the given keyword if either a test or a keyword timeout has occurred.
This keyword can only be used in a test teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
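Example:
| [Teardown] | Run Keyword If Timeout Occurred | Log | Execution exceeded its timeout. | WARN |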
"""
self._get_test_in_teardown('Run Keyword If Timeout Occurred')
if self._context.timeout_occurred:
return self.run_keyword(name, *args)
def _get_test_in_teardown(self, kwname):
ctx = self._context
if ctx.test and ctx.in_test_teardown:
return ctx.test
raise RuntimeError("Keyword '%s' can only be used in test teardown."
% kwname)
def run_keyword_if_all_critical_tests_passed(self, name, *args):
"""Runs the given keyword with the given arguments, if all critical tests passed.
This keyword can only be used in suite teardown. Trying to use it in
any other place will result in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
"""
suite = self._get_suite_in_teardown('Run Keyword If '
'All Critical Tests Passed')
if suite.statistics.critical.failed == 0:
return self.run_keyword(name, *args)
def run_keyword_if_any_critical_tests_failed(self, name, *args):
"""Runs the given keyword with the given arguments, if any critical tests failed.
This keyword can only be used in a suite teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
"""
suite = self._get_suite_in_teardown('Run Keyword If '
'Any Critical Tests Failed')
if suite.statistics.critical.failed > 0:
return self.run_keyword(name, *args)
def run_keyword_if_all_tests_passed(self, name, *args):
"""Runs the given keyword with the given arguments, if all tests passed.
This keyword can only be used in a suite teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
"""
suite = self._get_suite_in_teardown('Run Keyword If All Tests Passed')
if suite.statistics.all.failed == 0:
return self.run_keyword(name, *args)
def run_keyword_if_any_tests_failed(self, name, *args):
"""Runs the given keyword with the given arguments, if one or more tests failed.
This keyword can only be used in a suite teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
"""
suite = self._get_suite_in_teardown('Run Keyword If Any Tests Failed')
if suite.statistics.all.failed > 0:
return self.run_keyword(name, *args)
def _get_suite_in_teardown(self, kwname):
if not self._context.in_suite_teardown:
raise RuntimeError("Keyword '%s' can only be used in suite teardown."
% kwname)
return self._context.suite
class _Control(_BuiltInBase):
def continue_for_loop(self):
"""Skips the current for loop iteration and continues from the next.
Skips the remaining keywords in the current for loop iteration and
continues from the next one. Can be used directly in a for loop or
in a keyword that the loop uses.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Run Keyword If | '${var}' == 'CONTINUE' | Continue For Loop |
| | Do Something | ${var} |
See `Continue For Loop If` to conditionally continue a for loop without
using `Run Keyword If` or other wrapper keywords.
New in Robot Framework 2.8.
"""
self.log("Continuing for loop from the next iteration.")
raise ContinueForLoop()
def continue_for_loop_if(self, condition):
"""Skips the current for loop iteration if the ``condition`` is true.
A wrapper for `Continue For Loop` to continue a for loop based on
the given condition. The condition is evaluated using the same
semantics as with `Should Be True` keyword.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Continue For Loop If | '${var}' == 'CONTINUE' |
| | Do Something | ${var} |
New in Robot Framework 2.8.
"""
if self._is_true(condition):
self.continue_for_loop()
def exit_for_loop(self):
"""Stops executing the enclosing for loop.
Exits the enclosing for loop and continues execution after it.
Can be used directly in a for loop or in a keyword that the loop uses.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Run Keyword If | '${var}' == 'EXIT' | Exit For Loop |
| | Do Something | ${var} |
See `Exit For Loop If` to conditionally exit a for loop without
using `Run Keyword If` or other wrapper keywords.
"""
self.log("Exiting for loop altogether.")
raise ExitForLoop()
def exit_for_loop_if(self, condition):
"""Stops executing the enclosing for loop if the ``condition`` is true.
A wrapper for `Exit For Loop` to exit a for loop based on
the given condition. The condition is evaluated using the same
semantics as with `Should Be True` keyword.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Exit For Loop If | '${var}' == 'EXIT' |
| | Do Something | ${var} |
New in Robot Framework 2.8.
"""
if self._is_true(condition):
self.exit_for_loop()
@run_keyword_variant(resolve=0)
def return_from_keyword(self, *return_values):
"""Returns from the enclosing user keyword.
This keyword can be used to return from a user keyword with PASS status
without executing it fully. It is also possible to return values
similarly as with the ``[Return]`` setting. For more detailed information
about working with the return values, see the User Guide.
This keyword is typically wrapped to some other keyword, such as
`Run Keyword If` or `Run Keyword If Test Passed`, to return based
on a condition:
| Run Keyword If | ${rc} < 0 | Return From Keyword |
| Run Keyword If Test Passed | Return From Keyword |
It is also possible to use this keyword to return from a keyword
inside a for loop. That, as well as returning values, is demonstrated by the
`Find Index` keyword in the following somewhat advanced example.
Notice that it is often a good idea to move this kind of complicated
logic into a test library.
| ***** Variables *****
| @{LIST} = foo baz
|
| ***** Test Cases *****
| Example
| ${index} = Find Index baz @{LIST}
| Should Be Equal ${index} ${1}
| ${index} = Find Index non existing @{LIST}
| Should Be Equal ${index} ${-1}
|
| ***** Keywords *****
| Find Index
| [Arguments] ${element} @{items}
| ${index} = Set Variable ${0}
| :FOR ${item} IN @{items}
| \\ Run Keyword If '${item}' == '${element}' Return From Keyword ${index}
| \\ ${index} = Set Variable ${index + 1}
| Return From Keyword ${-1} # Also [Return] would work here.
The most common use case, returning based on an expression, can be
accomplished directly with `Return From Keyword If`. Both of these
keywords are new in Robot Framework 2.8.
See also `Run Keyword And Return` and `Run Keyword And Return If`.
"""
self.log('Returning from the enclosing user keyword.')
raise ReturnFromKeyword(return_values)
@run_keyword_variant(resolve=1)
def return_from_keyword_if(self, condition, *return_values):
"""Returns from the enclosing user keyword if ``condition`` is true.
A wrapper for `Return From Keyword` to return based on the given
condition. The condition is evaluated using the same semantics as
with `Should Be True` keyword.
Given the same example as in `Return From Keyword`, we can rewrite the
`Find Index` keyword as follows:
| ***** Keywords *****
| Find Index
| [Arguments] ${element} @{items}
| ${index} = Set Variable ${0}
| :FOR ${item} IN @{items}
| \\ Return From Keyword If '${item}' == '${element}' ${index}
| \\ ${index} = Set Variable ${index + 1}
| Return From Keyword ${-1} # Also [Return] would work here.
See also `Run Keyword And Return` and `Run Keyword And Return If`.
New in Robot Framework 2.8.
"""
if self._is_true(condition):
self.return_from_keyword(*return_values)
@run_keyword_variant(resolve=1)
def run_keyword_and_return(self, name, *args):
"""Runs the specified keyword and returns from the enclosing user keyword.
The keyword to execute is defined with ``name`` and ``*args`` exactly
like with `Run Keyword`. After running the keyword, returns from the
enclosing user keyword and passes possible return value from the
executed keyword further. Returning from a keyword has exactly the same
semantics as with `Return From Keyword`.
Example:
| `Run Keyword And Return` | `My Keyword` | arg1 | arg2 |
| # Above is equivalent to: |
| ${result} = | `My Keyword` | arg1 | arg2 |
| `Return From Keyword` | ${result} | | |
Use `Run Keyword And Return If` if you want to run keyword and return
based on a condition.
New in Robot Framework 2.8.2.
"""
ret = self.run_keyword(name, *args)
self.return_from_keyword(escape(ret))
@run_keyword_variant(resolve=2)
def run_keyword_and_return_if(self, condition, name, *args):
"""Runs the specified keyword and returns from the enclosing user keyword.
A wrapper for `Run Keyword And Return` to run and return based on
the given ``condition``. The condition is evaluated using the same
semantics as with `Should Be True` keyword.
Example:
| `Run Keyword And Return If` | ${rc} > 0 | `My Keyword` | arg1 | arg2 |
| # Above is equivalent to: |
| `Run Keyword If` | ${rc} > 0 | `Run Keyword And Return` | `My Keyword` | arg1 | arg2 |
Use `Return From Keyword If` if you want to return a certain value
based on a condition.
New in Robot Framework 2.8.2.
"""
if self._is_true(condition):
self.run_keyword_and_return(name, *args)
def pass_execution(self, message, *tags):
"""Skips rest of the current test, setup, or teardown with PASS status.
This keyword can be used anywhere in the test data, but the place where
used affects the behavior:
- When used in any setup or teardown (suite, test or keyword), passes
that setup or teardown. Possible keyword teardowns of the started
keywords are executed. Does not affect execution or statuses
otherwise.
- When used in a test outside setup or teardown, passes that particular
test case. Possible test and keyword teardowns are executed.
Possible continuable failures before this keyword is used, as well as
failures in executed teardowns, will fail the execution.
It is mandatory to give a message explaining why execution was passed.
By default the message is considered plain text, but starting it with
``*HTML*`` allows using HTML formatting.
It is also possible to modify test tags passing tags after the message
similarly as with `Fail` keyword. Tags starting with a hyphen
(e.g. ``-regression``) are removed and others added. Tags are modified
using `Set Tags` and `Remove Tags` internally, and the semantics
of setting and removing them are the same as with these keywords.
Examples:
| Pass Execution | All features available in this version tested. |
| Pass Execution | Deprecated test. | deprecated | -regression |
This keyword is typically wrapped in some other keyword, such as
`Run Keyword If`, to pass based on a condition. The most common case
can be handled also with `Pass Execution If`:
| Run Keyword If | ${rc} < 0 | Pass Execution | Negative values are cool. |
| Pass Execution If | ${rc} < 0 | Negative values are cool. |
Passing execution in the middle of a test, setup or teardown should be
used with care. In the worst case it leads to tests that skip all the
parts that could actually uncover problems in the tested application.
In cases where execution cannot continue due to external factors,
it is often safer to fail the test case and make it non-critical.
New in Robot Framework 2.8.
"""
message = message.strip()
if not message:
raise RuntimeError('Message cannot be empty.')
self._set_and_remove_tags(tags)
log_message, level = self._get_logged_test_message_and_level(message)
self.log('Execution passed with message:\n%s' % log_message, level)
raise PassExecution(message)
@run_keyword_variant(resolve=1)
def pass_execution_if(self, condition, message, *tags):
"""Conditionally skips rest of the current test, setup, or teardown with PASS status.
A wrapper for `Pass Execution` to skip rest of the current test,
setup or teardown based on the given ``condition``. The condition is
evaluated similarly as with `Should Be True` keyword, and ``message``
and ``*tags`` have same semantics as with `Pass Execution`.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Pass Execution If | '${var}' == 'EXPECTED' | Correct value was found |
| | Do Something | ${var} |
New in Robot Framework 2.8.
"""
if self._is_true(condition):
message = self._variables.replace_string(message)
tags = [self._variables.replace_string(tag) for tag in tags]
self.pass_execution(message, *tags)
class _Misc(_BuiltInBase):
def no_operation(self):
"""Does absolutely nothing."""
def sleep(self, time_, reason=None):
"""Pauses the test executed for the given time.
``time`` may be either a number or a time string. Time strings are in
a format such as ``1 day 2 hours 3 minutes 4 seconds 5 milliseconds`` or
``1d 2h 3m 4s 5ms``, and they are fully explained in an appendix of
Robot Framework User Guide. Optional ``reason`` can be used to explain why
sleeping is necessary. Both the time slept and the reason are logged.
Examples:
| Sleep | 42 |
| Sleep | 1.5 |
| Sleep | 2 minutes 10 seconds |
| Sleep | 10s | Wait for a reply |
"""
seconds = timestr_to_secs(time_)
# Python hangs with negative values
if seconds < 0:
seconds = 0
self._sleep_in_parts(seconds)
self.log('Slept %s' % secs_to_timestr(seconds))
if reason:
self.log(reason)
def _sleep_in_parts(self, seconds):
# time.sleep() cannot be interrupted on Windows, so to make sure that
# we can signal a stop (with a timeout), the sleep is split into
# small pieces.
endtime = time.time() + float(seconds)
while True:
remaining = endtime - time.time()
if remaining <= 0:
break
time.sleep(min(remaining, 0.5))
def catenate(self, *items):
"""Catenates the given items together and returns the resulted string.
By default, items are catenated with spaces, but if the first item
contains the string ``SEPARATOR=<sep>``, the separator ``<sep>`` is
used instead. Items are converted into strings when necessary.
Examples:
| ${str1} = | Catenate | Hello | world | |
| ${str2} = | Catenate | SEPARATOR=--- | Hello | world |
| ${str3} = | Catenate | SEPARATOR= | Hello | world |
=>
| ${str1} = 'Hello world'
| ${str2} = 'Hello---world'
| ${str3} = 'Helloworld'
"""
if not items:
return ''
items = [unic(item) for item in items]
if items[0].startswith('SEPARATOR='):
sep = items[0][len('SEPARATOR='):]
items = items[1:]
else:
sep = ' '
return sep.join(items)
def log(self, message, level='INFO', html=False, console=False, repr=False):
u"""Logs the given message with the given level.
Valid levels are TRACE, DEBUG, INFO (default), HTML, WARN, and ERROR.
Messages below the current active log level are ignored. See
`Set Log Level` keyword and ``--loglevel`` command line option
for more details about setting the level.
Messages logged with the WARN or ERROR levels will be automatically
visible also in the console and in the Test Execution Errors section
in the log file.
Logging can be configured using optional ``html``, ``console`` and
``repr`` arguments. They are off by default, but can be enabled
by giving them a true value. See `Boolean arguments` section for more
information about true and false values.
If the ``html`` argument is given a true value, the message will be
considered HTML and special characters such as ``<`` in it are not
escaped. For example, logging ``<img src="image.png">`` creates an
image when ``html`` is true, but otherwise the message is that exact
string. An alternative to using the ``html`` argument is using the HTML
pseudo log level. It logs the message as HTML using the INFO level.
If the ``console`` argument is true, the message will be written to
the console where test execution was started from in addition to
the log file. This keyword always uses the standard output stream
and adds a newline after the written message. Use `Log To Console`
instead if either of these is undesirable.
If the ``repr`` argument is true, the given item will be passed through
a custom version of Python's ``pprint.pformat()`` function before
logging it. This is useful, for example, when working with strings or
bytes containing invisible characters, or when working with nested data
structures. The custom version differs from the standard one so that it
omits the ``u`` prefix from Unicode strings and adds ``b`` prefix to
byte strings.
Examples:
| Log | Hello, world! | | | # Normal INFO message. |
| Log | Warning, world! | WARN | | # Warning. |
| Log | <b>Hello</b>, world! | html=yes | | # INFO message as HTML. |
| Log | <b>Hello</b>, world! | HTML | | # Same as above. |
| Log | <b>Hello</b>, world! | DEBUG | html=true | # DEBUG as HTML. |
| Log | Hello, console! | console=yes | | # Log also to the console. |
| Log | Hyv\xe4 \\x00 | repr=yes | | # Log ``'Hyv\\xe4 \\x00'``. |
See `Log Many` if you want to log multiple messages in one go, and
`Log To Console` if you only want to write to the console.
Arguments ``html``, ``console``, and ``repr`` are new in Robot Framework
2.8.2.
Pprint support when ``repr`` is used is new in Robot Framework 2.8.6,
and it was changed to drop the ``u`` prefix and add the ``b`` prefix
in Robot Framework 2.9.
"""
if is_truthy(repr):
message = prepr(message, width=80)
logger.write(message, level, is_truthy(html))
if is_truthy(console):
logger.console(message)
@run_keyword_variant(resolve=0)
def log_many(self, *messages):
"""Logs the given messages as separate entries using the INFO level.
Supports also logging list and dictionary variable items individually.
Examples:
| Log Many | Hello | ${var} |
| Log Many | @{list} | &{dict} |
See `Log` and `Log To Console` keywords if you want to use alternative
log levels, use HTML, or log to the console.
"""
for msg in self._yield_logged_messages(messages):
self.log(msg)
def _yield_logged_messages(self, messages):
for msg in messages:
var = VariableSplitter(msg)
value = self._variables.replace_scalar(msg)
if var.is_list_variable():
for item in value:
yield item
elif var.is_dict_variable():
for name, value in value.items():
yield '%s=%s' % (name, value)
else:
yield value
def log_to_console(self, message, stream='STDOUT', no_newline=False):
"""Logs the given message to the console.
By default uses the standard output stream. Using the standard error
stream is possible by giving the ``stream`` argument value ``STDERR``
(case-insensitive).
By default appends a newline to the logged message. This can be
disabled by giving the ``no_newline`` argument a true value (see
`Boolean arguments`).
Examples:
| Log To Console | Hello, console! | |
| Log To Console | Hello, stderr! | STDERR |
| Log To Console | Message starts here and is | no_newline=true |
| Log To Console | continued without newline. | |
This keyword does not log the message to the normal log file. Use
`Log` keyword, possibly with argument ``console``, if that is desired.
New in Robot Framework 2.8.2.
"""
logger.console(message, newline=is_falsy(no_newline), stream=stream)
@run_keyword_variant(resolve=0)
def comment(self, *messages):
"""Displays the given messages in the log file as keyword arguments.
This keyword does nothing with the arguments it receives, but as they
are visible in the log, this keyword can be used to display simple
messages. Given arguments are ignored so thoroughly that they can even
contain non-existing variables. If you are interested in variable
values, you can use the `Log` or `Log Many` keywords.
"""
pass
def set_log_level(self, level):
"""Sets the log threshold to the specified level and returns the old level.
Messages below the level will not be logged. The default logging level is
INFO, but it can be overridden with the command line option
``--loglevel``.
The available levels are TRACE, DEBUG, INFO (default), WARN, ERROR
and NONE (no logging).
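Example:
| ${old} = | Set Log Level | DEBUG |
| Set Log Level | ${old} |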
"""
try:
old = self._context.output.set_log_level(level)
except DataError as err:
raise RuntimeError(unicode(err))
self._namespace.variables.set_global('${LOG_LEVEL}', level.upper())
self.log('Log level changed from %s to %s' % (old, level.upper()))
return old
def reload_library(self, name_or_instance):
"""Rechecks what keywords the specified library provides.
Can be called explicitly in the test data or by a library itself
when keywords it provides have changed.
The library can be specified by its name or as the active instance of
the library. The latter is especially useful if the library itself
calls this keyword as a method.
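Example:
| Reload Library | MyLibrary |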
New in Robot Framework 2.9.
"""
library = self._namespace.reload_library(name_or_instance)
self.log('Reloaded library %s with %s keywords.' % (library.name,
len(library)))
@run_keyword_variant(resolve=0)
def import_library(self, name, *args):
"""Imports a library with the given name and optional arguments.
This functionality allows dynamic importing of libraries while tests
are running. That may be necessary, if the library itself is dynamic
and not yet available when test data is processed. In a normal case,
libraries should be imported using the Library setting in the Setting
table.
This keyword supports importing libraries both using library
names and physical paths. When paths are used, they must be
given in absolute format. Forward slashes can be used as path
separators in all operating systems.
It is possible to pass arguments to the imported library, and the named
argument syntax also works if the library supports it. ``WITH NAME``
syntax can be used to give a custom name to the imported library.
Examples:
| Import Library | MyLibrary |
| Import Library | ${CURDIR}/../Library.py | arg1 | named=arg2 |
| Import Library | ${LIBRARIES}/Lib.java | arg | WITH NAME | JavaLib |
"""
try:
self._namespace.import_library(name, list(args))
except DataError as err:
raise RuntimeError(unicode(err))
@run_keyword_variant(resolve=0)
def import_variables(self, path, *args):
"""Imports a variable file with the given path and optional arguments.
Variables imported with this keyword are set into the test suite scope
similarly when importing them in the Setting table using the Variables
setting. These variables override possible existing variables with
the same names. This functionality can thus be used to import new
variables, for example, for each test in a test suite.
The given path must be absolute. Forward slashes can be used as path
separator regardless of the operating system.
Examples:
| Import Variables | ${CURDIR}/variables.py | | |
| Import Variables | ${CURDIR}/../vars/env.py | arg1 | arg2 |
"""
try:
self._namespace.import_variables(path, list(args), overwrite=True)
except DataError as err:
raise RuntimeError(unicode(err))
@run_keyword_variant(resolve=0)
def import_resource(self, path):
"""Imports a resource file with the given path.
Resources imported with this keyword are set into the test suite scope
similarly when importing them in the Setting table using the Resource
setting.
The given path must be absolute. Forward slashes can be used as path
separator regardless of the operating system.
Examples:
| Import Resource | ${CURDIR}/resource.txt |
| Import Resource | ${CURDIR}/../resources/resource.html |
"""
try:
self._namespace.import_resource(path)
except DataError as err:
raise RuntimeError(unicode(err))
def set_library_search_order(self, *search_order):
"""Sets the resolution order to use when a name matches multiple keywords.
The library search order is used to resolve conflicts when a keyword
name in the test data matches multiple keywords. The first library
(or resource, see below) containing the keyword is selected and that
keyword implementation used. If the keyword is not found from any library
(or resource), test executing fails the same way as when the search
order is not set.
When this keyword is used, there is no need to use the long
``LibraryName.Keyword Name`` notation. For example, instead of
having
| MyLibrary.Keyword | arg |
| MyLibrary.Another Keyword |
| MyLibrary.Keyword | xxx |
you can have
| Set Library Search Order | MyLibrary |
| Keyword | arg |
| Another Keyword |
| Keyword | xxx |
This keyword can be used also to set the order of keywords in different
resource files. In this case resource names must be given without paths
or extensions like:
| Set Library Search Order | resource | another_resource |
*NOTE:*
- The search order is valid only in the suite where this keyword is used.
- Keywords in resources always have higher priority than
keywords in libraries regardless of the search order.
- The old order is returned and can be used to reset the search order later.
- Library and resource names in the search order are both case and space
insensitive.
"""
return self._namespace.set_search_order(search_order)
def keyword_should_exist(self, name, msg=None):
"""Fails unless the given keyword exists in the current scope.
Fails also if there are more than one keywords with the same name.
Works both with the short name (e.g. ``Log``) and the full name
(e.g. ``BuiltIn.Log``).
The default error message can be overridden with the ``msg`` argument.
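Example:
| Keyword Should Exist | BuiltIn.Log |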
See also `Variable Should Exist`.
"""
try:
handler = self._namespace.get_handler(name)
if isinstance(handler, UserErrorHandler):
handler.run()
except DataError as err:
raise AssertionError(msg or unicode(err))
def get_time(self, format='timestamp', time_='NOW'):
"""Returns the given time in the requested format.
*NOTE:* DateTime library added in Robot Framework 2.8.5 contains
much more flexible keywords for getting the current date and time
and for date and time handling in general.
How time is returned is determined based on the given ``format``
string as follows. Note that all checks are case-insensitive.
1) If ``format`` contains the word ``epoch``, the time is returned
in seconds after the UNIX epoch (1970-01-01 00:00:00 UTC).
The return value is always an integer.
2) If ``format`` contains any of the words ``year``, ``month``,
``day``, ``hour``, ``min``, or ``sec``, only the selected parts are
returned. The order of the returned parts is always the one
in the previous sentence and the order of words in ``format``
is not significant. The parts are returned as zero-padded
strings (e.g. May -> ``05``).
3) Otherwise (and by default) the time is returned as a
timestamp string in the format ``2006-02-24 15:08:31``.
By default this keyword returns the current local time, but
that can be altered using ``time`` argument as explained below.
Note that all checks involving strings are case-insensitive.
1) If ``time`` is a number, or a string that can be converted to
a number, it is interpreted as seconds since the UNIX epoch.
This documentation was originally written about 1177654467
seconds after the epoch.
2) If ``time`` is a timestamp, that time will be used. Valid
timestamp formats are ``YYYY-MM-DD hh:mm:ss`` and
``YYYYMMDD hhmmss``.
3) If ``time`` is equal to ``NOW`` (default), the current local
time is used. This time is obtained using Python's ``time.time()``
function.
4) If ``time`` is equal to ``UTC``, the current time in
[http://en.wikipedia.org/wiki/Coordinated_Universal_Time|UTC]
is used. This time is obtained using ``time.time() + time.altzone``
in Python.
5) If ``time`` is in the format like ``NOW - 1 day`` or ``UTC + 1 hour
30 min``, the current local/UTC time plus/minus the time
specified with the time string is used. The time string format
is described in an appendix of Robot Framework User Guide.
Examples (expecting the current local time is 2006-03-29 15:06:21):
| ${time} = | Get Time | | | |
| ${secs} = | Get Time | epoch | | |
| ${year} = | Get Time | return year | | |
| ${yyyy} | ${mm} | ${dd} = | Get Time | year,month,day |
| @{time} = | Get Time | year month day hour min sec | | |
| ${y} | ${s} = | Get Time | seconds and year | |
=>
| ${time} = '2006-03-29 15:06:21'
| ${secs} = 1143637581
| ${year} = '2006'
| ${yyyy} = '2006', ${mm} = '03', ${dd} = '29'
| @{time} = ['2006', '03', '29', '15', '06', '21']
| ${y} = '2006'
| ${s} = '21'
Examples (expecting the current local time is 2006-03-29 15:06:21 and
UTC time is 2006-03-29 12:06:21):
| ${time} = | Get Time | | 1177654467 | # Time given as epoch seconds |
| ${secs} = | Get Time | sec | 2007-04-27 09:14:27 | # Time given as a timestamp |
| ${year} = | Get Time | year | NOW | # The local time of execution |
| @{time} = | Get Time | hour min sec | NOW + 1h 2min 3s | # 1h 2min 3s added to the local time |
| @{utc} = | Get Time | hour min sec | UTC | # The UTC time of execution |
| ${hour} = | Get Time | hour | UTC - 1 hour | # 1h subtracted from the UTC time |
=>
| ${time} = '2007-04-27 09:14:27'
| ${secs} = 27
| ${year} = '2006'
| @{time} = ['16', '08', '24']
| @{utc} = ['12', '06', '21']
| ${hour} = '11'
Support for UTC time was added in Robot Framework 2.7.5 but it did not
work correctly until 2.7.7.
"""
return get_time(format, parse_time(time_))
def evaluate(self, expression, modules=None, namespace=None):
"""Evaluates the given expression in Python and returns the results.
``expression`` is evaluated in Python as explained in `Evaluating
expressions`.
``modules`` argument can be used to specify a comma separated
list of Python modules to be imported and added to the evaluation
namespace.
``namespace`` argument can be used to pass a custom evaluation
namespace as a dictionary. Possible ``modules`` are added to this
namespace. This is a new feature in Robot Framework 2.8.4.
Variables used like ``${variable}`` are replaced in the expression
before evaluation. Variables are also available in the evaluation
namespace and can be accessed using special syntax ``$variable``.
This is a new feature in Robot Framework 2.9 and it is explained more
thoroughly in `Evaluating expressions`.
Examples (expecting ``${result}`` is 3.14):
| ${status} = | Evaluate | 0 < ${result} < 10 | # Would also work with string '3.14' |
| ${status} = | Evaluate | 0 < $result < 10 | # Using variable itself, not string representation |
| ${random} = | Evaluate | random.randint(0, sys.maxint) | modules=random, sys |
| ${ns} = | Create Dictionary | x=${4} | y=${2} |
| ${result} = | Evaluate | x*10 + y | namespace=${ns} |
=>
| ${status} = True
| ${random} = <random integer>
| ${result} = 42
"""
variables = self._variables.as_dict(decoration=False)
expression = self._handle_variables_in_expression(expression, variables)
namespace = self._create_evaluation_namespace(namespace, modules)
variables = self._decorate_variables_for_evaluation(variables)
try:
if not is_string(expression):
raise TypeError("Expression must be string, got %s."
% type_name(expression))
if not expression:
raise ValueError("Expression cannot be empty.")
return eval(expression, namespace, variables)
except:
raise RuntimeError("Evaluating expression '%s' failed: %s"
% (expression, get_error_message()))
def _handle_variables_in_expression(self, expression, variables):
tokens = []
variable_started = seen_variable = False
generated = generate_tokens(StringIO(expression).readline)
for toknum, tokval, _, _, _ in generated:
if variable_started:
if toknum == token.NAME:
if tokval not in variables:
variable_not_found('$%s' % tokval, variables,
deco_braces=False)
tokval = 'RF_VAR_' + tokval
seen_variable = True
else:
tokens.append((token.ERRORTOKEN, '$'))
variable_started = False
if toknum == token.ERRORTOKEN and tokval == '$':
variable_started = True
else:
tokens.append((toknum, tokval))
if seen_variable:
return untokenize(tokens).strip()
return expression
def _create_evaluation_namespace(self, namespace, modules):
namespace = dict(namespace or {})
modules = modules.replace(' ', '').split(',') if modules else []
namespace.update((m, __import__(m)) for m in modules if m)
return namespace
def _decorate_variables_for_evaluation(self, variables):
decorated = [('RF_VAR_' + name, value)
for name, value in variables.items()]
return NormalizedDict(decorated, ignore='_')
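# Editor's sketch (not part of the original module), illustrating how the
# helpers above cooperate when `Evaluate` sees the special ``$variable``
# syntax. Assuming a variable store ``{'rc': 3}``:
#
#   expr = self._handle_variables_in_expression('$rc < 10', {'rc': 3})
#   # -> 'RF_VAR_rc < 10'  (the tokenizer rewrites $rc to RF_VAR_rc)
#   ns = self._decorate_variables_for_evaluation({'rc': 3})
#   # -> NormalizedDict with key 'RF_VAR_rc' mapped to 3
#   eval(expr, {}, ns)  # -> True, using the real object, not its string form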
def call_method(self, object, method_name, *args, **kwargs):
"""Calls the named method of the given object with the provided arguments.
The possible return value from the method is returned and can be
assigned to a variable. Keyword fails both if the object does not have
a method with the given name or if executing the method raises an
exception.
Support for ``**kwargs`` is new in Robot Framework 2.9. Since then,
possible equal signs in other arguments must be escaped with a
backslash like ``\\=``.
Examples:
| Call Method | ${hashtable} | put | myname | myvalue |
| ${isempty} = | Call Method | ${hashtable} | isEmpty | |
| Should Not Be True | ${isempty} | | | |
| ${value} = | Call Method | ${hashtable} | get | myname |
| Should Be Equal | ${value} | myvalue | | |
| Call Method | ${object} | kwargs | name=value | foo=bar |
| Call Method | ${object} | positional | escaped\\=equals |
"""
try:
method = getattr(object, method_name)
except AttributeError:
raise RuntimeError("Object '%s' does not have method '%s'."
% (object, method_name))
try:
return method(*args, **kwargs)
except:
raise RuntimeError("Calling method '%s' failed: %s"
% (method_name, get_error_message()))
def regexp_escape(self, *patterns):
"""Returns each argument string escaped for use as a regular expression.
This keyword can be used to escape strings to be used with
`Should Match Regexp` and `Should Not Match Regexp` keywords.
Escaping is done with Python's ``re.escape()`` function.
Examples:
| ${escaped} = | Regexp Escape | ${original} |
| @{strings} = | Regexp Escape | @{strings} |
"""
if len(patterns) == 0:
return ''
if len(patterns) == 1:
return re.escape(patterns[0])
return [re.escape(p) for p in patterns]
def set_test_message(self, message, append=False):
"""Sets message for the current test case.
If the optional ``append`` argument is given a true value (see `Boolean
arguments`), the given ``message`` is added after the possible earlier
message by joining the messages with a space.
In test teardown this keyword can alter the possible failure message,
but otherwise failures override messages set by this keyword. Notice
that in teardown the initial message is available as a built-in variable
``${TEST MESSAGE}``.
It is possible to use HTML format in the message by starting the message
with ``*HTML*``.
Examples:
| Set Test Message | My message | |
| Set Test Message | is continued. | append=yes |
| Should Be Equal | ${TEST MESSAGE} | My message is continued. |
| Set Test Message | `*`HTML`*` <b>Hello!</b> | |
This keyword can not be used in suite setup or suite teardown.
Support for ``append`` was added in Robot Framework 2.7.7 and support
for HTML format in 2.8.
"""
test = self._namespace.test
if not test:
raise RuntimeError("'Set Test Message' keyword cannot be used in "
"suite setup or teardown.")
test.message = self._get_possibly_appended_value(test.message, message,
append)
message, level = self._get_logged_test_message_and_level(test.message)
self.log('Set test message to:\n%s' % message, level)
def _get_possibly_appended_value(self, initial, new, append):
if not is_unicode(new):
new = unic(new)
if is_truthy(append) and initial:
return '%s %s' % (initial, new)
return new
def _get_logged_test_message_and_level(self, message):
if message.startswith('*HTML*'):
return message[6:].lstrip(), 'HTML'
return message, 'INFO'
def set_test_documentation(self, doc, append=False):
"""Sets documentation for the current test case.
By default the possible existing documentation is overwritten, but
this can be changed using the optional ``append`` argument similarly
as with `Set Test Message` keyword.
The current test documentation is available as a built-in variable
``${TEST DOCUMENTATION}``. This keyword can not be used in suite
setup or suite teardown.
New in Robot Framework 2.7. Support for ``append`` was added in 2.7.7.
"""
test = self._namespace.test
if not test:
raise RuntimeError("'Set Test Documentation' keyword cannot be "
"used in suite setup or teardown.")
test.doc = self._get_possibly_appended_value(test.doc, doc, append)
self._variables.set_test('${TEST_DOCUMENTATION}', test.doc)
self.log('Set test documentation to:\n%s' % test.doc)
def set_suite_documentation(self, doc, append=False, top=False):
"""Sets documentation for the current test suite.
By default the possible existing documentation is overwritten, but
this can be changed using the optional ``append`` argument similarly
as with `Set Test Message` keyword.
This keyword sets the documentation of the current suite by default.
If the optional ``top`` argument is given a true value (see `Boolean
arguments`), the documentation of the top level suite is altered
instead.
The documentation of the current suite is available as a built-in
variable ``${SUITE DOCUMENTATION}``.
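Examples:
| Set Suite Documentation | New documentation | |
| Set Suite Documentation | Added at the end. | append=yes |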
New in Robot Framework 2.7. Support for ``append`` and ``top`` were
added in 2.7.7.
"""
top = is_truthy(top)
suite = self._get_namespace(top).suite
suite.doc = self._get_possibly_appended_value(suite.doc, doc, append)
self._variables.set_suite('${SUITE_DOCUMENTATION}', suite.doc, top)
self.log('Set suite documentation to:\n%s' % suite.doc)
def set_suite_metadata(self, name, value, append=False, top=False):
"""Sets metadata for the current test suite.
By default possible existing metadata values are overwritten, but
this can be changed using the optional ``append`` argument similarly
as with `Set Test Message` keyword.
This keyword sets the metadata of the current suite by default.
If the optional ``top`` argument is given a true value (see `Boolean
arguments`), the metadata of the top level suite is altered instead.
The metadata of the current suite is available as a built-in variable
``${SUITE METADATA}`` in a Python dictionary. Notice that modifying this
variable directly has no effect on the actual metadata the suite has.
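Examples:
| Set Suite Metadata | Version | ${VERSION} |
| Set Suite Metadata | Version | (updated) | append=yes |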
New in Robot Framework 2.7.4. Support for ``append`` and ``top`` were
added in 2.7.7.
"""
top = is_truthy(top)
if not is_unicode(name):
name = unic(name)
metadata = self._get_namespace(top).suite.metadata
original = metadata.get(name, '')
metadata[name] = self._get_possibly_appended_value(original, value, append)
self._variables.set_suite('${SUITE_METADATA}', metadata.copy(), top)
self.log("Set suite metadata '%s' to value '%s'." % (name, metadata[name]))
def set_tags(self, *tags):
"""Adds given ``tags`` for the current test or all tests in a suite.
When this keyword is used inside a test case, that test gets
the specified tags and other tests are not affected.
If this keyword is used in a suite setup, all test cases in
that suite, recursively, get the given tags. It is a failure
to use this keyword in a suite teardown.
The current tags are available as a built-in variable ``@{TEST TAGS}``.
See `Remove Tags` if you want to remove certain tags and `Fail` if
you want to fail the test case after setting and/or removing tags.
"""
ctx = self._context
if ctx.test:
ctx.test.tags.add(tags)
ctx.variables.set_test('@{TEST_TAGS}', list(ctx.test.tags))
elif not ctx.in_suite_teardown:
ctx.suite.set_tags(tags, persist=True)
else:
raise RuntimeError("'Set Tags' cannot be used in suite teardown.")
self.log('Set tag%s %s.' % (s(tags), seq2str(tags)))
def remove_tags(self, *tags):
"""Removes given ``tags`` from the current test or all tests in a suite.
Tags can be given exactly or using a pattern where ``*`` matches
anything and ``?`` matches one character.
This keyword can affect either one test case or all test cases in a
test suite similarly as `Set Tags` keyword.
The current tags are available as a built-in variable ``@{TEST TAGS}``.
Example:
| Remove Tags | mytag | something-* | ?ython |
See `Set Tags` if you want to add certain tags and `Fail` if you want
to fail the test case after setting and/or removing tags.
"""
ctx = self._context
if ctx.test:
ctx.test.tags.remove(tags)
ctx.variables.set_test('@{TEST_TAGS}', list(ctx.test.tags))
elif not ctx.in_suite_teardown:
ctx.suite.set_tags(remove=tags, persist=True)
else:
raise RuntimeError("'Remove Tags' cannot be used in suite teardown.")
self.log('Removed tag%s %s.' % (s(tags), seq2str(tags)))
def get_library_instance(self, name):
"""Returns the currently active instance of the specified test library.
This keyword makes it easy for test libraries to interact with
other test libraries that have state. This is illustrated by
the Python example below:
<|fim▁hole|> |
| def title_should_start_with(expected):
| seleniumlib = BuiltIn().get_library_instance('SeleniumLibrary')
| title = seleniumlib.get_title()
| if not title.startswith(expected):
| raise AssertionError("Title '%s' did not start with '%s'"
| % (title, expected))
It is also possible to use this keyword in the test data and
pass the returned library instance to another keyword. If a
library is imported with a custom name, the ``name`` used to get
the instance must be that name and not the original library name.
"""
try:
return self._namespace.get_library_instance(name)
except DataError as err:
raise RuntimeError(unicode(err))
class BuiltIn(_Verify, _Converter, _Variables, _RunKeyword, _Control, _Misc):
"""An always available standard library with often needed keywords.
``BuiltIn`` is Robot Framework's standard library that provides a set
of generic keywords needed often. It is imported automatically and
thus always available. The provided keywords can be used, for example,
for verifications (e.g. `Should Be Equal`, `Should Contain`),
conversions (e.g. `Convert To Integer`) and for various other purposes
(e.g. `Log`, `Sleep`, `Run Keyword If`, `Set Global Variable`).
== Table of contents ==
- `HTML error messages`
- `Evaluating expressions`
- `Boolean arguments`
- `Shortcuts`
- `Keywords`
= HTML error messages =
Many of the keywords accept an optional error message to use if the keyword
fails. Starting from Robot Framework 2.8, it is possible to use HTML in
these messages by prefixing them with ``*HTML*``. See `Fail` keyword for
a usage example. Notice that using HTML in messages is not limited to
BuiltIn library but works with any error message.
= Evaluating expressions =
Many keywords, such as `Evaluate`, `Run Keyword If` and `Should Be True`,
accept an expression that is evaluated in Python. These expressions are
evaluated using Python's
[https://docs.python.org/2/library/functions.html#eval|eval] function so
that all Python built-ins like ``len()`` and ``int()`` are available.
`Evaluate` allows configuring the execution namespace with custom modules,
and other keywords have [https://docs.python.org/2/library/os.html|os]
and [https://docs.python.org/2/library/sys.html|sys] modules available
automatically.
Examples:
| `Run Keyword If` | os.sep == '/' | Log | Not on Windows |
| ${random int} = | `Evaluate` | random.randint(0, 5) | modules=random |
When a variable is used in the expression using the normal ``${variable}``
syntax, its value is replaced before the expression is evaluated. This
means that the value used in the expression will be the string
representation of the variable value, not the variable value itself.
This is not a problem with numbers and other objects that have a string
representation that can be evaluated directly, but with other objects
the behavior depends on the string representation. Most importantly,
strings must always be quoted, and if they can contain newlines, they must
be triple quoted.
Examples:
| `Should Be True` | ${rc} < 10 | Return code greater than 10 |
| `Run Keyword If` | '${status}' == 'PASS' | Log | Passed |
| `Run Keyword If` | 'FAIL' in '''${output}''' | Log | Output contains FAIL |
Starting from Robot Framework 2.9, variables themselves are automatically
available in the evaluation namespace. They can be accessed using special
variable syntax without the curly braces like ``$variable``. These
variables should never be quoted, and in fact they are not even replaced
inside strings.
Examples:
| `Should Be True` | $rc < 10 | Return code greater than 10 |
| `Run Keyword If` | $status == 'PASS' | `Log` | Passed |
| `Run Keyword If` | 'FAIL' in $output | `Log` | Output contains FAIL |
| `Should Be True` | len($result) > 1 and $result[1] == 'OK' |
Notice that instead of creating complicated expressions, it is often better
to move the logic into a test library.
= Boolean arguments =
Some keywords accept arguments that are handled as Boolean values true or
false. If such an argument is given as a string, it is considered false if
it is either empty or case-insensitively equal to ``false`` or ``no``.
Keywords verifying something that allow dropping actual and expected values
from the possible error message also consider string ``no values`` as false.
Other strings are considered true regardless of their value, and other
argument types are tested using same
[http://docs.python.org/2/library/stdtypes.html#truth-value-testing|rules
as in Python].
True examples:
| `Should Be Equal` | ${x} | ${y} | Custom error | values=True | # Strings are generally true. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=yes | # Same as the above. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=${TRUE} | # Python ``True`` is true. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=${42} | # Numbers other than 0 are true. |
False examples:
| `Should Be Equal` | ${x} | ${y} | Custom error | values=False | # String ``false`` is false. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=no | # Also string ``no`` is false. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=${EMPTY} | # Empty string is false. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=${FALSE} | # Python ``False`` is false. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=no values | # ``no values`` works with ``values`` argument |
Note that prior to Robot Framework 2.9 some keywords considered all
non-empty strings, including ``false`` and ``no``, to be true.
"""
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
ROBOT_LIBRARY_VERSION = get_version()
class RobotNotRunningError(AttributeError):
"""Used when something cannot be done because Robot is not running.
Based on AttributeError to be backwards compatible with RF < 2.8.5.
May later be based directly on Exception, so new code should except
this exception explicitly.
"""
pass
def register_run_keyword(library, keyword, args_to_process=None):
"""Registers 'run keyword' so that its arguments can be handled correctly.
1) Why is this method needed
Keywords running other keywords internally (normally using `Run Keyword`
or some variants of it in BuiltIn) must have the arguments meant to the
internally executed keyword handled specially to prevent processing them
twice. This is done ONLY for keywords registered using this method.
If the registered keyword has the same name as any keyword from the Robot
Framework standard libraries, it can be used without getting warnings. Normally
there is a warning in such cases unless the keyword is used in long
format (e.g. MyLib.Keyword).
Keywords executed by registered run keywords can be tested in dry-run mode
if they have a 'name' argument which takes the name of the executed keyword.
2) How to use this method
`library` is the name of the library where the registered keyword is
implemented.
`keyword` can be either a function or method implementing the
keyword, or name of the implemented keyword as a string.
`args_to_process` is needed when `keyword` is given as a string, and it
defines how many of the arguments to the registered keyword must be
processed normally. When `keyword` is a method or function, this
information is got directly from it so that varargs (those specified with
syntax '*args') are not processed but others are.
3) Examples
from robotide.lib.robot.libraries.BuiltIn import BuiltIn, register_run_keyword
def my_run_keyword(name, *args):
# do something
return BuiltIn().run_keyword(name, *args)
# Either one of these works
register_run_keyword(__name__, my_run_keyword)
register_run_keyword(__name__, 'My Run Keyword', 1)
-------------
from robotide.lib.robot.libraries.BuiltIn import BuiltIn, register_run_keyword
class MyLibrary:
def my_run_keyword_if(self, expression, name, *args):
# do something
return BuiltIn().run_keyword_if(expression, name, *args)
# Either one of these works
register_run_keyword('MyLibrary', MyLibrary.my_run_keyword_if)
register_run_keyword('MyLibrary', 'my_run_keyword_if', 2)
"""
RUN_KW_REGISTER.register_run_keyword(library, keyword, args_to_process)
[register_run_keyword('BuiltIn', getattr(_RunKeyword, a))
for a in dir(_RunKeyword) if a[0] != '_']<|fim▁end|> | | from robotide.lib.robot.libraries.BuiltIn import BuiltIn |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>import datetime
import logging
from functools import reduce
from flask_babelpkg import lazy_gettext
from .filters import Filters
log = logging.getLogger(__name__)
class BaseInterface(object):
"""
Base class for all data model interfaces.
Sub class it to implement your own interface for some data engine.
"""
obj = None
filter_converter_class = None
""" when sub classing override with your own custom filter converter """
""" Messages to display on CRUD Events """
add_row_message = lazy_gettext('Added Row')
edit_row_message = lazy_gettext('Changed Row')
delete_row_message = lazy_gettext('Deleted Row')
delete_integrity_error_message = lazy_gettext('Associated data exists, please delete them first')
add_integrity_error_message = lazy_gettext('Integrity error, probably unique constraint')
edit_integrity_error_message = lazy_gettext('Integrity error, probably unique constraint')
general_error_message = lazy_gettext('General Error')
""" Tuple with message and text with severity type ex: ("Added Row", "info") """
message = ()
def __init__(self, obj):
self.obj = obj
def _get_attr_value(self, item, col):
if not hasattr(item, col):
# it's an inner obj attr
return reduce(getattr, col.split('.'), item)
if hasattr(getattr(item, col), '__call__'):
# it's a function
return getattr(item, col)()
else:
# it's an attribute
return getattr(item, col)
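# Editor's sketch (not in the original source): given a dotted column name,
# the reduce() above walks nested attributes, e.g. for col = 'group.name' it
# evaluates getattr(getattr(item, 'group'), 'name'). A plain attribute is
# returned as-is, while a callable attribute (a model method) is invoked,
# so methods can be listed like ordinary columns.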
def get_filters(self, search_columns=None):
search_columns = search_columns or []
return Filters(self.filter_converter_class, self, search_columns)
def get_values_item(self, item, show_columns):
return [self._get_attr_value(item, col) for col in show_columns]
def _get_values(self, lst, list_columns):
"""
Get Values: formats values for the list template.
returns [{'col_name':'col_value',....},{'col_name':'col_value',....}]
:param lst:
The list of item objects from query
:param list_columns:
The list of columns to include
"""
retlst = []
for item in lst:
retdict = {}
for col in list_columns:
retdict[col] = self._get_attr_value(item, col)
retlst.append(retdict)
return retlst
def get_values(self, lst, list_columns):
"""
Get Values: formats values for the list template.
returns [{'col_name':'col_value',....},{'col_name':'col_value',....}]
:param lst:
The list of item objects from query
:param list_columns:
The list of columns to include
"""
for item in lst:
retdict = {}
for col in list_columns:
retdict[col] = self._get_attr_value(item, col)
yield retdict
def get_values_json(self, lst, list_columns):
"""
Converts list of objects from query to JSON
"""
result = []
for item in self.get_values(lst, list_columns):
for key, value in list(item.items()):
if isinstance(value, datetime.datetime) or isinstance(value, datetime.date):
value = value.isoformat()
item[key] = value
if isinstance(value, list):
item[key] = [str(v) for v in value]
result.append(item)
return result
"""
Returns the model's class name,
useful for auto titles on views
"""
@property
def model_name(self):
return self.obj.__class__.__name__
"""
The following methods must be overridden
"""
def query(self, filters=None, order_column='', order_direction='',
page=None, page_size=None):
pass
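# Editor's note (assumption based on concrete implementations such as
# SQLAInterface): query() is expected to return a (count, items) tuple,
# where count is the total number of matching rows and items is the page
# of model objects selected by the given filters and ordering.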
def is_image(self, col_name):
return False
def is_file(self, col_name):
return False
def is_gridfs_file(self, col_name):
return False
def is_gridfs_image(self, col_name):
return False
def is_string(self, col_name):
return False
def is_text(self, col_name):
return False
def is_integer(self, col_name):
return False
def is_float(self, col_name):
return False
def is_boolean(self, col_name):
return False
def is_date(self, col_name):
return False
def is_datetime(self, col_name):
return False
def is_relation(self, prop):
return False
def is_relation_col(self, col):
return False
def is_relation_many_to_one(self, prop):
return False
def is_relation_many_to_many(self, prop):
return False
def is_relation_one_to_one(self, prop):
return False
def is_relation_one_to_many(self, prop):
return False
def is_nullable(self, col_name):
return True
def is_unique(self, col_name):
return False
def is_pk(self, col_name):
return False
def is_fk(self, col_name):
return False
def get_max_length(self, col_name):<|fim▁hole|>
"""
-----------------------------------------
FUNCTIONS FOR CRUD OPERATIONS
-----------------------------------------
"""
def add(self, item):
"""
Adds object
"""
raise NotImplementedError
def edit(self, item):
"""
Edit (change) object
"""
raise NotImplementedError
def delete(self, item):
"""
Deletes object
"""
raise NotImplementedError
def get_col_default(self, col_name):
pass
def get_keys(self, lst):
"""
Return a list of pk values from an object list.
"""
pk_name = self.get_pk_name()
return [getattr(item, pk_name) for item in lst]
def get_pk_name(self):
"""
Returns the primary key name
"""
raise NotImplementedError
def get_pk_value(self, item):
return getattr(item, self.get_pk_name())
def get(self, pk):
"""
return the record from key
"""
pass
def get_related_model(self, prop):
raise NotImplementedError
def get_related_interface(self, col_name):
"""
Returns a BaseInterface for the related model
of column name.
:param col_name: Column name with relation
:return: BaseInterface
"""
raise NotImplementedError
def get_related_obj(self, col_name, value):
raise NotImplementedError
def get_related_fk(self, model):
raise NotImplementedError
def get_columns_list(self):
"""
Returns a list of all the columns names
"""
return []
def get_user_columns_list(self):
"""
Returns a list of user viewable columns names
"""
return self.get_columns_list()
def get_search_columns_list(self):
"""
Returns a list of searchable columns names
"""
return []
def get_order_columns_list(self, list_columns=None):
"""
Returns a list of order columns names
"""
return []
def get_relation_fk(self, prop):
pass<|fim▁end|> | return -1
def get_min_length(self, col_name):
return -1 |
<|file_name|>builder.rs<|end_file_name|><|fim▁begin|>use anyhow::Context;
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Deserialize)]
#[allow(non_camel_case_types)]
pub enum Arch {
#[serde(rename = "i686")]
I686,
#[serde(rename = "x86_64")]
X86_64,
#[serde(rename = "arm")]
ARM,
#[serde(rename = "armv6h")]
ARMV6H,
#[serde(rename = "armv7h")]
ARMV7H,
#[serde(rename = "aarch64")]<|fim▁hole|>
impl std::fmt::Display for Arch {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
match *self {
Arch::I686 => "i686".fmt(f),
Arch::X86_64 => "x86_64".fmt(f),
Arch::ARM => "arm".fmt(f),
Arch::ARMV6H => "armv6h".fmt(f),
Arch::ARMV7H => "armv7h".fmt(f),
Arch::AARCH64 => "aarch64".fmt(f),
}
}
}
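// Editor's sketch (not in the original source): the Display impl mirrors the
// serde rename attributes above, so parsing and printing round-trip. Assuming
// serde_json is available as a dev-dependency:
//
//     let arch: Arch = serde_json::from_str("\"x86_64\"").unwrap();
//     assert_eq!(arch.to_string(), "x86_64");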
#[derive(Debug, Clone)]
pub struct ChrootHelper<'a> {
chroot_dir: &'a str,
#[allow(dead_code)]
arch: Arch,
}
impl<'a> ChrootHelper<'a> {
pub fn new(chroot_dir: &'a str, arch: Arch) -> Self {
ChrootHelper { chroot_dir, arch }
}
pub async fn makechrootpkg<P, Q, R, S>(
&self,
package_dir: P,
srcdest: Q,
pkgdest: R,
logdest: S,
) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
Q: AsRef<std::path::Path>,
R: AsRef<std::path::Path>,
S: AsRef<std::path::Path>,
{
let current_dir_buf = std::env::current_dir()?;
let current_dir = current_dir_buf.as_path();
let mut srcdest_arg = std::ffi::OsString::from("SRCDEST=");
srcdest_arg.push(current_dir.join(srcdest));
let mut pkgdest_arg = std::ffi::OsString::from("PKGDEST=");
pkgdest_arg.push(current_dir.join(pkgdest));
let mut logdest_arg = std::ffi::OsString::from("LOGDEST=");
logdest_arg.push(current_dir.join(logdest));
let mut cmd = tokio::process::Command::new("sudo");
cmd.current_dir(package_dir)
.arg("env")
.arg(srcdest_arg)
.arg(pkgdest_arg)
.arg(logdest_arg)
.arg("makechrootpkg")
.arg("-cur")
.arg(current_dir.join(self.chroot_dir));
log::info!("{:?}", cmd);
let status = cmd.status().await?;
if status.success() {
Ok(())
} else {
Err(anyhow::anyhow!("makechrootpkg failed"))
}
}
}
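// Editor's sketch (hypothetical call site, not in the original source): a
// Builder drives this helper once a chroot has been prepared, e.g. with
// mkarchroot. The directory names below are placeholders:
//
//     let chroot = ChrootHelper::new("root", Arch::X86_64);
//     let builder = Builder::new(None, "srcdest", "logdest");
//     let packages = builder.build_package("pkg", "repo", &chroot).await?;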
#[derive(Debug, Clone)]
pub struct Builder<'a> {
signer: Option<&'a super::signer::Signer<'a>>,
srcdest: &'a str,
logdest: &'a str,
}
impl<'a> Builder<'a> {
pub fn new(
signer: Option<&'a super::signer::Signer<'a>>,
srcdest: &'a str,
logdest: &'a str,
) -> Self {
Builder {
signer,
srcdest,
logdest,
}
}
pub async fn build_package<P, Q>(
&self,
package_dir: P,
repo_dir: Q,
chroot_helper: &ChrootHelper<'a>,
) -> Result<Vec<std::path::PathBuf>, anyhow::Error>
where
P: AsRef<std::path::Path>,
Q: AsRef<std::path::Path>,
{
let package_dir = package_dir.as_ref();
let tempdir = tempdir::TempDir::new("guzuta-pkgdest")?;
let pkgdest = tempdir.path();
chroot_helper
.makechrootpkg(package_dir, self.srcdest, pkgdest, self.logdest)
.await?;
let mut dir = tokio::fs::read_dir(pkgdest).await?;
let mut futures_unordered = futures::stream::FuturesUnordered::new();
while let Some(entry) = dir.next_entry().await? {
let dest = repo_dir.as_ref().join(entry.file_name());
futures_unordered.push(async move {
let symlink_package_path = package_dir.join(entry.file_name());
if symlink_package_path.read_link().is_ok() {
// Unlink symlink created by makechrootpkg
log::info!("Unlink symlink {}", symlink_package_path.display());
tokio::fs::remove_file(symlink_package_path).await?;
}
log::info!("Copy {} to {}", entry.path().display(), dest.display());
tokio::fs::copy(entry.path(), &dest)
.await
.with_context(|| {
format!("Unable to copy file {:?} to {:?}", entry.path(), dest)
})?;
if let Some(signer) = self.signer {
let mut sig_dest = dest.clone().into_os_string();
sig_dest.push(".sig");
signer.sign(&dest, sig_dest).await?;
}
Ok::<_, anyhow::Error>(dest)
});
}
use futures::StreamExt as _;
let mut paths = vec![];
while let Some(path) = futures_unordered.next().await {
paths.push(path?);
}
Ok(paths)
}
}<|fim▁end|> | AARCH64,
} |