prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>scatter_profile.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- noplot -*-
"""
N Classic Base renderer Ext renderer
20 0.22 0.14 0.14
100 0.16 0.14 0.13
1000 0.45 0.26 0.17
10000 3.30 1.31 0.53
50000 19.30 6.53 1.98
"""
from pylab import *<|fim▁hole|>import time
for N in (20,100,1000,10000,50000):
tstart = time.time()
x = 0.9*rand(N)
y = 0.9*rand(N)
s = 20*rand(N)
scatter(x,y,s)
print '%d symbols in %1.2f s' % (N, time.time()-tstart)<|fim▁end|> | |
<|file_name|>prng.cpp<|end_file_name|><|fim▁begin|>// prng.cpp or pseudo-random number generator (prng)
// Generates some pseudo-random numbers.
#include <iostream>
#include <iomanip>
using std::cout; // iostream
using std::endl;
using std::setw; // iomanip
// function generates random number
// Generates a pseudo-random number with a linear congruential generator (LCG).
// NOTE(review): `% 65535` yields values in [0, 65534] -- it can never produce
// 65535 itself. If the full 16-bit range [0, 65535] was intended, the modulus
// should be 65536; left unchanged here to preserve the output sequence.
unsigned pseudoRNG() {
static unsigned seed = 5493; // some (any) initial starting seed; initialized only once!
// Take the current seed and generate a new value from it.
// Thanks to the large multiplier and increment it is difficult to
// predict the next value from the previous one.
// The static local has program lifetime, so the seed persists across
// calls and every call advances the sequence by one step.
seed = (3852591 * seed + 5180347);
// Reduce to the output range (see range note above).
return (seed % 65535);
}
int main()
{
// generate 100 random numbers - print in separate fields
for (int i = 1; i <= 100; i++) {
cout << setw(8) << pseudoRNG();<|fim▁hole|> if (i % 5 == 0)
cout << endl;
}
return 0;
}<|fim▁end|> |
// new line every fifth number |
<|file_name|>Hotdot.js<|end_file_name|><|fim▁begin|>import React from 'react';
import {
Link
} from 'react-router';
import HotdotActions from '../actions/HotdotActions';
import HotdotObjStore from '../stores/HotdotObjStore';
import MyInfoNavbar from './MyInfoNavbar';
import Weixin from './Weixin';
class Hotdot extends React.Component {
constructor(props) {
    super(props);
    // Seed component state from the Alt store so the first render has data.
    this.state = HotdotObjStore.getState();
    // Bind once so the exact same function reference can be passed to both
    // listen() and unlisten().
    this.onChange = this.onChange.bind(this);
}
componentDidMount() {
    // Kick off the async fetch of trending-keyword ("hotdot") data.
    HotdotActions.getHotdotDatas();
    // Default view is the weekly ranking: hide the monthly panel.
    $(".month-search").hide();
    // Toggle between the weekly and monthly panels on tab touch;
    // tab index 0 is the weekly tab.
    $(".navbar-hotdot").on("touchend",function(){
        var index = $(this).index();
        if(index==0){
            // weekly tab
            $(".month-search").hide();
            $(".week-search").show();
        }else{
            // monthly tab
            $(".month-search").show();
            $(".week-search").hide();
        }
    });
    // Re-render whenever the store emits fresh data.
    HotdotObjStore.listen(this.onChange);
    // Set up WeChat JS-SDK sharing for this page.
    Weixin.getUrl();
    Weixin.weixinReady();
}
componentWillUnmount() {
    // Detach the store listener to avoid setState on an unmounted component.
    HotdotObjStore.unlisten(this.onChange);
}
onChange(state) {
    // Mirror store updates into component state to trigger a re-render.
    this.setState(state);
}
getUpOrDown(curData,preData,isWeek){
var preDataItem = isWeek ? preData.week:preData.month;
if(preData==false || preData == [] || preDataItem==undefined){
return (<span className="hotdotRight"><span className="glyphicon-trend glyphicon glyphicon-arrow-up"></span>
<span className="badge">{curData.value}</span></span>);
}else{
for(var i = 0;i < preDataItem.length;i++){
if(preDataItem[i].word == curData.word){
if(preDataItem[i].value < curData.value){
return (<span className="hotdotRight"><span className="glyphicon-trend glyphicon glyphicon-arrow-up"></span>
<span className="badge">{curData.value}</span></span>);
}else{
return (<span className="hotdotRight"><span className="glyphicon-trend glyphicon glyphicon-arrow-down"></span>
<span className="badge" style={{backgroundColor:"#4F81E3"}}>{curData.value}</span></span>);
}
}
}
}
return (<span className="hotdotRight"><span className="glyphicon-trend glyphicon glyphicon-arrow-up"></span>
<span className="badge">{curData.value}</span></span>);
}
render() {
var hotdotData = (this.state.data);
var firstHotData = hotdotData[0];
var preHotData ;
if(hotdotData.length > 7){
preHotData = hotdotData[7];
}else{
preHotData = [];
}
if(firstHotData){
var weekList = firstHotData.week.map((weekItem,i)=>(
<li className="list-group-item" key={i}>
{this.getUpOrDown(weekItem,preHotData,true)}
{weekItem.word}
</li>
));
if(weekList.length==0){
weekList = <div className = "noData">数据还没有准备好,要不去其他页面瞅瞅?</div>
}
<|fim▁hole|> {this.getUpOrDown(monthItem,preHotData,false)}
{monthItem.word}
</li>
));
if(monthList.length==0){
monthList = <div className = "noData">Whops,这个页面的数据没有准备好,去其他页面瞅瞅?</div>
}
}else{
var weekList = (<span>正在构建,敬请期待...</span>);
var monthList = (<span>正在构建,敬请期待...</span>);
}
return (<div>
<div className="content-container">
<div className="week-search">
<div className="panel panel-back">
<div className="panel-heading">
<span className="panel-title">本周关键字排行榜</span>
<div className="navbar-key-container">
<span className="navbar-hotdot navbar-week navbar-hotdot-active">本周</span>
<span className="navbar-hotdot navbar-month">本月</span>
</div>
</div>
<div className="panel-body">
<ul className="list-group">
{weekList}
</ul>
</div>
</div>
</div>
<div className="month-search">
<div className="panel panel-back">
<div className="panel-heading">
<span className="panel-title">本月关键字排行榜</span>
<div className="navbar-key-container">
<span className="navbar-hotdot navbar-week">本周</span>
<span className="navbar-hotdot navbar-month navbar-hotdot-active">本月</span>
</div>
</div>
<div className="panel-body">
<ul className="list-group">
{monthList}
</ul>
</div>
</div>
</div>
</div>
</div>);
}
}
export default Hotdot;<|fim▁end|> | var monthList = firstHotData.month.map((monthItem,i)=>(
<li className="list-group-item" key={i}> |
<|file_name|>kindle_test.go<|end_file_name|><|fim▁begin|>package kindle
import (
"fmt"
"path/filepath"
"reflect"
"runtime"
"strings"
"testing"
"time"
ledgertools "github.com/ginabythebay/ledger-tools"
)
var happyEmail = strings.TrimSpace(`
Hello Gina White,
Thank you for shopping with us. All Kindle content, including books and Kindle active content, that you've purchased from the Kindle Store is stored in your Kindle library https://www.amazon.com/liblink
............................................................................
Order Information:
E-mail Address:
[email protected]
Order Grand Total:
$5.98
............................................................................
Order Summary:
Details:
Order #: D12-1234567-1234567
Items Subtotal: $5.98
Tax Collected: $0.00
........................
Grand Total: $5.98
............................................................................
First Title
Kindle Edition
Sold by Amazon Digital Services LLC
Second Title
Kindle Edition
Sold by Amazon Digital Services LLC
............................................................................
You can view your receipt or invoice by visiting the Order details page:
http://www.amazon.com/orderdetailslink
The charge for this order will appear on your credit card statement from
the merchant 'AMZN Payment Services'.
You can review your orders in Your Account.
If you've explored the links on that page but still have a question, please
visit our online Help Department:
http://www.amazon.com/somehelplink
............................................................................
Please note: This e-mail was sent from a notification-only address that
cannot accept incoming e-mail. Please do not reply to this message.
Thanks again for shopping with us.
`)
var happyMsg = ledgertools.NewMessage(
"Thu, 20 Oct 2016 22:09:58 +0000",
"[email protected]",
fromMatcher,
"Amazon.com order of First Title and 1 more item",
happyEmail,
"")
// TestHappyImport verifies that a well-formed Kindle order e-mail is parsed
// into the expected date, order number, comment lines, amount and payment
// instrument.
func TestHappyImport(t *testing.T) {
	parsed, err := importMessage(happyMsg)
	ok(t, err)
	// The message's Date header is "Thu, 20 Oct 2016 22:09:58 +0000".
	year, month, day := parsed.Date.Date()
	equals(t, 2016, year)
	equals(t, time.October, month)
	equals(t, 20, day)
	equals(t, "D12-1234567-1234567", parsed.CheckNumber)
	// Comments carry the order number, both item titles and the details link.
	equals(t,
		[]string{
			"Order #: D12-1234567-1234567",
			"First Title",
			"Second Title",
			"http://www.amazon.com/orderdetailslink",
		},
		parsed.Comments)
	equals(t, "$5.98", parsed.Amount)
	equals(t, defaultPayment, parsed.PaymentInstrument)
}
// BenchmarkHappyImport measures the cost of parsing one well-formed Kindle
// order e-mail; the error and result are deliberately discarded.
func BenchmarkHappyImport(b *testing.B) {
	for i := 0; i < b.N; i++ {
		importMessage(happyMsg)
	}
}
// assert fails the test if the condition is false.
// msg is a fmt format string applied to v; the failure is reported with the
// immediate caller's file:line (runtime.Caller(1)) wrapped in red ANSI codes.
func assert(tb testing.TB, condition bool, msg string, v ...interface{}) {
	if !condition {
		_, file, line, _ := runtime.Caller(1)
		fmt.Printf("\033[31m%s:%d: "+msg+"\033[39m\n\n", append([]interface{}{filepath.Base(file), line}, v...)...)
		tb.FailNow()
	}
}
// ok fails the test if an err is not nil.
func ok(tb testing.TB, err error) {
if err != nil {
_, file, line, _ := runtime.Caller(1)<|fim▁hole|> }
}
// equals fails the test if exp is not equal to act.
func equals(tb testing.TB, exp, act interface{}) {
if !reflect.DeepEqual(exp, act) {
_, file, line, _ := runtime.Caller(1)
fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
tb.FailNow()
}
}<|fim▁end|> | fmt.Printf("\033[31m%s:%d: unexpected error: %s\033[39m\n\n", filepath.Base(file), line, err.Error())
tb.FailNow() |
<|file_name|>stats.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from app import app
from app.authentication import with_login
from flask import Blueprint, jsonify, request, Response
from app.generate_csv import generate_csv_clean
from app.msol_util import get_next_update_estimation_message_aws
from app.es.awsmetric import AWSMetric
from app.es.awsstat import AWSStat
from app.es.awsdetailedlineitem import AWSDetailedLineitem
from app.aws_keys import with_multiple_aws_accounts
from dateutil.relativedelta import relativedelta
from app.generate_csv import generate_csv
from app.cache import compressed_json, decompressed_json, cache, with_cache
from hashlib import sha256
from .. import AWS_KEY_PROCESSING_INTERVAL_HOURS
import itertools
import calendar
import config
aws_cost_stats_bp = Blueprint('aws_cost_stats_bp', __name__)
def cut_cost_by_product(products, cut):
    """Keep at most `cut` leading products; fold the rest into one entry.

    A product is kept only when it sits within the first `cut` positions AND
    its cost is at least one cent. Everything else is accumulated under a
    synthetic 'Other Services' entry, which is appended only when its own
    total reaches one cent.
    """
    kept = []
    other_total = 0
    for position, product in enumerate(products):
        if position < cut and product['cost'] >= 0.01:
            kept.append(product)
        else:
            other_total += product['cost']
    if other_total >= 0.01:
        kept.append({'product': 'Other Services', 'cost': other_total})
    return kept
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycost', defaults={'nb_months': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycost/<int:nb_months>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_monthlycost(accounts, nb_months):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get monthly costs
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        months:
                            type: array
                            items:
                                properties:
                                    month:
                                        type: string
                                    total_cost:
                                        type: number
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow()
    # Window: the 1st of the month `nb_months - 1` months ago ...
    date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
    # ... through the last microsecond of the current month.
    date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
                          hour=23, minute=59, second=59, microsecond=999999)
    data = AWSDetailedLineitem.get_monthly_cost(keys=[account.get_aws_user_id() for account in accounts],
                                                date_from=date_from,
                                                date_to=date_to)
    return jsonify(data)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/totalcost/<string:time_arg>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_totalcost(accounts, time_arg):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get total cost
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        months:
                            type: array
                            items:
                                properties:
                                    total_cost:
                                        type: number
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow()
    this_day = now.replace(hour=0, minute=0, second=0, microsecond=0)
    this_month = this_day.replace(day=1)
    # Map the URL's time argument to the start of the aggregation window.
    time_val = {
        'ever': AWSDetailedLineitem.get_first_date([account.get_aws_user_id() for account in accounts]),
        # BUGFIX: subtracting `this_month.month` months from the 1st of the
        # current month landed on Dec 1st of the *previous* year (e.g. May 1
        # minus 5 months). Going back `month - 1` months yields Jan 1st of
        # the current year, as 'currentyear' implies.
        'currentyear': this_month - relativedelta(months=this_month.month - 1),
        'currentmonth': this_month,
    }
    # Unknown time_arg falls back to `now`, i.e. just today's data.
    date_from = time_val.get(time_arg, now)
    date_to = now.replace(hour=23, minute=59, second=59, microsecond=999999)
    raw_data = AWSDetailedLineitem.get_cost(keys=[account.get_aws_user_id() for account in accounts],
                                            date_from=date_from,
                                            date_to=date_to)
    return jsonify(raw_data)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyregion', defaults={'nb_months': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyregion/<int:nb_months>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_monthlycostbyregion(accounts, nb_months):
"""---
get:
tags:
- aws
produces:
- application/json
description: &desc Get monthly costs summed by region
summary: *desc
responses:
200:
description: List of AWS accounts
schema:
properties:
months:
type: array
items:
properties:
month:
type: string
products:
type: array
items:
properties:
cost:
type: number
region:
type: string
403:
description: Not logged in
404:
description: AWS account not registered
"""
assert len(accounts) > 0
now = datetime.utcnow()
date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
hour=23, minute=59, second=59, microsecond=999999)
raw_data = AWSDetailedLineitem.get_cost_by_region(keys=[account.get_aws_user_id() for account in accounts],
date_from=date_from,
date_to=date_to)['intervals']['buckets']
res = [
{
'month': data['key_as_string'].split('T')[0],
'regions': [
{
'region': region['key'],
'cost': region['cost']['value'],
}
for region in data['regions']['buckets']
],
}
for data in raw_data
]
if 'csv' in request.args:
return Response(generate_csv(res, 'regions', 'region'), mimetype='text/csv')
return jsonify(months=res)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyregionbyaccount', defaults={'nb_months': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyregionbyaccount/<int:nb_months>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_monthlycostbyregionbyaccount(accounts, nb_months):
"""---
get:
tags:
- aws
produces:
- application/json
description: &desc Get monthly costs summed by region for each account
summary: *desc
responses:
200:
description: List of AWS accounts
schema:
properties:
months:
type: array
items:
properties:
month:
type: string
products:
type: array
items:
properties:
cost:
type: number
region:
type: string
403:
description: Not logged in
404:
description: AWS account not registered
"""
assert len(accounts) > 0
now = datetime.utcnow()
date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
hour=23, minute=59, second=59, microsecond=999999)
raw_data = AWSDetailedLineitem.get_cost_by_region(keys=[account.get_aws_user_id() for account in accounts],
byaccount=True,
date_from=date_from,
date_to=date_to)['accounts']['buckets']
res = [
{
'account_id': account['key'],
'account_name': [a.pretty for a in accounts if account['key'] == a.get_aws_user_id()][0],
'months': [
{
'month': data['key_as_string'].split('T')[0],
'regions': [
{
'region': region['key'],
'cost': region['cost']['value'],
}
for region in data['regions']['buckets']
],
}
for data in account['intervals']['buckets']
]
}
for account in raw_data
]
if 'csv' in request.args:
return Response(generate_csv(res, 'regions', 'region', account=True), mimetype='text/csv')
return jsonify(accounts=res)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyregionbytagbyaccount', defaults={'nb_months': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyregionbytagbyaccount/<int:nb_months>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_monthlycostbyregionbytagbyaccount(accounts, nb_months):
"""---
get:
tags:
- aws
produces:
- application/json
description: &desc Get monthly costs summed by region for each account
summary: *desc
responses:
200:
description: List of AWS accounts
schema:
properties:
months:
type: array
items:
properties:
month:
type: string
products:
type: array
items:
properties:
cost:
type: number
region:
type: string
403:
description: Not logged in
404:
description: AWS account not registered
"""
assert len(accounts) > 0
now = datetime.utcnow()
date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
hour=23, minute=59, second=59, microsecond=999999)
raw_data = AWSDetailedLineitem.get_cost_by_region(keys=[account.get_aws_user_id() for account in accounts],
tagged=True,
byaccount=True,
date_from=date_from,
date_to=date_to)['accounts']['buckets']
def tagged_cost(bucket, total):
total_tag = 0.0
for tag in bucket:
total_tag += tag['cost']['value']
yield (tag['key'], tag['cost']['value'])
if total != total_tag:
yield ('untagged', total - total_tag)
res = [
{
'account_id': account['key'],
'account_name': [a.pretty for a in accounts if a.get_aws_user_id() == account['key']][0],
'months': [
{
'month': data['key_as_string'].split('T')[0],
'regions': [
{
'region': region['key'],
'tags': [
{
'name': tag[0],
'cost': tag[1],
}
for tag in tagged_cost(region['tags']['buckets'], region['cost']['value'])
],
}
for region in data['regions']['buckets']
],
}
for data in account['intervals']['buckets']
]
}
for account in raw_data
]
if 'csv' in request.args:
return Response(generate_csv(res, 'regions', 'region', account=True, tagged=True), mimetype='text/csv')
return jsonify(accounts=res)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/dailycostbyproduct', defaults={'nb_days': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/dailycostbyproduct/<int:nb_days>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_dailycostbyproduct(accounts, nb_days):
    """---
    get:
        tags:
            - aws
        produces:
            - application/json
        description: &desc Get daily costs summed by product
        summary: *desc
        responses:
            200:
                description: List of AWS accounts
                schema:
                    properties:
                        days:
                            type: array
                            items:
                                properties:
                                    day:
                                        type: string
                                    products:
                                        type: array
                                        items:
                                            properties:
                                                cost:
                                                    type: number
                                                product:
                                                    type: string
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    now = datetime.utcnow().replace(hour=23, minute=59, second=59, microsecond=999999)
    # Anchor the window on the last day for which billing data actually exists.
    now = AWSDetailedLineitem.get_last_date([account.get_aws_user_id() for account in accounts], limit=now)
    date_from = now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=nb_days)
    date_to = now.replace(hour=23, minute=59, second=59, microsecond=999999) - relativedelta(days=1)
    data = AWSDetailedLineitem.get_daily_cost_by_product(keys=[account.get_aws_user_id() for account in accounts],
                                                         date_from=date_from,
                                                         date_to=date_to)['days']
    for d in data:
        # Keep only the top products per day (N = `show` query arg minus the
        # 'Other Services' slot, default 9) and fold the rest into that entry.
        d['products'] = cut_cost_by_product(sorted(d['products'], key=lambda x: x['cost'], reverse=True), int(request.args['show']) - 1 if 'show' in request.args else 9)
    if not len(data):
        # No data yet: tell the client when the next ingestion is expected.
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(days=data)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyproduct', defaults={'nb_months': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyproduct/<int:nb_months>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_monthlycostbyproduct(accounts, nb_months):
"""---
get:
tags:
- aws
produces:
- application/json
description: &desc Get monthly costs summed by product
summary: *desc
responses:
200:
description: List of AWS accounts
schema:
properties:
months:
type: array
items:
properties:
month:
type: string
products:
type: array
items:
properties:
cost:
type: number
product:
type: string
403:
description: Not logged in
404:
description: AWS account not registered
"""
assert len(accounts) > 0
now = datetime.utcnow().replace(hour=23, minute=59, second=59, microsecond=999999)
now = AWSDetailedLineitem.get_last_date([account.get_aws_user_id() for account in accounts], limit=now)
date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1], hour=23, minute=59, second=59, microsecond=999999)
data = AWSDetailedLineitem.get_monthly_cost_by_product(keys=[account.get_aws_user_id() for account in accounts],
date_from=date_from,
date_to=date_to)['months']
for d in data:
if 'csv' not in request.args:
d['products'] = cut_cost_by_product(sorted(d['products'], key=lambda x: x['cost'], reverse=True), int(request.args['show']) - 1 if 'show' in request.args else 9)
if not len(data):
return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
if 'csv' in request.args:
return Response(generate_csv(data, 'products', 'product'), mimetype='text/csv')
return jsonify(months=data)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyproductbyaccount', defaults={'nb_months': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyproductbyaccount/<int:nb_months>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_monthlycostbyproductbyaccount(accounts, nb_months):
"""---
get:
tags:
- aws
produces:
- application/json
description: &desc Get monthly costs summed by product for each account
summary: *desc
responses:
200:
description: List of AWS accounts
schema:
properties:
months:
type: array
items:
properties:
month:
type: string
products:
type: array
items:
properties:
cost:
type: number
product:
type: string
403:
description: Not logged in
404:
description: AWS account not registered
"""
assert len(accounts) > 0
now = datetime.utcnow()
month = nb_months - 1
date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=month)
date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
hour=23, minute=59, second=59, microsecond=999999)
res = [
{
'account_id': account.get_aws_user_id(),
'account_name': account.pretty,
'months': AWSDetailedLineitem.get_monthly_cost_by_product(keys=account.get_aws_user_id(),
date_from=date_from,
date_to=date_to)['months'],
}
for account in accounts
]
if 'csv' in request.args:
return Response(generate_csv(res, 'products', 'product', account=True), mimetype='text/csv')
return jsonify(accounts=res)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyproductbytagbyaccount', defaults={'nb_months': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/monthlycostbyproductbytagbyaccount/<int:nb_months>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_monthlycostbyproductbytagbyaccount(accounts, nb_months):
"""---
get:
tags:
- aws
produces:
- application/json
description: &desc Get monthly costs summed by product for each account
summary: *desc
responses:
200:
description: List of AWS accounts
schema:
properties:
months:
type: array
items:
properties:
month:
type: string
products:
type: array
items:
properties:
cost:
type: number
product:
type: string
403:
description: Not logged in
404:
description: AWS account not registered
"""
assert len(accounts) > 0
now = datetime.utcnow()
month = nb_months - 1
date_from = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=month)
date_to = now.replace(day=calendar.monthrange(now.year, now.month)[1],
hour=23, minute=59, second=59, microsecond=999999)
res = [
{
'account_id': account.get_aws_user_id(),
'account_name': account.pretty,
'months': AWSDetailedLineitem.get_monthly_cost_by_product(keys=account.get_aws_user_id(),
tagged=True,
date_from=date_from,
date_to=date_to)['months'],
}
for account in accounts
]
if 'csv' in request.args:
return Response(generate_csv(res, 'products', 'product', account=True, tagged=True), mimetype='text/csv')
return jsonify(accounts=res)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/yearlycostbyproduct', defaults={'nb_years': 3})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/yearlycostbyproduct/<int:nb_years>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_yearlycostbyproduct(accounts, nb_years):
"""---
get:
tags:
- aws
produces:
- application/json
description: &desc Get yearly costs summed by product
summary: *desc
responses:
200:
description: List of AWS accounts
schema:
properties:
years:
type: array
items:
properties:
year:
type: string
products:
type: array
items:
properties:
cost:
type: number
product:
type: string
403:
description: Not logged in
404:
description: AWS account not registered
"""
assert len(accounts) > 0
now = datetime.utcnow()
date_from = now.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(years=nb_years - 1)
date_to = now.replace(month=12, day=31, hour=23, minute=59, second=59, microsecond=999999)
data = AWSDetailedLineitem.get_yearly_cost_by_product(keys=[account.get_aws_user_id() for account in accounts],
date_from=date_from,
date_to=date_to)['years']
for d in data:
d['products'] = cut_cost_by_product(sorted(d['products'], key=lambda x: x['cost'], reverse=True), int(request.args['show']) - 1 if 'show' in request.args else 9)
if not len(data):
return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
return jsonify(years=data)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/costbyresource/months')
@with_login()
@with_multiple_aws_accounts()
def aws_cost_by_resource_months(accounts):
    """List the months (as 'YYYY-MM-01' strings) for which detailed line
    items exist, for use by the cost-by-resource views."""
    raw_data = AWSDetailedLineitem.get_first_to_last_date([account.get_aws_user_id() for account in accounts])
    if not raw_data:
        # No data yet: tell the client when the next ingestion is expected.
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(months=[data.strftime("%Y-%m-01") for data in raw_data])
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/costbyresource/<month>/categories')
@with_login()
@with_multiple_aws_accounts()
def aws_cost_by_resource_month_categories_m(accounts, month):
    """List the cost-magnitude categories ('<1', '<10', ..., '>N') present in
    the per-resource costs of the given month.

    `month` must be an ISO date (YYYY-MM-DD); anything else yields a 404.
    """
    try:
        date_from = datetime.strptime(month, "%Y-%m-%d")
    # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; strptime signals a malformed date with ValueError.
    except ValueError:
        return jsonify(error='Not found.'), 404
    raw_data = AWSDetailedLineitem.get_cost_by_resource([account.get_aws_user_id() for account in accounts], date_from=date_from)
    cat = []
    max_cat = 0
    for new in raw_data:
        # Smallest power of ten >= the resource's cost (1 when cost <= 1).
        x = 1
        while new['cost'] > x:
            x *= 10
        if x >= max_cat:
            # Track the largest bucket seen; it becomes the open-ended one.
            max_cat = x
        elif '<{}'.format(x) not in cat:
            cat.append('<{}'.format(x))
    # The top bucket is expressed as "greater than the next power down".
    cat.append('>{}'.format(max_cat / 10))
    return jsonify(categories=cat)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/costbyresource/<month>/chart')
@with_login()<|fim▁hole|>@with_multiple_aws_accounts()
def aws_cost_by_resource_month_chart_m(accounts, month):
    """Bucket the month's per-resource costs into power-of-ten categories.

    Returns [{'category': '<10', 'total': ...}, ...]; the largest bucket is
    relabeled as an open-ended '>N' category (e.g. '<1000' becomes '>100').
    A malformed `month` (not YYYY-MM-DD) yields a 404.
    """
    # TODO: Use ES agg to categorize
    try:
        date_from = datetime.strptime(month, "%Y-%m-%d")
    except:
        return jsonify(error='Not found.'), 404
    # One per-resource cost list per account; flattened in the loop below.
    raw_data = [
        AWSDetailedLineitem.get_cost_by_resource(account.get_aws_user_id(), date_from=date_from)
        for account in accounts
    ]
    data = []
    def get_cat_with_cost(cost):
        # Smallest power of ten >= cost (1 when cost <= 1).
        x = 1
        while cost > x:
            x *= 10
        return x
    def add_resource_in_data(new):
        # Accumulate this resource's cost into its '<N' bucket, creating the
        # bucket on first use.
        new_cat = get_cat_with_cost(new['cost'])
        for cat in data:
            if cat['category'] == '<{}'.format(new_cat):
                cat['total'] += new['cost']
                return
        data.append(dict(category='<{}'.format(new_cat), total=new['cost']))
    for one in raw_data:
        for new in one:
            add_resource_in_data(new)
    if not len(data):
        # No data yet: tell the client when the next ingestion is expected.
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    # The bucket with the longest label is the highest power of ten; rewrite
    # its '<N' label into the open-ended '>N/10' form.
    max_cat = 0
    for i in range(len(data)):
        if len(data[i]['category']) > len(data[max_cat]['category']):
            max_cat = i
    data[max_cat]['category'] = data[max_cat]['category'][:-1]
    data[max_cat]['category'] = data[max_cat]['category'].replace('<', '>', 1)
    return jsonify(categories=data)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/costbyresource/<month>/<category>')
@with_login()
@with_multiple_aws_accounts()
def aws_cost_by_resource_m(accounts, month, category):
    """Resources of `month` whose cost falls in `category` ('<N' or '>N').

    '<N' selects costs in [N/10, N); '>N' selects costs above N.
    Malformed month or category yields a 404 (the try block converts both
    strptime ValueError and the failed assert into the error response).
    """
    try:
        date_from = datetime.strptime(month, "%Y-%m-%d")
        assert category[0] in ['<', '>']
        cat = int(category[1:])
    except:
        return jsonify(error='Not found.'), 404
    raw_data = AWSDetailedLineitem.get_cost_by_resource([account.get_aws_user_id() for account in accounts], date_from=date_from)
    def transform(r):
        # The UI expects 'resource_name'; mirror the 'resource' field onto it.
        r['resource_name'] = r['resource']
        return r
    minus = category[0] == '<'
    data = [
        transform(r)
        for r in raw_data
        if (minus and cat > r['cost'] >= cat / 10) or (not minus and r['cost'] > cat)
    ]
    if len(data) <= 0:
        return jsonify(error='Not found.'), 404
    return jsonify(category=dict(resources=data, total=sum([x['cost'] for x in data])))
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/costbyresource/<month>/search/<search>')
@with_login()
@with_multiple_aws_accounts()
def aws_cost_by_resource_search_m(accounts, month, search):
    """Search the month's per-resource costs by resource name."""
    try:
        date_from = datetime.strptime(month, "%Y-%m-%d")
    except:
        return jsonify(error='Not found.'), 404
    # One result list per account; flattened below.
    raw_data = [
        AWSDetailedLineitem.get_cost_by_resource(account.get_aws_user_id(), date_from=date_from, search=search)
        for account in accounts
    ]
    def transform(r):
        # The UI expects 'resource_name'; mirror the 'resource' field onto it.
        r['resource_name'] = r['resource']
        return r
    data = [
        transform(r)
        for raw in raw_data
        for r in raw
    ]
    if not len(data):
        return jsonify(error='Not found.'), 404
    return jsonify(search_result=data)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/tags')
@with_login()
@with_multiple_aws_accounts()
def aws_get_resource_tags(accounts):
    """List every resource tag key seen in the accounts' billing data."""
    tags = AWSDetailedLineitem.get_available_tags([account.get_aws_user_id() for account in accounts])['tags']
    if not len(tags):
        # No data yet: tell the client when the next ingestion is expected.
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    # Python 2: `unicode.lower` as key gives a case-insensitive sort.
    return jsonify(tags=sorted(tags, key=unicode.lower))
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/tags_only_with_data')
@with_login()
@with_multiple_aws_accounts()
def aws_get_resource_tags_with_data(accounts):
    """List resource tag keys, restricted to tags that actually carry data.

    Per-account results are unioned and deduplicated via set().
    """
    tags = list(set(itertools.chain.from_iterable(
        AWSDetailedLineitem.get_available_tags(account.get_aws_user_id(), only_with_data=account.key)['tags']
        for account in accounts
    )))
    if not len(tags):
        # No data yet: tell the client when the next ingestion is expected.
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    # Python 2: `unicode.lower` as key gives a case-insensitive sort.
    return jsonify(tags=sorted(tags, key=unicode.lower))
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/costbytag/<path:tag>', defaults={'nb_months': 5})
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/costbytag/<path:tag>/<int:nb_months>')
@with_login()
@with_multiple_aws_accounts()
def aws_cost_by_tags_months(accounts, nb_months, tag):
    """Monthly cost broken down by the values of one resource tag.

    Window: from the 1st of the month `nb_months - 1` months ago until now.
    NOTE(review): sibling endpoints use datetime.utcnow(); this one uses
    local time -- left unchanged, but worth confirming.
    """
    date_to = datetime.now()
    # BUGFIX: `hour=0` was missing from replace(), so the window started at
    # the current hour of the month's first day instead of midnight (every
    # sibling endpoint resets the hour as well).
    date_from = date_to.replace(day=1, hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=nb_months - 1)
    return jsonify(AWSDetailedLineitem.get_monthly_cost_by_tag([account.get_aws_user_id() for account in accounts], tag, date_from=date_from, date_to=date_to))
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/underutilized')
@with_login()
@with_multiple_aws_accounts()
def aws_underutilized_resources(accounts):
    """Resources flagged as underutilized by the accounts' AWS metrics."""
    return jsonify(AWSMetric.underutilized_resources(account.key for account in accounts))
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/underutilizedreducedcost')
@with_login()
@with_multiple_aws_accounts()
def aws_underutilized_resources_reduced_cost(accounts):
    """Estimated monthly cost of underutilized resources if downsized.

    Takes the last six months of per-resource cost for every resource flagged
    as underutilized and scales each month's total by 20%.
    """
    now = datetime.utcnow()
    date_from = now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(months=6)
    date_to = now.replace(hour=23, minute=59, second=59, microsecond=999999)
    resources = AWSMetric.underutilized_resources(account.key for account in accounts)
    resource_ids = set(r['id'] for r in resources['resources'])
    months = AWSDetailedLineitem.get_monthly_cost_by_resource(resource_ids, date_from=date_from, date_to=date_to)
    res = {  # Simply multiply every cost by 20% as all instances usage is
        k: v * 0.2  # less than 20%. TODO: intelligently find the best type
        for k, v in months.iteritems()
    }
    return jsonify(res)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/usagecost')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_usagecost(accounts):
    """Daily EC2 cost alongside daily CPU utilization for the accounts.

    Returns one record per day {'day': ..., 'cpu': ..., 'cost': ...}, sorted
    by day; days missing either series are dropped.
    """
    def get_account_data(account):
        # Emit (day, cpu, None) tuples for the CPU series and (day, None, cost)
        # for the cost series; the two series are merged per day further down.
        for date, cpu_usage in dict(AWSMetric.daily_cpu_utilization(account.key)).iteritems():
            yield (date, cpu_usage, None)
        for date, cost in dict(AWSDetailedLineitem.get_ec2_daily_cost(account.get_aws_user_id())).iteritems():
            yield (date, None, cost)
    @with_cache()
    def get_all_account_data():
        # Materialized (and cached) flat list of tuples across all accounts.
        return list(
            itertools.chain.from_iterable(
                get_account_data(account)
                for account in accounts
            )
        )
    data = get_all_account_data()
    days = {}
    # Merge the sparse series: sum CPU and cost separately per day, keeping
    # None when a day never appeared in a given series.
    for day, cpu_usage, cost in data:
        day_data = days.setdefault(day, {'day': day, 'cpu': None, 'cost': None})
        if cpu_usage is not None:
            day_data['cpu'] = (day_data['cpu'] or 0.0) + cpu_usage
        if cost is not None:
            day_data['cost'] = (day_data['cost'] or 0.0) + cost
    res = sorted([
        value
        for value in days.itervalues()
        if value['cpu'] is not None and value['cost'] is not None  # Comment/remove if None values are OK
    ], key=lambda e: e['day'])
    if not res:
        # No data yet: tell the client when the next ingestion is expected.
        return jsonify(message=get_next_update_estimation_message_aws(accounts, AWS_KEY_PROCESSING_INTERVAL_HOURS))
    return jsonify(days=res)
def _build_list_used_transfer_types(stat_list):
return frozenset(
elem['type']
for bucket in stat_list
for elem in bucket['transfer_stats']
)
def _check_if_in_list(dict_list, value, key):
return next((item for item in dict_list if item[key] == value), None)
def _append_to_header_list(header_list, new_data):
for elem in new_data:
if elem not in header_list:
header_list.append(elem)
return header_list
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/s3buckettags')
@with_login()
@with_multiple_aws_accounts()
def aws_get_resource_tags_for_s3(accounts):
    """Return the sorted, de-duplicated tag keys used on S3 line items."""
    seen = set()
    for account in accounts:
        tag_info = AWSDetailedLineitem.get_available_tags(
            account.get_aws_user_id(),
            product_name='Simple Storage Service',
        )
        seen.update(tag_info['tags'])
    return jsonify(tags=sorted(seen, key=unicode.lower))
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/s3bucketsizepername')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_s3bucketsizepername(accounts):
    """---
    get:
        tags:
            - aws
        produces:
            - application/csv
        description: &desc Stats about cost and usage of bandwith and storag on s3 buckets, organised by name
        summary: *desc
        responses:
            200:
                description: Stats about cost and usage of bandwith and storag on s3 buckets, organised by name
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    def _create_bandwith_breakdown(transfer_types_list, csv_row, bucket_bandwith_stat):
        # Adds one column per transfer type seen for this bucket.
        for elem in transfer_types_list:
            _current_transfer_type = _check_if_in_list(bucket_bandwith_stat['transfer_stats'], elem, 'type')
            if _current_transfer_type is not None:
                csv_row[elem] = _current_transfer_type['data'] * 1024 * 1024 * 1024  # The value is given in GB by default; convert to bytes.
        return csv_row
    def _create_csv_rows(bucket_list, account, bandwith_cost, csv_row_all):
        # Builds one row per bucket: storage cost, bandwith cost and total.
        # NOTE: reads `transfer_types_list` from the enclosing per-account loop.
        if bucket_list is None:
            return []
        for bucket in bucket_list['buckets']:
            csv_row = {
                'account_id': account.get_aws_user_id(),
                'used_space': bucket['used_space'],
                'name': bucket['name'],
                'storage_cost': _check_if_in_list(bucket['prices'], bucket['provider'], 'provider')['cost']
            }
            bucket_bandwith_stat = _check_if_in_list(bandwith_cost, bucket['name'], 'bucket_name')
            if bucket_bandwith_stat is not None:
                csv_row = _create_bandwith_breakdown(transfer_types_list, csv_row, bucket_bandwith_stat)
            csv_row['bandwith_cost'] = bucket_bandwith_stat['cost'] if bucket_bandwith_stat is not None else 0
            csv_row['total_cost'] = csv_row['storage_cost'] + csv_row['bandwith_cost']
            csv_row_all.append(csv_row)
        return csv_row_all
    assert len(accounts) > 0
    csv_header = ['account_id', 'name', 'used_space', 'storage_cost', 'bandwith_cost', 'total_cost']
    csv_row_all = []
    for account in accounts:
        bucket_list = AWSStat.latest_s3_space_usage(account)
        bucket_ids = [
            bucket['name']
            for bucket in (bucket_list['buckets'] if bucket_list is not None else [])
        ]
        bandwith_cost = AWSDetailedLineitem.get_s3_bandwith_info_and_cost_per_name(account.get_aws_user_id(), bucket_ids)
        transfer_types_list = _build_list_used_transfer_types(bandwith_cost)
        # Extra transfer-type columns are appended to the header as discovered.
        csv_header = _append_to_header_list(csv_header, transfer_types_list)
        csv_row_all = _create_csv_rows(bucket_list, account, bandwith_cost, csv_row_all)
    if len(csv_row_all) > 0 and csv_row_all[0] is None:
        csv_row_all = []
    # `?csv` switches the response from JSON to CSV.
    if 'csv' in request.args:
        return Response(generate_csv_clean(csv_row_all, csv_header))
    return jsonify(accounts=csv_row_all)
@app.route('/aws/accounts/<aws_key_ids:account_ids>/stats/s3bucketsizepertag/<path:tag>')
@with_login()
@with_multiple_aws_accounts()
def aws_accounts_m_stats_s3bucketsizepertag(accounts, tag):
    """---
    get:
        tags:
            - aws
        produces:
            - application/csv
        description: &desc Stats about cost and usage of bandwith and storag on s3 buckets, organised by tag
        summary: *desc
        responses:
            200:
                description: Stats about cost and usage of bandwith and storag on s3 buckets, organised by tag
            403:
                description: Not logged in
            404:
                description: AWS account not registered
    """
    assert len(accounts) > 0
    def _get_total_sizes_cost_and_names(bucket_names_list, bucket_list):
        # Sums size/storage-cost over the buckets carrying this tag value and
        # joins their names into one comma-separated string.
        total_size = 0
        total_cost = 0
        names = ""
        for bucket in bucket_list['buckets']:
            if _check_if_in_list(bucket_names_list, bucket['name'], 'bucket_name') is not None:
                total_size += float(bucket['used_space'])
                total_cost += _check_if_in_list(bucket['prices'], bucket['provider'], 'provider')['cost']
                names += bucket['name'] + ", "
        return total_size, names[:-2], total_cost
    def _get_bandwith_info(account, bucket_names):
        # NOTE(review): callers pass the comma-joined names *string*, so this
        # wraps it as a single-element list — confirm the backend expects that.
        bucket_ids = [
            bucket
            for bucket in (bucket_names if isinstance(bucket_names, list) else [bucket_names])
        ]
        bandwith_cost = AWSDetailedLineitem.get_s3_bandwith_info_and_cost_per_name(account.get_aws_user_id(), bucket_ids)
        return bandwith_cost
    def _iterate_over_buckets_in_tag_for_total(bucket_bandwith_stat):
        # NOTE(review): not referenced anywhere in this view — dead code?
        total_cost = 0
        for bucket in (bucket_bandwith_stat if bucket_bandwith_stat is not None else []):
            total_cost += bucket['cost']
        return total_cost
    def _iterate_over_buckets_and_make_breakdown_bandwith_stat(bucket_bandwith_stat, buff_row_csv, tag_value):
        # Accumulates per-transfer-type bytes (input given in GB) and the
        # total bandwith cost into the row. `tag_value` is unused here.
        bandwith_cost = 0
        for bucket in bucket_bandwith_stat:
            bandwith_cost += bucket['cost']
            for elem in bucket['transfer_stats']:
                if elem['type'] in buff_row_csv:
                    buff_row_csv[elem['type']] += (elem['data'] * 1024 * 1024 * 1024)
                else:
                    buff_row_csv[elem['type']] = (elem['data'] * 1024 * 1024 * 1024)
        buff_row_csv['bandwith_cost'] = bandwith_cost
        return buff_row_csv
    def _build_csv_row_and_add_header(bucket_list_tagged, bucket_list, account, csv_header, csv_row_all):
        # One row per distinct tag value; header grows with new transfer types.
        if bucket_list_tagged is None:
            return [], []
        for tag_value in bucket_list_tagged['tag_value']:
            bucket_info = _get_total_sizes_cost_and_names(tag_value['s3_buckets'], bucket_list)
            bucket_bandwith_stat = _get_bandwith_info(account, bucket_info[1])
            csv_header = _append_to_header_list(csv_header, _build_list_used_transfer_types(bucket_bandwith_stat))
            csv_row = {
                "tag_key": bucket_list_tagged['tag_key'].split(':')[1],
                "tag_value": tag_value['tag_value'],
                "account_id": tag_value['s3_buckets'][0]["account_id"],
                "total_size": bucket_info[0],
                "bucket_names": bucket_info[1],
                "storage_cost": bucket_info[2],
            }
            csv_row = _iterate_over_buckets_and_make_breakdown_bandwith_stat(bucket_bandwith_stat, csv_row, tag_value)
            csv_row['total_cost'] = csv_row['storage_cost'] + csv_row['bandwith_cost']
            csv_row_all.append(csv_row)
        return csv_header, csv_row_all
    def _select_bucket_list_tag(bucket_list_per_tag, tag):
        # Returns the tag group whose key (after the 'user:'/'aws:' prefix)
        # contains *tag*; implicitly returns None when nothing matches.
        for bucket_list_tagged in bucket_list_per_tag:
            if tag in bucket_list_tagged['tag_key'].split(':')[1]:
                return bucket_list_tagged
    csv_header = ["account_id", "tag_key", "tag_value", "total_size", "bucket_names", "bandwith_cost", "storage_cost", "total_cost"]
    csv_data = []
    for account in accounts:
        bucket_list_per_tag = AWSDetailedLineitem.get_s3_buckets_per_tag(account.get_aws_user_id())
        bucket_list_tagged = _select_bucket_list_tag(bucket_list_per_tag, tag)
        bucket_list = AWSStat.latest_s3_space_usage(account)
        csv_header, csv_data = _build_csv_row_and_add_header(bucket_list_tagged, bucket_list, account, csv_header, csv_data)
    # `?csv` switches the response from JSON to CSV.
    if 'csv' in request.args:
        return Response(generate_csv_clean(csv_data, csv_header))
    return jsonify(res=csv_data)
<|file_name|>ShoppingCartComponent.java<|end_file_name|><|fim▁begin|>package sunning.democollection.learn._0331.component;
import dagger.Component;
import sunning.democollection.learn._0331.UserActivity;
import sunning.democollection.learn._0331.module.ShoppingCartModule;
<|fim▁hole|>public interface ShoppingCartComponent {
void inject(UserActivity userActivity);
}<|fim▁end|> | /**
* Created by sunning on 16/3/31.
*/
@Component(dependencies = ActivityComponent.class, modules = ShoppingCartModule.class) |
<|file_name|>tty_term.go<|end_file_name|><|fim▁begin|>package namespaces
import (
"io"<|fim▁hole|> "github.com/dotcloud/docker/pkg/term"
)
// TtyTerminal attaches a command's stdio streams to a pty master while
// keeping the parent terminal in raw mode so keystrokes pass through
// unmodified.
type TtyTerminal struct {
	stdin          io.Reader
	stdout, stderr io.Writer
	master         *os.File    // pty master of the child process
	state          *term.State // saved parent-terminal state, restored on Close
}
// Resize propagates a new window size (h rows, w columns) to the pty master.
func (t *TtyTerminal) Resize(h, w int) error {
	return term.SetWinsize(t.master.Fd(), &term.Winsize{Height: uint16(h), Width: uint16(w)})
}
// SetMaster stores the pty master file that Attach/Resize/Close operate on.
func (t *TtyTerminal) SetMaster(master *os.File) {
	t.master = master
}
// Attach pipes the pty master to t.stdout and t.stdin to the master, then
// sizes the master from the parent terminal and puts the parent in raw mode,
// saving its previous state for Close.
// NOTE(review): the `command` argument and the stderr field are unused here,
// and the io.Copy goroutines have no shutdown path — confirm this is intended.
func (t *TtyTerminal) Attach(command *exec.Cmd) error {
	go io.Copy(t.stdout, t.master)
	go io.Copy(t.master, t.stdin)
	state, err := t.setupWindow(t.master, os.Stdin)
	if err != nil {
		return err
	}
	t.state = state
	return err
}
// SetupWindow gets the parent window size and sets the master
// pty to the current size and set the parents mode to RAW.
// Returns the parent's previous terminal state so it can be restored later.
func (t *TtyTerminal) setupWindow(master, parent *os.File) (*term.State, error) {
	ws, err := term.GetWinsize(parent.Fd())
	if err != nil {
		return nil, err
	}
	if err := term.SetWinsize(master.Fd(), ws); err != nil {
		return nil, err
	}
	return term.SetRawTerminal(parent.Fd())
}
// Close restores the parent terminal's saved state and closes the pty master.
// NOTE(review): the error from RestoreTerminal is discarded — confirm intended.
func (t *TtyTerminal) Close() error {
	term.RestoreTerminal(os.Stdin.Fd(), t.state)
	return t.master.Close()
}
"os/exec"
|
<|file_name|>css-hint-server.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012-2015 S-Core Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
define(['external/lodash/lodash.min',
'webida-lib/util/path',
'plugins/webida.editor.code-editor/content-assist/file-server',
'plugins/webida.editor.code-editor/content-assist/reference'],
function (_, pathUtil, fileServer, reference) {
'use strict';
var csshint = {};
function findCompletions(body) {
var result = {};
var token = body.query.token;
if (token) {
if ((token.type === 'tag' || token.type === 'qualifier') && /^\./.test(token.string)) {<|fim▁hole|> token.type = 'id';
token.start = token.start + 1;
token.string = token.string.substr(1);
}
if (token.type === 'id' || token.type === 'class') {
var htmls = reference.getReferenceFroms(body.query.file);
if (pathUtil.isHtml(body.query.file)) {
htmls = _.union(htmls, [body.query.file]);
}
_.each(htmls, function (htmlpath) {
var html = fileServer.getLocalFile(htmlpath);
if (html) {
if (token.type === 'id') {
result.list = _.union(result, html.getHtmlIds());
} else if (token.type === 'class') {
result.list = _.union(result, html.getHtmlClasses());
}
}
});
if (result.list) {
result.to = body.query.end;
result.from = {
line: body.query.end.line,
ch: body.query.end.ch - token.string.length
};
}
}
}
return result;
}
/**
* @param {files: [{name, type, text}], query: {type: string, end:{line,ch}, file: string}} body
* @returns {from: {line, ch}, to: {line, ch}, list: [string]}
**/
csshint.request = function (serverId, body, c) {
_.each(body.files, function (file) {
if (file.type === 'full') {
fileServer.setText(file.name, file.text);
file.type = null;
}
});
body.files = _.filter(body.files, function (file) {
return file.type !== null;
});
var result = {};
if (body.query.type === 'completions') {
result = findCompletions(body);
}
c(undefined, result);
};
return csshint;
});<|fim▁end|> | token.type = 'class';
token.start = token.start + 1;
token.string = token.string.substr(1);
} else if (token.type === 'builtin' && /^#/.test(token.string)) { |
<|file_name|>ge.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
// macro_rules! e {
// ($e:expr) => { $e }
// }
// macro_rules! tuple_impls {
// ($(
// $Tuple:ident {
// $(($idx:tt) -> $T:ident)+
// }
// )+) => {
// $(
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:Clone),+> Clone for ($($T,)+) {
// fn clone(&self) -> ($($T,)+) {
// ($(e!(self.$idx.clone()),)+)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:PartialEq),+> PartialEq for ($($T,)+) {
// #[inline]
// fn eq(&self, other: &($($T,)+)) -> bool {
// e!($(self.$idx == other.$idx)&&+)
// }
// #[inline]
// fn ne(&self, other: &($($T,)+)) -> bool {
// e!($(self.$idx != other.$idx)||+)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:Eq),+> Eq for ($($T,)+) {}
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) {
// #[inline]
// fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> {
// lexical_partial_cmp!($(self.$idx, other.$idx),+)
// }
// #[inline]
// fn lt(&self, other: &($($T,)+)) -> bool {
// lexical_ord!(lt, $(self.$idx, other.$idx),+)
// }
// #[inline]
// fn le(&self, other: &($($T,)+)) -> bool {
// lexical_ord!(le, $(self.$idx, other.$idx),+)
// }
// #[inline]
// fn ge(&self, other: &($($T,)+)) -> bool {
// lexical_ord!(ge, $(self.$idx, other.$idx),+)
// }
// #[inline]
// fn gt(&self, other: &($($T,)+)) -> bool {
// lexical_ord!(gt, $(self.$idx, other.$idx),+)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:Ord),+> Ord for ($($T,)+) {
// #[inline]
// fn cmp(&self, other: &($($T,)+)) -> Ordering {
// lexical_cmp!($(self.$idx, other.$idx),+)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:Default),+> Default for ($($T,)+) {
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// fn default() -> ($($T,)+) {
// ($({ let x: $T = Default::default(); x},)+)
// }
// }
// )+
// }
// }
// // Constructs an expression that performs a lexical ordering using method $rel.
// // The values are interleaved, so the macro invocation for
// // `(a1, a2, a3) < (b1, b2, b3)` would be `lexical_ord!(lt, a1, b1, a2, b2,
// // a3, b3)` (and similarly for `lexical_cmp`)
// macro_rules! lexical_ord {
// ($rel: ident, $a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
// if $a != $b { lexical_ord!($rel, $a, $b) }
// else { lexical_ord!($rel, $($rest_a, $rest_b),+) }
// };
// ($rel: ident, $a:expr, $b:expr) => { ($a) . $rel (& $b) };
// }
// macro_rules! lexical_partial_cmp {
// ($a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
// match ($a).partial_cmp(&$b) {
// Some(Equal) => lexical_partial_cmp!($($rest_a, $rest_b),+),
// ordering => ordering
// }
// };
// ($a:expr, $b:expr) => { ($a).partial_cmp(&$b) };
// }
// macro_rules! lexical_cmp {
// ($a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
// match ($a).cmp(&$b) {
// Equal => lexical_cmp!($($rest_a, $rest_b),+),
// ordering => ordering
// }
// };
// ($a:expr, $b:expr) => { ($a).cmp(&$b) };
// }
// tuple_impls! {
// Tuple1 {
// (0) -> A
// }
// Tuple2 {
// (0) -> A
// (1) -> B
// }
// Tuple3 {
// (0) -> A
// (1) -> B
// (2) -> C
// }
// Tuple4 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// }
// Tuple5 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// }
// Tuple6 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// }
// Tuple7 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// }
// Tuple8 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// }
// Tuple9 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// (8) -> I
// }
// Tuple10 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// (8) -> I
// (9) -> J
// }
// Tuple11 {<|fim▁hole|> // (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// (8) -> I
// (9) -> J
// (10) -> K
// }
// Tuple12 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// (8) -> I
// (9) -> J
// (10) -> K
// (11) -> L
// }
// }
#[cfg(test)]
mod tests {
    // Exercises `ge` (>=) on homogeneous tuples of every primitive numeric
    // type: default vs default+1 in the three orderings, through both the
    // explicit `.ge()` call and the `>=` operator.
    macro_rules! ge_test {
        (
            $($T:ident)+
        ) => (
            {
                let left: ($($T,)+) = ($($T::default(),)+);
                let right: ($($T,)+) = ($($T::default() + 1 as $T,)+);
                let result: bool = left.ge(&right);
                assert_eq!(result, false);
            }
            {
                let left: ($($T,)+) = ($($T::default(),)+);
                let right: ($($T,)+) = ($($T::default() + 1 as $T,)+);
                let result: bool = left >= right;
                assert_eq!(result, false);
            }
            {
                let left: ($($T,)+) = ($($T::default(),)+);
                let right: ($($T,)+) = ($($T::default(),)+);
                let result: bool = left.ge(&right);
                assert_eq!(result, true);
            }
            {
                let left: ($($T,)+) = ($($T::default(),)+);
                let right: ($($T,)+) = ($($T::default(),)+);
                let result: bool = left >= right;
                assert_eq!(result, true);
            }
            {
                let left: ($($T,)+) = ($($T::default() + 1 as $T,)+);
                let right: ($($T,)+) = ($($T::default(),)+);
                let result: bool = left.ge(&right);
                assert_eq!(result, true);
            }
            {
                let left: ($($T,)+) = ($($T::default() + 1 as $T,)+);
                let right: ($($T,)+) = ($($T::default(),)+);
                let result: bool = left >= right;
                assert_eq!(result, true);
            }
        )
    }
    // One alias per element type so the macro invocation stays readable.
    type A = u8;
    type B = u16;
    type C = u32;
    type D = u64;
    type E = usize;
    type F = i8;
    type G = i16;
    type H = i32;
    type I = i64;
    type J = isize;
    type K = f32;
    #[test]
    fn ge_test1() {
        ge_test! { A B C D E F G H I J K };
    }
}
// (1) -> B
// (2) -> C |
<|file_name|>getIconUnicode.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import defaultIconsMeta from '../../form/editors/impl/iconEditor/icons.json';
type options = {
iconsMeta: any
};
export default ({ iconsMeta = defaultIconsMeta }: options = { iconsMeta }): Function => {
const getIconUnicode = (iconName: string): string => {
const icon = iconsMeta[iconName];
if (!icon) {
console.warn(`iconsMeta has no '${iconName}' icon`);
return '';
}
return `&#x${icon.unicode};`;
};
const getIconsUnicodes = (iconNames: string): string =>
iconNames
.split(' ')
.map(getIconUnicode)
.join(' ');
return getIconsUnicodes;
};<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import binascii
import hashlib
import logging
import socket
import ssl
import sys
from ansible.module_utils.mt_api.retryloop import RetryError
from ansible.module_utils.mt_api.retryloop import retryloop
from ansible.module_utils.mt_api.socket_utils import set_keepalive
PY2 = sys.version_info[0] < 3
logger = logging.getLogger(__name__)
class RosAPIError(Exception):
    """Error reported by the RouterOS API; *value* may be a dict, list or scalar."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        value = self.value
        # A dict with a non-empty 'message' renders as that message alone.
        if isinstance(value, dict) and value.get('message'):
            return value['message']
        # A list renders each element as "<class>: <str>" joined in brackets.
        if isinstance(value, list):
            parts = ['%s: %s' % (item.__class__, str(item)) for item in value]
            return '[%s]' % ', '.join(parts)
        return str(value)
class RosAPIConnectionError(RosAPIError):
    # Raised when the TCP/SSL transport to the router fails or is closed.
    pass
class RosAPIFatalError(RosAPIError):
    # Raised on a '!fatal' reply or an unparsable length prefix; the
    # connection is not usable afterwards.
    pass
class RosApiLengthUtils(object):
    """Encodes/decodes the RouterOS API variable-length word-size prefix.

    Lengths are written big-endian in 1-5 bytes; the high bits of the first
    byte select how many bytes follow (0xxxxxxx=1, 10=2, 110=3, 1110=4,
    11110000=5).
    """

    def __init__(self, api):
        # *api* must expose read_bytes(n)/write_bytes(data).
        self.api = api

    def write_lenght(self, length):
        # (sic: "lenght") Writes the encoded length prefix to the transport.
        self.api.write_bytes(self.length_to_bytes(length))

    def length_to_bytes(self, length):
        # Encode per the scheme above; thresholds are the payload capacity of
        # each prefix width.
        if length < 0x80:
            return self.to_bytes(length)
        elif length < 0x4000:
            length |= 0x8000
            return self.to_bytes(length, 2)
        elif length < 0x200000:
            length |= 0xC00000
            return self.to_bytes(length, 3)
        elif length < 0x10000000:
            length |= 0xE0000000
            return self.to_bytes(length, 4)
        else:
            return self.to_bytes(0xF0) + self.to_bytes(length, 4)

    def read_length(self):
        # Inspect the control bits of the first byte to know how many more
        # bytes belong to the length.
        b = self.api.read_bytes(1)
        i = self.from_bytes(b)
        if (i & 0x80) == 0x00:
            return i
        elif (i & 0xC0) == 0x80:
            return self._unpack(1, i & ~0xC0)
        elif (i & 0xE0) == 0xC0:
            return self._unpack(2, i & ~0xE0)
        elif (i & 0xF0) == 0xE0:
            return self._unpack(3, i & ~0xF0)
        elif (i & 0xF8) == 0xF0:
            return self.from_bytes(self.api.read_bytes(1))
        else:
            raise RosAPIFatalError('Unknown value: %x' % i)

    def _unpack(self, times, i):
        # Reads *times* continuation bytes and combines them with the first
        # byte's payload bits *i*.
        # NOTE(review): decoding the raw bytes and concatenating them with the
        # bytes from to_bytes() only works on Python 2 (where both are `str`);
        # on Python 3 `temp1 + temp3` would raise TypeError. The bare `except:`
        # and the print/exit(1) fallback also look like debug leftovers —
        # confirm before porting.
        temp1 = self.to_bytes(i)
        temp2 = self.api.read_bytes(times)
        try:
            temp3 = temp2.decode('utf-8')
        except:
            try:
                temp3 = temp2.decode('windows-1252')
            except Exception:
                print("Cannot decode response properly:", temp2)
                print(Exception)
                exit(1)
        res = temp1 + temp3
        return self.from_bytes(res)

    # Big-endian int <-> bytes helpers; Python 2 lacks int.from_bytes/to_bytes.
    if PY2:
        def from_bytes(self, data):
            data_values = [ord(char) for char in data]
            value = 0
            for byte_value in data_values:
                value <<= 8
                value += byte_value
            return value

        def to_bytes(self, i, size=1):
            data = []
            for _ in xrange(size):
                data.append(chr(i & 0xff))
                i >>= 8
            return b''.join(reversed(data))
    else:
        def from_bytes(self, data):
            return int.from_bytes(data, 'big')

        def to_bytes(self, i, size=1):
            return i.to_bytes(size, 'big')
class RosAPI(object):
    """Minimal RouterOS API client speaking the word/sentence wire protocol.

    Words are length-prefixed byte strings; a sentence is a sequence of words
    terminated by an empty word.
    """

    def __init__(self, socket):
        self.socket = socket
        self.length_utils = RosApiLengthUtils(self)

    def login(self, username, pwd):
        """Perform the classic challenge/response login.

        The response is md5(b'\\x00' + password + challenge-token), sent hex
        encoded with a '00' prefix.
        """
        for _, attrs in self.talk([b'/login']):
            token = binascii.unhexlify(attrs[b'ret'])
            hasher = hashlib.md5()
            hasher.update(b'\x00')
            hasher.update(pwd)
            hasher.update(token)
            self.talk([b'/login', b'=name=' + username,
                       b'=response=00' + hasher.hexdigest().encode('ascii')])

    def talk(self, words):
        """Send one sentence and collect reply sentences until '!done'.

        Returns a list of (reply_type, attributes) tuples, or None when
        *words* is empty. Raises RosAPIError on '!trap' and RosAPIFatalError
        (after closing the socket) on '!fatal'.
        """
        if self.write_sentence(words) == 0:
            return
        output = []
        while True:
            input_sentence = self.read_sentence()
            if not len(input_sentence):
                continue
            attrs = {}
            reply = input_sentence.pop(0)
            for line in input_sentence:
                try:
                    second_eq_pos = line.index(b'=', 1)
                # Bug fix: bytes/str .index raises ValueError (not IndexError)
                # when no second '=' exists, so attribute words without a
                # value used to crash instead of mapping to b''.
                except ValueError:
                    attrs[line[1:]] = b''
                else:
                    attrs[line[1:second_eq_pos]] = line[second_eq_pos + 1:]
            output.append((reply, attrs))
            if reply == b'!done':
                if output[0][0] == b'!trap':
                    raise RosAPIError(output[0][1])
                if output[0][0] == b'!fatal':
                    self.socket.close()
                    raise RosAPIFatalError(output[0][1])
                return output

    def write_sentence(self, words):
        """Write all words plus the empty terminator; return the word count."""
        words_written = 0
        for word in words:
            self.write_word(word)
            words_written += 1
        self.write_word(b'')
        return words_written

    def read_sentence(self):
        """Read words until the empty terminator word."""
        sentence = []
        while True:
            word = self.read_word()
            if not len(word):
                return sentence
            sentence.append(word)

    def write_word(self, word):
        logger.debug('>>> %s' % word)
        self.length_utils.write_lenght(len(word))
        self.write_bytes(word)

    def read_word(self):
        word = self.read_bytes(self.length_utils.read_length())
        logger.debug('<<< %s' % word)
        return word

    def write_bytes(self, data):
        # socket.send may write partially; loop until everything is out.
        sent_overal = 0
        while sent_overal < len(data):
            try:
                sent = self.socket.send(data[sent_overal:])
            except socket.error as e:
                raise RosAPIConnectionError(str(e))
            if sent == 0:
                raise RosAPIConnectionError('Connection closed by remote end.')
            sent_overal += sent

    def read_bytes(self, length):
        # recv may return partial data; loop until *length* bytes are read.
        received_overal = b''
        while len(received_overal) < length:
            try:
                received = self.socket.recv(
                    length - len(received_overal))
            except socket.error as e:
                raise RosAPIConnectionError(str(e))
            if len(received) == 0:
                raise RosAPIConnectionError('Connection closed by remote end.')
            received_overal += received
        return received_overal
class BaseRouterboardResource(object):
    """Byte-level CRUD wrapper for one RouterOS namespace (e.g. '/ip/address').

    All keys/values are bytes; see RouterboardResource for the str variant.
    """

    def __init__(self, api, namespace):
        self.api = api
        self.namespace = namespace

    def call(self, command, set_kwargs, query_kwargs=None):
        # Builds '<namespace>/<command>' plus '?key=value' query words and
        # '=key=value' setter words, then keeps only '!re' (record) replies.
        query_kwargs = query_kwargs or {}
        query_arguments = self._prepare_arguments(True, **query_kwargs)
        set_arguments = self._prepare_arguments(False, **set_kwargs)
        query = ([('%s/%s' % (self.namespace, command)).encode('ascii')] +
                 query_arguments + set_arguments)
        response = self.api.api_client.talk(query)
        output = []
        for response_type, attributes in response:
            if response_type == b'!re':
                output.append(self._remove_first_char_from_keys(attributes))
        return output

    @staticmethod
    def _prepare_arguments(is_query, **kwargs):
        # 'id'/'proplist' become '.id'/'.proplist'; underscores map to the
        # dashes RouterOS uses; '?' marks query words, '=' setter words.
        command_arguments = []
        for key, value in kwargs.items():
            if key in ['id', 'proplist']:
                key = '.%s' % key
            key = key.replace('_', '-')
            selector_char = '?' if is_query else '='
            command_arguments.append(
                ('%s%s=' % (selector_char, key)).encode('ascii') + value)
        return command_arguments

    @staticmethod
    def _remove_first_char_from_keys(dictionary):
        # Reverses the '.id'/'.proplist' prefixing and decodes keys to str.
        elements = []
        for key, value in dictionary.items():
            key = key.decode('ascii')
            if key in ['.id', '.proplist']:
                key = key[1:]
            elements.append((key, value))
        return dict(elements)

    def get(self, **kwargs):
        return self.call('print', {}, kwargs)

    def detailed_get(self, **kwargs):
        return self.call('print', {'detail': b''}, kwargs)

    def set(self, **kwargs):
        return self.call('set', kwargs)

    def add(self, **kwargs):
        return self.call('add', kwargs)

    def remove(self, **kwargs):
        return self.call('remove', kwargs)
class RouterboardResource(BaseRouterboardResource):
    """str-level variant: encodes arguments to ASCII bytes on the way in and
    decodes all reply values back to str on the way out."""

    def detailed_get(self, **kwargs):
        # Overridden so the 'detail' flag is a str (encoded by call below).
        return self.call('print', {'detail': ''}, kwargs)

    def call(self, command, set_kwargs, query_kwargs=None):
        query_kwargs = query_kwargs or {}
        result = super(RouterboardResource, self).call(
            command, self._encode_kwargs(set_kwargs),
            self._encode_kwargs(query_kwargs))
        # Decode every value in place; keys are already str.
        for item in result:
            for k in item:
                item[k] = item[k].decode('ascii')
        return result

    def _encode_kwargs(self, kwargs):
        return dict((k, v.encode('ascii')) for k, v in kwargs.items())
class RouterboardAPI(object):
    """Connection manager: opens the socket (optionally TLS), logs in with
    retries, and hands out resource wrappers. Usable as a context manager.

    NOTE: the `ssl` parameter shadows the `ssl` module inside __init__ only.
    """

    def __init__(self, host, username='api', password='', port=8728, ssl=False):
        self.host = host
        self.username = username
        self.password = password
        self.socket = None
        self.port = port
        self.ssl = ssl
        # Connects eagerly; raises RosAPIConnectionError on failure.
        self.reconnect()

    def __enter__(self):
        return self

    def __exit__(self, _, __, ___):
        self.close_connection()

    def reconnect(self):
        # Tear down any existing socket, then retry connect+login up to 10
        # times (0.1s delay, 30s overall timeout).
        if self.socket:
            self.close_connection()
        try:
            for retry in retryloop(10, delay=0.1, timeout=30):
                try:
                    self.connect()
                    self.login()
                except socket.error:
                    retry()
        except (socket.error, RetryError) as e:
            raise RosAPIConnectionError(str(e))

    def connect(self):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(15.0)
        sock.connect((self.host, self.port))
        # TCP keepalive so dead routers are detected on idle connections.
        set_keepalive(sock, after_idle_sec=10)
        if self.ssl:
            try:
                self.socket = ssl.wrap_socket(sock)
            except ssl.SSLError as e:
                raise RosAPIConnectionError(str(e))
        else:
            self.socket = sock
        self.api_client = RosAPI(self.socket)

    def login(self):
        self.api_client.login(self.username.encode('ascii'),
                              self.password.encode('ascii'))

    def get_resource(self, namespace):
        # str-based wrapper (values decoded to str).
        return RouterboardResource(self, namespace)

    def get_base_resource(self, namespace):
        # bytes-based wrapper.
        return BaseRouterboardResource(self, namespace)

    def close_connection(self):
        self.socket.close()
class Mikrotik(object):
def __init__(self, hostname, username, password):
self.hostname = hostname
self.username = username
self.password = password
def login(self):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.hostname, 8728))<|fim▁hole|> def talk(self, talk_command):
r = self.login()
response = r.talk(talk_command)
return(response)
    def api_print(self, base_path, params=None):
        # Runs '<base_path>/print', optionally adding '=key=value' words.
        command = [base_path + '/print']
        if params is not None:
            for key, value in params.iteritems():  # dict.iteritems: Python 2 only
                item = b'=' + key + '=' + str(value)
                command.append(item)
        return self.talk(command)
    def api_add(self, base_path, params):
        # Runs '<base_path>/add' with one '=key=value' word per param.
        command = [base_path + '/add']
        for key, value in params.iteritems():  # dict.iteritems: Python 2 only
            item = b'=' + key + '=' + str(value)
            command.append(item)
        return self.talk(command)
    def api_edit(self, base_path, params):
        # Runs '<base_path>/set'; params should include the target '.id'.
        command = [base_path + '/set']
        for key, value in params.iteritems():  # dict.iteritems: Python 2 only
            item = b'=' + key + '=' + str(value)
            command.append(item)
        return self.talk(command)
    def api_remove(self, base_path, remove_id):
        # Runs '<base_path>/remove' for the entry identified by *remove_id*.
        command = [
            base_path + '/remove',
            b'=.id=' + remove_id
        ]
        return self.talk(command)
    def api_command(self, base_path, params=None):
        # Runs an arbitrary API path verbatim with optional '=key=value' words.
        command = [base_path]
        if params is not None:
            for key, value in params.iteritems():  # dict.iteritems: Python 2 only
                item = b'=' + key + '=' + str(value)
                command.append(item)
        return self.talk(command)
mt.login(self.username, self.password)
return mt
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import httplib2
import urllib
from django.utils import simplejson as json
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
import logging
from urlparse import urlparse
from geonode.maps.models import LayerStats, Layer
from xml.etree.ElementTree import XML, ParseError
import re
logger = logging.getLogger("geonode.proxy.views")
HGL_URL = 'http://hgl.harvard.edu:8080/HGL'
_valid_tags = "\{http\:\/\/www\.opengis\.net\/wms\}WMS_Capabilities|\
WMT_MS_Capabilities|WMS_DescribeLayerResponse|\
\{http\:\/\/www\.opengis\.net\/gml\}FeatureCollection|msGMLOutput|\
\{http\:\/\/www.opengis\.net\/wfs\}FeatureCollection|\
rss|{http://www.w3.org/2005/Atom}feed|\
\{http\:\/\/www\.w3\.org\/2001\/XMLSchema\}schema|\
{http://www.w3.org/1999/02/22-rdf-syntax-ns#}RDF"
_user, _password = settings.GEOSERVER_CREDENTIALS
h = httplib2.Http()
h.add_credentials(_user, _password)
_netloc = urlparse(settings.GEOSERVER_BASE_URL).netloc
h.authorizations.append(
httplib2.BasicAuthentication(
(_user, _password),
_netloc,
settings.GEOSERVER_BASE_URL,
{},
None,
None,
h
)
)
@csrf_exempt
def proxy(request):
    # Same-origin proxy: fetches the remote URL given in ?url= on behalf of
    # the browser map client. The body is filtered through valid_response()
    # so only recognized XML/JSON payloads are relayed.
    # NOTE(review): urlsplit/HTTPConnection/HTTPSConnection/HttpResponse are
    # presumably brought in by the import block above (not fully visible here).
    if 'url' not in request.GET:
        return HttpResponse(
            "The proxy service requires a URL-encoded URL as a parameter.",
            status=400,
            content_type="text/plain"
        )
    url = urlsplit(request.GET['url'])
    # Don't allow localhost connections unless in DEBUG mode
    if not settings.DEBUG and re.search('localhost|127.0.0.1', url.hostname):
        return HttpResponse(status=403)
    # Rebuild path + query + fragment for the outbound request.
    locator = url.path
    if url.query != "":
        locator += '?' + url.query
    if url.fragment != "":
        locator += '#' + url.fragment
    # Strip all headers and cookie info
    headers = {}
    conn = HTTPConnection(url.hostname, url.port) if url.scheme == "http" else HTTPSConnection(url.hostname, url.port)
    conn.request(request.method, locator, request.raw_post_data, headers)
    result = conn.getresponse()
    response = HttpResponse(
        valid_response(result.read()),
        status=result.status,
        content_type=result.getheader("Content-Type", "text/plain")
    )
    return response
def valid_response(responseContent):
    """Return *responseContent* if it looks like a payload the proxy may
    relay (a recognized XML document type or valid JSON), otherwise None.

    The proxy should only be used when expecting an XML or JSON response.
    """
    # Robustness fix: an empty body used to raise IndexError at
    # responseContent[0]; treat it as invalid instead.
    if not responseContent:
        return None
    # ArcGIS Server GetFeatureInfo xml response
    if re.match("<FeatureInfoResponse", responseContent):
        return responseContent
    # ows exceptions
    if "<ows:ExceptionReport" in responseContent:
        return responseContent
    if responseContent[0] == "<":
        try:
            # defusedxml guards against entity-expansion attacks on untrusted XML.
            from defusedxml.ElementTree import fromstring
            et = fromstring(responseContent)
            if re.match(_valid_tags, et.tag):
                return responseContent
        except ParseError:
            return None
    elif re.match(r'\[|\{', responseContent):
        try:
            json.loads(responseContent)
            return responseContent
        # Bug fix: was a bare `except:` — JSON decode errors are ValueError
        # subclasses, so catch only those instead of swallowing everything.
        except ValueError:
            return None
    return None
@csrf_exempt
def geoserver_rest_proxy(request, proxy_path, downstream_path):
    # Authenticated pass-through to the GeoServer REST API: rewrites the
    # request path from *proxy_path* to *downstream_path* under
    # GEOSERVER_BASE_URL and forwards method, body and Content-Type.
    if not request.user.is_authenticated():
        return HttpResponse(
            "You must be logged in to access GeoServer",
            mimetype="text/plain",
            status=401)
    def strip_prefix(path, prefix):
        assert path.startswith(prefix)
        return path[len(prefix):]
    path = strip_prefix(request.get_full_path(), proxy_path)
    url = "".join([settings.GEOSERVER_BASE_URL, downstream_path, path])
    http = httplib2.Http()
    # GeoServer admin credentials come from settings, not the end user.
    http.add_credentials(*settings.GEOSERVER_CREDENTIALS)
    headers = dict()
    if request.method in ("POST", "PUT") and "CONTENT_TYPE" in request.META:
        headers["Content-Type"] = request.META["CONTENT_TYPE"]
    response, content = http.request(
        url, request.method,
        body=request.raw_post_data or None,
        headers=headers)
    return HttpResponse(
        content=content,
        status=response.status,
        mimetype=response.get("content-type", "text/plain"))
def picasa(request):
url = "http://picasaweb.google.com/data/feed/base/all?thumbsize=160c&"
kind = request.GET['kind'] if request.method == 'GET' else request.POST['kind']
bbox = request.GET['bbox'] if request.method == 'GET' else request.POST['bbox']
query = request.GET['q'] if request.method == 'GET' else request.POST['q']
maxResults = request.GET['max-results'] if request.method == 'GET' else request.POST['max-results']
coords = bbox.split(",")
coords[0] = -180 if float(coords[0]) <= -180 else coords[0]
coords[2] = 180 if float(coords[2]) >= 180 else coords[2]
coords[1] = coords[1] if float(coords[1]) > -90 else -90
coords[3] = coords[3] if float(coords[3]) < 90 else 90
newbbox = str(coords[0]) + ',' + str(coords[1]) + ',' + str(coords[2]) + ',' + str(coords[3])
url = url + "kind=" + kind + "&max-results=" + maxResults + "&bbox=" + newbbox + "&q=" + urllib.quote(query.encode('utf-8')) #+ "&alt=json"
feed_response = urllib.urlopen(url).read()
return HttpResponse(feed_response, mimetype="text/xml")
def flickr(request):
url = "http://api.flickr.com/services/rest/?method=flickr.photos.search&api_key=%s" % settings.FLICKR_API_KEY
bbox = request.GET['bbox'] if request.method == 'GET' else request.POST['bbox']
query = request.GET['q'] if request.method == 'GET' else request.POST['q']
maxResults = request.GET['max-results'] if request.method == 'GET' else request.POST['max-results']
coords = bbox.split(",")
coords[0] = -180 if float(coords[0]) <= -180 else coords[0]
coords[2] = 180 if float(coords[2]) >= 180 else coords[2]
coords[1] = coords[1] if float(coords[1]) > -90 else -90
coords[3] = coords[3] if float(coords[3]) < 90 else 90
newbbox = str(coords[0]) + ',' + str(coords[1]) + ',' + str(coords[2]) + ',' + str(coords[3])
url = url + "&tags=%s&per_page=%s&has_geo=1&bbox=%s&format=json&extras=geo,url_q&accuracy=1&nojsoncallback=1" % (query,maxResults,newbbox)
feed_response = urllib.urlopen(url).read()
return HttpResponse(feed_response, mimetype="text/xml")
def hglpoints (request):
from xml.dom import minidom
import re
url = HGL_URL + "/HGLGeoRSS?GeometryType=point"
bbox = ["-180","-90","180","90"]
max_results = request.GET['max-results'] if request.method == 'GET' else request.POST['max-results']
if max_results is None:
max_results = "100"
try:
bbox = request.GET['bbox'].split(",") if request.method == 'GET' else request.POST['bbox'].split(",")
except:
pass
query = request.GET['q'] if request.method == 'GET' else request.POST['q']
url = url + "&UserQuery=" + urllib.quote(query.encode('utf-8')) #+ \
#"&BBSearchOption=1&minx=" + bbox[0] + "&miny=" + bbox[1] + "&maxx=" + bbox[2] + "&maxy=" + bbox[3]
dom = minidom.parse(urllib.urlopen(url))
iterator = 1
for node in dom.getElementsByTagName('item'):
if iterator <= int(max_results):
description = node.getElementsByTagName('description')[0]
guid = node.getElementsByTagName('guid')[0]
title = node.getElementsByTagName('title')[0]
if guid.firstChild.data != 'OWNER.TABLE_NAME':
description.firstChild.data = description.firstChild.data + '<br/><br/><p><a href=\'javascript:void(0);\' onClick=\'app.addHGL("' \
+ escape(title.firstChild.data) + '","' + re.sub("SDE\d?\.","", guid.firstChild.data) + '");\'>Add to Map</a></p>'
iterator +=1
else:
node.parentNode.removeChild(node)
return HttpResponse(dom.toxml(), mimetype="text/xml")
def hglServiceStarter (request, layer):
#Check if the layer is accessible to public, if not return 403
accessUrl = HGL_URL + "/ogpHglLayerInfo.jsp?ValidationKey=" + settings.HGL_VALIDATION_KEY +"&layers=" + layer
accessJSON = json.loads(urllib.urlopen(accessUrl).read())
if accessJSON[layer]['access'] == 'R':
return HttpResponse(status=403)
#Call the RemoteServiceStarter to load the layer into HGL's Geoserver in case it's not already there
startUrl = HGL_URL + "/RemoteServiceStarter?ValidationKey=" + settings.HGL_VALIDATION_KEY + "&AddLayer=" + layer
return HttpResponse(urllib.urlopen(startUrl).read())
def tweetServerProxy(request,geopsip):
url = urlsplit(request.get_full_path())
if geopsip == "standard":
geopsip = settings.GEOPS_IP
tweet_url = "http://" + geopsip + "?" + url.query
identifyQuery = re.search("QUERY_LAYERS", tweet_url)
if identifyQuery is not None:
if re.search("%20limit%2010&", tweet_url)is None:
return HttpResponse(status=403)
step1 = urllib.urlopen(tweet_url)
step2 = step1.read()
if 'content-type' in step1.info().dict:
response = HttpResponse(step2, mimetype= step1.info().dict['content-type'])
else:
response = HttpResponse(step2)
try :
cookie = step1.info().dict['set-cookie'].split(";")[0].split("=")[1]
response.set_cookie("tweet_count", cookie)
except:
pass
return response
def tweetDownload (request):
if (not request.user.is_authenticated() or not request.user.get_profile().is_org_member):
return HttpResponse(status=403)
proxy_url = urlsplit(request.get_full_path())
download_url = "http://" + settings.GEOPS_IP + "?" + proxy_url.query + settings.GEOPS_DOWNLOAD
http = httplib2.Http()
response, content = http.request(
download_url, request.method)
response = HttpResponse(
content=content,
status=response.status,
mimetype=response.get("content-type", "text/plain"))
response['Content-Disposition'] = response.get('Content-Disposition', 'attachment; filename="tweets"' + request.user.username + '.csv');
return response
def tweetTrendProxy (request):
tweetUrl = "http://" + settings.AWS_INSTANCE_IP + "/?agg=trend&bounds=" + request.POST["bounds"] + "&dateStart=" + request.POST["dateStart"] + "&dateEnd=" + request.POST["dateEnd"];
resultJSON =""
# resultJSON = urllib.urlopen(tweetUrl).read()
# import datetime
#
#
# startDate = datetime.datetime.strptime(request.POST["dateStart"], "%Y-%b-%d")
# endDate = datetime.datetime.strptime(request.POST["dateEnd"], "%Y-%b-%d")
#
# recString = "record: ["
#
# while startDate <= endDate:
# recString += "{'date': '$date', 'Ebola$rnd5' : $rnd6, 'Malaria$rnd4' : $rnd7, 'Influenza$rnd3': $rnd8, 'Plague$rnd3': $rnd9, 'Lyme_Disease$rnd1': $rnd10},"
# recString = recString.replace("$rnd6", str(random.randrange(50,500,1)))
# recString = recString.replace("$rnd7", str(random.randrange(50,500,1)))
# recString = recString.replace("$rnd8", str(random.randrange(50,500,1)))
# recString = recString.replace("$rnd9", str(random.randrange(50,500,1)))
# recString = recString.replace("$rnd10", str(random.randrange(50,500,1)))
# recString = recString.replace("$date", datetime.datetime.strftime(startDate, '%b-%d-%Y'))
# startDate = startDate + datetime.timedelta(days=1)
#
# recString += "]"
#
# resultJSON = """
# {
# metaData: {
# root: "record",
# fields: [
# {name: 'date'},
# {name: 'Ebola$rnd5'},
# {name: 'Malaria$rnd4'},
# {name: 'Influenza$rnd3'},
# {name: 'Plague$rnd3'},
# {name: 'Lyme_Disease$rnd1'}
# ],
# },
# // Reader's configured root
# $recString
#}
#"""
#
# resultJSON = resultJSON.replace("$recString", recString)
#
#
#
# resultJSON = resultJSON.replace("$rnd1", str(random.randrange(50,500,1)))
# resultJSON = resultJSON.replace("$rnd2", str(random.randrange(50,500,1)))
# resultJSON = resultJSON.replace("$rnd3", str(random.randrange(50,500,1)))
# resultJSON = resultJSON.replace("$rnd4", str(random.randrange(50,500,1)))
# resultJSON = resultJSON.replace("$rnd5", str(random.randrange(50,500,1)))
# resultJSON = '{"metaData":{"fields":[{"name":"Tuberculosis"},{"name":"STD"},{"name":"Gastroenteritis"},{"name":"Influenza"},{"name":"Common_Cold"},{"name":"date"}],"root":"record"},"record":[{"Common_Cold":18,"Gastroenteritis":104,"Influenza":76,"STD":121,"Tuberculosis":236,"date":"2012-01-26"},{"Common_Cold":19,"Gastroenteritis":115,"Influenza":114,"STD":146,"Tuberculosis":397,"date":"2012-01-27"},{"Common_Cold":26,"Gastroenteritis":104,"Influenza":83,"STD":137,"Tuberculosis":402,"date":"2012-01-28"},{"Common_Cold":25,"Gastroenteritis":96,"Influenza":76,"STD":141,"Tuberculosis":358,"date":"2012-01-29"},{"Common_Cold":30,"Gastroenteritis":106,"Influenza":87,"STD":158,"Tuberculosis":372,"date":"2012-01-30"},{"Common_Cold":12,"Gastroenteritis":74,"Influenza":44,"STD":116,"Tuberculosis":222,"date":"2012-01-31"}]}'
return HttpResponse(resultJSON, mimetype="application/json")
def youtube(request):
url = "http://gdata.youtube.com/feeds/api/videos?v=2&prettyprint=true&"
bbox = request.GET['bbox'] if request.method == 'GET' else request.POST['bbox']
query = request.GET['q'] if request.method == 'GET' else request.POST['q']
maxResults = request.GET['max-results'] if request.method == 'GET' else request.POST['max-results']
coords = bbox.split(",")
coords[0] = coords[0] if float(coords[0]) > -180 else -180
coords[2] = coords[2] if float(coords[2]) < 180 else 180
coords[1] = coords[1] if float(coords[1]) > -90 else -90
coords[3] = coords[3] if float(coords[3]) < 90 else 90
#location would be the center of the map.
location = str((float(coords[3]) + float(coords[1]))/2) + "," + str((float(coords[2]) + float(coords[0]))/2);
#calculating the location-readius
R = 6378.1370;
PI = 3.1415926;
left = R*float(coords[0])/180.0/PI;
right = R*float(coords[2])/180.0/PI;
radius = (right - left)/2*2;
radius = 1000 if (radius > 1000) else radius;
url = url + "location=" + location + "&max-results=" + maxResults + "&location-radius=" + str(radius) + "km&q=" + urllib.quote(query.encode('utf-8'))
feed_response = urllib.urlopen(url).read()
return HttpResponse(feed_response, mimetype="text/xml")
def download(request, service, layer, format):
params = request.GET
#mimetype = params.get("outputFormat") if service == "wfs" else params.get("format")
service=service.replace("_","/")
url = settings.GEOSERVER_BASE_URL + service + "?" + params.urlencode()
layerObj = Layer.objects.get(pk=layer)
if layerObj.downloadable and request.user.has_perm('maps.view_layer', obj=layerObj):
layerstats,created = LayerStats.objects.get_or_create(layer=layer)
layerstats.downloads += 1
layerstats.save()
download_response, content = h.request(
url, request.method,
body=None,
headers=dict())
content_disposition = None
if 'content_disposition' in download_response:
content_disposition = download_response['content-disposition']
mimetype = download_response['content-type']
response = HttpResponse(content, mimetype = mimetype)
if content_disposition is not None:
response['Content-Disposition'] = content_disposition
else:
response['Content-Disposition'] = "attachment; filename=" + layerObj.name + "." + format
return response
else:
return HttpResponse(status=403)<|fim▁end|> | import random
from django.http import HttpResponse
from httplib import HTTPConnection, HTTPSConnection
from urlparse import urlsplit |
<|file_name|>EventInterface.ts<|end_file_name|><|fim▁begin|>interface EventInterface <EventPayload> {
<|fim▁hole|> removeHandler<T>(handler: EventInterface.HandlerInterface<EventPayload, T>, context?: T): EventInterface<EventPayload>;
}
module EventInterface {
export interface HandlerInterface<EventPayload, T> {
(payload: EventPayload, context: T): void;
}
}
export default EventInterface;<|fim▁end|> | addHandler<T>(handler: EventInterface.HandlerInterface<EventPayload, T>, context?: T): EventInterface<EventPayload>;
|
<|file_name|>api.xpack.autoscaling.put_autoscaling_policy.go<|end_file_name|><|fim▁begin|>// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V. licenses this file to you under the Apache 2.0 License.<|fim▁hole|>// See the LICENSE file in the project root for more information.
//
// Code generated from specification version 7.7.0: DO NOT EDIT
package esapi
import (
"context"
"io"
"net/http"
"strings"
)
func newAutoscalingPutAutoscalingPolicyFunc(t Transport) AutoscalingPutAutoscalingPolicy {
return func(name string, body io.Reader, o ...func(*AutoscalingPutAutoscalingPolicyRequest)) (*Response, error) {
var r = AutoscalingPutAutoscalingPolicyRequest{Name: name, Body: body}
for _, f := range o {
f(&r)
}
return r.Do(r.ctx, t)
}
}
// ----- API Definition -------------------------------------------------------
// AutoscalingPutAutoscalingPolicy -
//
// This API is experimental.
//
// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-put-autoscaling-policy.html.
//
type AutoscalingPutAutoscalingPolicy func(name string, body io.Reader, o ...func(*AutoscalingPutAutoscalingPolicyRequest)) (*Response, error)
// AutoscalingPutAutoscalingPolicyRequest configures the Autoscaling Put Autoscaling Policy API request.
//
type AutoscalingPutAutoscalingPolicyRequest struct {
Body io.Reader
Name string
Pretty bool
Human bool
ErrorTrace bool
FilterPath []string
Header http.Header
ctx context.Context
}
// Do executes the request and returns response or error.
//
func (r AutoscalingPutAutoscalingPolicyRequest) Do(ctx context.Context, transport Transport) (*Response, error) {
var (
method string
path strings.Builder
params map[string]string
)
method = "PUT"
path.Grow(1 + len("_autoscaling") + 1 + len("policy") + 1 + len(r.Name))
path.WriteString("/")
path.WriteString("_autoscaling")
path.WriteString("/")
path.WriteString("policy")
path.WriteString("/")
path.WriteString(r.Name)
params = make(map[string]string)
if r.Pretty {
params["pretty"] = "true"
}
if r.Human {
params["human"] = "true"
}
if r.ErrorTrace {
params["error_trace"] = "true"
}
if len(r.FilterPath) > 0 {
params["filter_path"] = strings.Join(r.FilterPath, ",")
}
req, err := newRequest(method, path.String(), r.Body)
if err != nil {
return nil, err
}
if len(params) > 0 {
q := req.URL.Query()
for k, v := range params {
q.Set(k, v)
}
req.URL.RawQuery = q.Encode()
}
if r.Body != nil {
req.Header[headerContentType] = headerContentTypeJSON
}
if len(r.Header) > 0 {
if len(req.Header) == 0 {
req.Header = r.Header
} else {
for k, vv := range r.Header {
for _, v := range vv {
req.Header.Add(k, v)
}
}
}
}
if ctx != nil {
req = req.WithContext(ctx)
}
res, err := transport.Perform(req)
if err != nil {
return nil, err
}
response := Response{
StatusCode: res.StatusCode,
Body: res.Body,
Header: res.Header,
}
return &response, nil
}
// WithContext sets the request context.
//
func (f AutoscalingPutAutoscalingPolicy) WithContext(v context.Context) func(*AutoscalingPutAutoscalingPolicyRequest) {
return func(r *AutoscalingPutAutoscalingPolicyRequest) {
r.ctx = v
}
}
// WithPretty makes the response body pretty-printed.
//
func (f AutoscalingPutAutoscalingPolicy) WithPretty() func(*AutoscalingPutAutoscalingPolicyRequest) {
return func(r *AutoscalingPutAutoscalingPolicyRequest) {
r.Pretty = true
}
}
// WithHuman makes statistical values human-readable.
//
func (f AutoscalingPutAutoscalingPolicy) WithHuman() func(*AutoscalingPutAutoscalingPolicyRequest) {
return func(r *AutoscalingPutAutoscalingPolicyRequest) {
r.Human = true
}
}
// WithErrorTrace includes the stack trace for errors in the response body.
//
func (f AutoscalingPutAutoscalingPolicy) WithErrorTrace() func(*AutoscalingPutAutoscalingPolicyRequest) {
return func(r *AutoscalingPutAutoscalingPolicyRequest) {
r.ErrorTrace = true
}
}
// WithFilterPath filters the properties of the response body.
//
func (f AutoscalingPutAutoscalingPolicy) WithFilterPath(v ...string) func(*AutoscalingPutAutoscalingPolicyRequest) {
return func(r *AutoscalingPutAutoscalingPolicyRequest) {
r.FilterPath = v
}
}
// WithHeader adds the headers to the HTTP request.
//
func (f AutoscalingPutAutoscalingPolicy) WithHeader(h map[string]string) func(*AutoscalingPutAutoscalingPolicyRequest) {
return func(r *AutoscalingPutAutoscalingPolicyRequest) {
if r.Header == nil {
r.Header = make(http.Header)
}
for k, v := range h {
r.Header.Add(k, v)
}
}
}
// WithOpaqueID adds the X-Opaque-Id header to the HTTP request.
//
func (f AutoscalingPutAutoscalingPolicy) WithOpaqueID(s string) func(*AutoscalingPutAutoscalingPolicyRequest) {
return func(r *AutoscalingPutAutoscalingPolicyRequest) {
if r.Header == nil {
r.Header = make(http.Header)
}
r.Header.Set("X-Opaque-Id", s)
}
}<|fim▁end|> | |
<|file_name|>CmdAppBase.java<|end_file_name|><|fim▁begin|>/**
*
*/
package edu.ku.brc.specify.tools.export;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.Vector;
import edu.ku.brc.af.auth.SecurityMgr;
import org.apache.commons.lang.StringUtils;
import org.dom4j.Element;
import edu.ku.brc.af.auth.JaasContext;
import edu.ku.brc.af.auth.UserAndMasterPasswordMgr;
import edu.ku.brc.af.core.AppContextMgr;
import edu.ku.brc.af.prefs.AppPreferences;
import edu.ku.brc.dbsupport.DatabaseDriverInfo;
import edu.ku.brc.helpers.XMLHelper;
import edu.ku.brc.specify.Specify;
import edu.ku.brc.specify.config.SpecifyAppContextMgr;
import edu.ku.brc.specify.config.SpecifyAppPrefs;
import edu.ku.brc.specify.conversion.BasicSQLUtils;
import edu.ku.brc.specify.datamodel.Discipline;
import edu.ku.brc.specify.tools.ireportspecify.MainFrameSpecify;
import edu.ku.brc.ui.UIHelper;
import edu.ku.brc.ui.UIRegistry;
import edu.ku.brc.util.Pair;
/**
* @author timo
*
*/
public class CmdAppBase {
protected static final String SCHEMA_VERSION_FILENAME = "schema_version.xml";
private static String[] argkeys = {"-u", "-p", "-d", "-m", "-l", "-h", "-o", "-w", "-U", "-P"};
protected List<Pair<String, String>> argList;
protected String userName;
protected String password;
protected String dbName;
protected String outputName;
protected String hostName;
protected String workingPath = ".";
protected Pair<String, String> master;
protected FileWriter out;
boolean success = false;
protected Vector<DatabaseDriverInfo> dbDrivers = new Vector<DatabaseDriverInfo>();
protected int dbDriverIdx;
protected String collectionName;
protected JaasContext jaasContext;
/**
* @param argkeys
*/
public CmdAppBase(String[] akeys) {
String[] as = new String[argkeys.length + akeys.length];
for (int ak=0; ak < argkeys.length; ak++) {
as[ak] = argkeys[ak];
}
for (int ak=0; ak < akeys.length; ak++) {
as[ak + argkeys.length] = akeys[ak];
}
argList = buildArgList(as);
dbDriverIdx = 0;
hostName = "localhost";
}
/**
* @param keys
* @return
*/
protected List<Pair<String, String>> buildArgList(String[] keys) {
List<Pair<String, String>> result = new ArrayList<Pair<String,String>>();
for (String key : keys) {
result.add(new Pair<String, String>(key, null));
}
return result;
}
/**
* @return
*/
public String getDBSchemaVersionFromXML()
{
String dbVersion = null;
Element root;
try
{
root = XMLHelper.readFileToDOM4J(new FileInputStream(XMLHelper.getConfigDirPath(SCHEMA_VERSION_FILENAME)));//$NON-NLS-1$
if (root != null)
{
dbVersion = ((Element)root).getTextTrim();
}
} catch (FileNotFoundException e)
{
e.printStackTrace();
} catch (Exception e)
{
e.printStackTrace();
}
return dbVersion;
}
/**
* @return
*/
protected boolean checkVersion() {
String schemaVersion = getDBSchemaVersionFromXML();
String appVersion = UIRegistry.getAppVersion();
String schemaVersionFromDb = BasicSQLUtils.querySingleObj("select SchemaVersion from spversion");
String appVersionFromDb = BasicSQLUtils.querySingleObj("select AppVersion from spversion");
return (schemaVersion.equals(schemaVersionFromDb) && appVersion.equals(appVersionFromDb));
}
/**
* @param args
*/
protected void readArgs(String[] args) {
for (Pair<String, String> argPair : argList) {
argPair.setSecond(readArg(argPair.getFirst(), args));
}
}
/**
* @param argKey
* @param args
* @return
*/
protected String readArg(String argKey, String[] args) {
for (int k = 0; k < args.length - 1; k+=2) {
if (args[k].equals(argKey)) {
return args[k+1];
}
}
return null;
}
/**
* @return
*/
protected String checkArgs() {
String result = "";
for (Pair<String, String> arg : argList) {
String err = checkArg(arg);
if (StringUtils.isNotBlank(err)) {
if (result.length() > 0) {
result += "; ";
}
result += err;
}
}
return result;
}
/**
* @param arg
* @return "" if arg checks out, else err msg
*/
protected String checkArg(Pair<String, String> arg) {
return "";
}
/**
*
*/
protected void adjustLocaleFromPrefs()
{
String language = AppPreferences.getLocalPrefs().get("locale.lang", null); //$NON-NLS-1$
if (language != null)
{
String country = AppPreferences.getLocalPrefs().get("locale.country", null); //$NON-NLS-1$
String variant = AppPreferences.getLocalPrefs().get("locale.var", null); //$NON-NLS-1$
Locale prefLocale = new Locale(language, country, variant);
Locale.setDefault(prefLocale);
UIRegistry.setResourceLocale(prefLocale);
}
try
{
ResourceBundle.getBundle("resources", Locale.getDefault()); //$NON-NLS-1$
} catch (MissingResourceException ex)
{
edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(MainFrameSpecify.class, ex);
Locale.setDefault(Locale.ENGLISH);
UIRegistry.setResourceLocale(Locale.ENGLISH);
}
}
/**
* @throws Exception
*/
protected void setupPrefs() throws Exception {
//Apparently this is correct...
System.setProperty(SecurityMgr.factoryName, "edu.ku.brc.af.auth.specify.SpecifySecurityMgr");
UIRegistry.setAppName("Specify"); //$NON-NLS-1$
UIRegistry.setDefaultWorkingPath(this.workingPath);
final AppPreferences localPrefs = AppPreferences.getLocalPrefs();
localPrefs.setDirPath(UIRegistry.getAppDataDir());
adjustLocaleFromPrefs();
final String iRepPrefDir = localPrefs.getDirPath();
int mark = iRepPrefDir.lastIndexOf(UIRegistry.getAppName(), iRepPrefDir.length());
final String SpPrefDir = iRepPrefDir.substring(0, mark) + "Specify";
AppPreferences.getLocalPrefs().flush();
AppPreferences.getLocalPrefs().setDirPath(SpPrefDir);
AppPreferences.getLocalPrefs().setProperties(null);
}
/**
* @return
* @throws Exception
*/
protected boolean hasMasterKey() throws Exception {
UserAndMasterPasswordMgr.getInstance().set(userName, password, dbName);
return UserAndMasterPasswordMgr.getInstance().hasMasterUsernameAndPassword();
}
protected boolean needsMasterKey() {
return !(master != null && master.getFirst() != null && master.getSecond()!= null);
}
/**
* @return
* @throws Exception
*/
protected boolean getMaster() throws Exception {
UserAndMasterPasswordMgr.getInstance().set(userName, password, dbName);
if (!needsMasterKey()) {
UserAndMasterPasswordMgr.getInstance().setUserNamePasswordForDB(master.first, master.second);
return true;
}
Pair<String, String> userpw = UserAndMasterPasswordMgr.getInstance().getUserNamePasswordForDB();
if (userpw != null) {
if (StringUtils.isNotBlank(userpw.getFirst()) && StringUtils.isNotBlank(userpw.getSecond())) {
if (master == null) {
master = new Pair<String, String>(null, null);
}
master.setFirst(userpw.getFirst());
master.setSecond(userpw.getSecond());
return true;
}
}
return false;
}
/**
* @return
*/
protected boolean goodUser() {
String userType = BasicSQLUtils.querySingleObj("select UserType from specifyuser where `name` = '" + userName + "'");
return "manager".equalsIgnoreCase(userType);
}
/**
* @param argKey
* @return
*/
protected String getArg(String argKey) {
for (Pair<String, String> arg : argList) {
if (arg.getFirst().equals(argKey)) {
return arg.getSecond();
}
}
return null;
}
/**
* @param success
*/
protected void setSuccess(boolean success) {
this.success = success;
}
public boolean isSuccess() {
return success;
}
protected DatabaseDriverInfo buildDefaultDriverInfo() {
DatabaseDriverInfo result = new DatabaseDriverInfo("MySQL", "com.mysql.jdbc.Driver", "org.hibernate.dialect.MySQL5InnoDBDialect", false, "3306");
result.addFormat(DatabaseDriverInfo.ConnectionType.Opensys, "jdbc:mysql://SERVER:PORT/");
result.addFormat(DatabaseDriverInfo.ConnectionType.Open, "jdbc:mysql://SERVER:PORT/DATABASE?characterEncoding=UTF-8&autoReconnect=true");
return result;
}
/**
*
*/
public void loadDrivers() {
dbDrivers = DatabaseDriverInfo.getDriversList();
}
/**
* @return
*/
protected String getConnectionStr() {
<|fim▁hole|> return dbDrivers.get(dbDriverIdx).getConnectionStr(DatabaseDriverInfo.ConnectionType.Open,
hostName, dbName);
}
/**
* @param fileName
* @throws Exception
*/
protected void openLog(String fileName) throws Exception {
FileWriter testOut = new FileWriter(new File(fileName), true);
out = testOut;
}
/**
* @return
*/
protected String getTimestamp() {
return new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime());
}
/**
* @return
*/
protected String getLogInitText(String[] args) {
String argStr = "";
boolean isPw = false;
for (String arg : args) {
argStr += " " + (isPw ? "********" : arg);
if (isPw) {
isPw = false;
} else if ("-p".equals(arg)) {
isPw = true;
}
}
return String.format(UIRegistry.getResourceString("ExportCmdLine.LogInitTxt"), argStr);
}
/**
* @return
*/
protected String getLogExitText() {
return String.format(UIRegistry.getResourceString("ExportCmdLine.LogExitTxt"), (success ? "" : "UN-") + "successfully.");
}
/**
* @throws IOException
*/
protected void initLog(String[] args) throws IOException {
out(getLogInitText(args));
}
/**
* @throws IOException
*/
protected void flushLog() throws IOException {
if (out != null) {
out.flush();
}
}
/**
* @throws IOException
*/
protected void exitLog() throws IOException {
out(getLogExitText());
if (out != null) {
out.close();
}
}
/**
* @param line
* @throws IOException
*/
protected void out(String line) throws IOException {
if (out != null) {
out.append(getTimestamp() + ": " + line + "\n");
out.flush();
} else {
System.out.println(getTimestamp() + ": " + line);
}
}
/**
* @return
*/
protected boolean login() {
boolean result = UIHelper.tryLogin(dbDrivers.get(dbDriverIdx).getDriverClassName(),
dbDrivers.get(dbDriverIdx).getDialectClassName(),
dbName,
getConnectionStr(),
master.getFirst(),
master.getSecond());
if (result) {
this.jaasContext = new JaasContext();
jaasLogin();
}
return result;
}
/**
* @return true if ContextManager initializes successfully for collection.
*/
protected boolean setContext() {
Specify.setUpSystemProperties();
System.setProperty(AppContextMgr.factoryName, "edu.ku.brc.specify.tools.export.SpecifyExpCmdAppContextMgr"); // Needed by AppContextMgr //$NON-NLS-1$
AppPreferences.shutdownRemotePrefs();
AppContextMgr.CONTEXT_STATUS status = ((SpecifyAppContextMgr)AppContextMgr.getInstance()).
setContext(dbName, userName, false, false, true, collectionName, false);
// AppContextMgr.getInstance().
SpecifyAppPrefs.initialPrefs();
if (status == AppContextMgr.CONTEXT_STATUS.OK) {
if (AppContextMgr.getInstance().getClassObject(Discipline.class) == null) {
return false;
}
} else if (status == AppContextMgr.CONTEXT_STATUS.Error) {
return false;
}
// ...end specify.restartApp snatch
return true;
}
/**
* @return
*/
protected boolean retrieveCollectionName() {
return false;
}
/**
* @return
*/
public boolean jaasLogin()
{
if (jaasContext != null)
{
return jaasContext.jaasLogin(
userName,
password,
getConnectionStr(),
dbDrivers.get(dbDriverIdx).getDriverClassName(),
master.first,
master.second
);
}
return false;
}
/**
* @throws Exception
*/
protected void setMembers() throws Exception {
userName = getArg("-u");
password = getArg("-p");
if (password == null) password = "";
dbName = getArg("-d");
outputName = getArg("-o");
workingPath = getArg("-w");
String logFile = getArg("-l");
if (logFile != null) {
openLog(logFile);
}
String host = getArg("-h");
if (host != null) {
hostName = host;
}
master = new Pair<>(getArg("-U"), getArg("-P"));
}
}<|fim▁end|> | |
<|file_name|>add-channel-form.js<|end_file_name|><|fim▁begin|><|fim▁hole|> actions: {
addFavorite() {
this.sendAction('addFavorite', this.get('newFavorite'));
}
}
});<|fim▁end|> | import Ember from 'ember';
export default Ember.Component.extend({ |
<|file_name|>manager.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::io;
use std::sync::mpsc::{channel, RecvTimeoutError, Sender};
use std::time::Duration;
use crate::authenticatorservice::AuthenticatorTransport;
use crate::consts::PARAMETER_SIZE;
use crate::errors::*;
use crate::statecallback::StateCallback;
use crate::statemachine::StateMachine;
use runloop::RunLoop;
enum QueueAction {
Register {
flags: crate::RegisterFlags,
timeout: u64,
challenge: Vec<u8>,
application: crate::AppId,
key_handles: Vec<crate::KeyHandle>,
status: Sender<crate::StatusUpdate>,
callback: StateCallback<crate::Result<crate::RegisterResult>>,
},
Sign {
flags: crate::SignFlags,
timeout: u64,
challenge: Vec<u8>,
app_ids: Vec<crate::AppId>,
key_handles: Vec<crate::KeyHandle>,
status: Sender<crate::StatusUpdate>,
callback: StateCallback<crate::Result<crate::SignResult>>,
},
Cancel,
}
pub struct U2FManager {
queue: RunLoop,
tx: Sender<QueueAction>,
}
impl U2FManager {
pub fn new() -> io::Result<Self> {
let (tx, rx) = channel();
// Start a new work queue thread.
let queue = RunLoop::new(move |alive| {
let mut sm = StateMachine::new();
while alive() {
match rx.recv_timeout(Duration::from_millis(50)) {
Ok(QueueAction::Register {
flags,
timeout,
challenge,
application,
key_handles,
status,
callback,
}) => {
// This must not block, otherwise we can't cancel.
sm.register(
flags,
timeout,
challenge,
application,
key_handles,
status,
callback,
);
}
Ok(QueueAction::Sign {
flags,
timeout,
challenge,
app_ids,
key_handles,
status,
callback,
}) => {
// This must not block, otherwise we can't cancel.
sm.sign(
flags,
timeout,
challenge,
app_ids,
key_handles,
status,
callback,
);
}
Ok(QueueAction::Cancel) => {
// Cancelling must block so that we don't start a new
// polling thread before the old one has shut down.
sm.cancel();
}
Err(RecvTimeoutError::Disconnected) => {
break;
}
_ => { /* continue */ }
}
}
// Cancel any ongoing activity.
sm.cancel();
})?;
Ok(Self { queue, tx })
}
}
impl AuthenticatorTransport for U2FManager {
fn register(
&mut self,
flags: crate::RegisterFlags,
timeout: u64,
challenge: Vec<u8>,
application: crate::AppId,
key_handles: Vec<crate::KeyHandle>,
status: Sender<crate::StatusUpdate>,
callback: StateCallback<crate::Result<crate::RegisterResult>>,
) -> crate::Result<()> {
if challenge.len() != PARAMETER_SIZE || application.len() != PARAMETER_SIZE {
return Err(AuthenticatorError::InvalidRelyingPartyInput);
}
for key_handle in &key_handles {
if key_handle.credential.len() > 256 {
return Err(AuthenticatorError::InvalidRelyingPartyInput);
}
}
let action = QueueAction::Register {
flags,
timeout,
challenge,<|fim▁hole|> key_handles,
status,
callback,
};
Ok(self.tx.send(action)?)
}
fn sign(
&mut self,
flags: crate::SignFlags,
timeout: u64,
challenge: Vec<u8>,
app_ids: Vec<crate::AppId>,
key_handles: Vec<crate::KeyHandle>,
status: Sender<crate::StatusUpdate>,
callback: StateCallback<crate::Result<crate::SignResult>>,
) -> crate::Result<()> {
if challenge.len() != PARAMETER_SIZE {
return Err(AuthenticatorError::InvalidRelyingPartyInput);
}
if app_ids.is_empty() {
return Err(AuthenticatorError::InvalidRelyingPartyInput);
}
for app_id in &app_ids {
if app_id.len() != PARAMETER_SIZE {
return Err(AuthenticatorError::InvalidRelyingPartyInput);
}
}
for key_handle in &key_handles {
if key_handle.credential.len() > 256 {
return Err(AuthenticatorError::InvalidRelyingPartyInput);
}
}
let action = QueueAction::Sign {
flags,
timeout,
challenge,
app_ids,
key_handles,
status,
callback,
};
Ok(self.tx.send(action)?)
}
fn cancel(&mut self) -> crate::Result<()> {
Ok(self.tx.send(QueueAction::Cancel)?)
}
}
impl Drop for U2FManager {
fn drop(&mut self) {
self.queue.cancel();
}
}<|fim▁end|> | application, |
<|file_name|>IO_coroutine_stu1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding:UTF-8 -*-
#Copyright (c) 1986 Nick Wong.
#Copyright (c) 2016-2026 TP-NEW Corp.
# License: TP-NEW (www.tp-new.com)
__author__ = "Nick Wong"
"""
用asyncio提供的@asyncio.coroutine可以把一个generator标记为coroutine类型,然后在coroutine内部用yield from调用另一个coroutine实现异步操作
从Python 3.5开始引入了新的语法async和await,可以让coroutine的代码更简洁易读
#generator(生成器)
#coroutine(协程)
async和await是针对coroutine的新语法,要使用新的语法,只需要做两步简单的替换:
1.把@asyncio.coroutine替换为async;
2.把yield from替换为await。
"""
import asyncio
#########旧代码#########
@asyncio.coroutine
def hello():
print('Hello World!')
r = yield from asyncio.sleep(2)
print('Hello again!')
#########新代码#########
async def hello1(): #注:async后跟的函数不能换行,否则语法错误
print('Hello World! 1')
r = await asyncio.sleep(2)
print('Hello again! 1')
#获取EventLoop:
loop = asyncio.get_event_loop()
#执行coroutine
loop.run_until_complete(hello())<|fim▁hole|>loop.close()<|fim▁end|> | loop.run_until_complete(hello1()) |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
//! FFI bindings to elfapi.
#![no_std]
#![experimental]
extern crate winapi;<|fim▁hole|>}<|fim▁end|> | use winapi::*;
extern "system" { |
<|file_name|>Utility.js<|end_file_name|><|fim▁begin|>define([], function () {
var TAGNAMES = {
'select':'input',
'change':'input',
'submit':'form',
'reset':'form',
'error':'img',
'load':'img',
'abort':'img'
};
var Utility = function() {
};
Utility.isEventSupported = function(eventName) {
var el = document.createElement(TAGNAMES[eventName] || 'div');
eventName = 'on' + eventName;
var isSupported = (eventName in el);
if (!isSupported) {
el.setAttribute(eventName, 'return;');
isSupported = typeof el[eventName] == 'function';
}
el = null;
return isSupported;
};
Utility.getSupportedEvent = function(events) {
// get the length
var len = events.length;
if (typeof(len) == 'undefined') {
len = 0;
}<|fim▁hole|> if (Utility.isEventSupported(events[i])) {
return events[i];
}
}
return null;
};
return Utility;
});<|fim▁end|> |
for (var i = 0; i < len; i++) { |
<|file_name|>syntax-extension-bytes-unsupported-literal.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let vec = bytes!(45f); //~ ERROR Unsupported literal in bytes!
}<|fim▁end|> | // file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. |
<|file_name|>Communicator.js<|end_file_name|><|fim▁begin|>// @flow
import React from 'react'
import withPropsStream from '@vega/utils/withPropsStream'
import {map} from 'rxjs/operators'<|fim▁hole|>
import styles from './styles/Communicator.css'
import ThreadList from './ThreadList'
import CreateComment from './CreateComment'
function getPropsStream(props$) {
// todo: implement open/close behavior
return props$.pipe(map(props => ({...props, isOpen: true})))
}
type Props = {
isOpen: boolean,
subjectIds: string[],
focusedCommentId: string
}
export default withPropsStream(
getPropsStream,
class Communicator extends React.Component<Props> {
state = {
createCommentIsSticky: false
}
handleCloseCreateComment = event => {
this.setState({
createCommentIsSticky: false
})
event.stopPropagation()
}
handleStickCreateComment = () => {
this.setState({
createCommentIsSticky: true
})
}
render() {
const {isOpen, subjectIds, focusedCommentId} = this.props
const {createCommentIsSticky} = this.state
return isOpen ? (
<div className={styles.root}>
<div
className={
createCommentIsSticky
? styles.feedWithWithStickyCreateComment
: styles.feed
}
>
<ThreadList
subjectId={subjectIds}
focusedCommentId={focusedCommentId}
/>
</div>
{subjectIds.length === 1 && (
<CreateComment
subjectId={subjectIds[0]}
showCloseButton={createCommentIsSticky}
className={
createCommentIsSticky
? styles.createCommentSticky
: styles.createComment
}
onClose={this.handleCloseCreateComment}
onSubmit={this.handleCloseCreateComment}
onClick={this.handleStickCreateComment}
/>
)}
</div>
) : null
}
}
)<|fim▁end|> | |
<|file_name|>OneCustomerCtrl.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
angular.module('customers')
.controller('OneCustomerCtrl', function ($scope, customers, $stateParams) {
var cid = $stateParams.cid;<|fim▁hole|> console.log($scope.customer);
});
})();<|fim▁end|> | $scope.customer=_.find(customers, function (customer) {
return customer.profile.userName===cid;
}); |
<|file_name|>select.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! See `README.md` for high-level documentation
#![allow(dead_code)] // FIXME -- just temporarily
pub use self::MethodMatchResult::*;
pub use self::MethodMatchedData::*;
use self::SelectionCandidate::*;
use self::BuiltinBoundConditions::*;
use self::EvaluationResult::*;
use super::coherence;
use super::DerivedObligationCause;
use super::project;
use super::project::{normalize_with_depth, Normalized};
use super::{PredicateObligation, TraitObligation, ObligationCause};
use super::report_overflow_error;
use super::{ObligationCauseCode, BuiltinDerivedObligation, ImplDerivedObligation};
use super::{SelectionError, Unimplemented, OutputTypeParameterMismatch};
use super::{ObjectCastObligation, Obligation};
use super::TraitNotObjectSafe;
use super::Selection;
use super::SelectionResult;
use super::{VtableBuiltin, VtableImpl, VtableParam, VtableClosure,
VtableFnPointer, VtableObject, VtableDefaultImpl};
use super::{VtableImplData, VtableObjectData, VtableBuiltinData,
VtableClosureData, VtableDefaultImplData};
use super::object_safety;
use super::util;
use middle::fast_reject;
use middle::subst::{Subst, Substs, TypeSpace};
use middle::ty::{self, ToPredicate, RegionEscape, ToPolyTraitRef, Ty, HasTypeFlags};
use middle::infer;
use middle::infer::{InferCtxt, TypeFreshener};
use middle::ty_fold::TypeFoldable;
use middle::ty_match;
use middle::ty_relate::TypeRelation;
use std::cell::RefCell;
use std::fmt;
use std::rc::Rc;
use syntax::{abi, ast};
use util::common::ErrorReported;
use util::nodemap::FnvHashMap;
pub struct SelectionContext<'cx, 'tcx:'cx> {
infcx: &'cx InferCtxt<'cx, 'tcx>,
/// Freshener used specifically for skolemizing entries on the
/// obligation stack. This ensures that all entries on the stack
/// at one time will have the same set of skolemized entries,
/// which is important for checking for trait bounds that
/// recursively require themselves.
freshener: TypeFreshener<'cx, 'tcx>,
/// If true, indicates that the evaluation should be conservative
/// and consider the possibility of types outside this crate.
/// This comes up primarily when resolving ambiguity. Imagine
/// there is some trait reference `$0 : Bar` where `$0` is an
/// inference variable. If `intercrate` is true, then we can never
/// say for sure that this reference is not implemented, even if
/// there are *no impls at all for `Bar`*, because `$0` could be
/// bound to some type that in a downstream crate that implements
/// `Bar`. This is the suitable mode for coherence. Elsewhere,
/// though, we set this to false, because we are only interested
/// in types that the user could actually have written --- in
/// other words, we consider `$0 : Bar` to be unimplemented if
/// there is no type that the user could *actually name* that
/// would satisfy it. This avoids crippling inference, basically.
intercrate: bool,
}
// A stack that walks back up the stack frame.
struct TraitObligationStack<'prev, 'tcx: 'prev> {
obligation: &'prev TraitObligation<'tcx>,
/// Trait ref from `obligation` but skolemized with the
/// selection-context's freshener. Used to check for recursion.
fresh_trait_ref: ty::PolyTraitRef<'tcx>,
previous: TraitObligationStackList<'prev, 'tcx>,
}
#[derive(Clone)]
pub struct SelectionCache<'tcx> {
hashmap: RefCell<FnvHashMap<ty::TraitRef<'tcx>,
SelectionResult<'tcx, SelectionCandidate<'tcx>>>>,
}
pub enum MethodMatchResult {
MethodMatched(MethodMatchedData),
MethodAmbiguous(/* list of impls that could apply */ Vec<ast::DefId>),
MethodDidNotMatch,
}
#[derive(Copy, Clone, Debug)]
pub enum MethodMatchedData {
// In the case of a precise match, we don't really need to store
// how the match was found. So don't.
PreciseMethodMatch,
// In the case of a coercion, we need to know the precise impl so
// that we can determine the type to which things were coerced.
CoerciveMethodMatch(/* impl we matched */ ast::DefId)
}
/// The selection process begins by considering all impls, where
/// clauses, and so forth that might resolve an obligation. Sometimes
/// we'll be able to say definitively that (e.g.) an impl does not
/// apply to the obligation: perhaps it is defined for `usize` but the
/// obligation is for `int`. In that case, we drop the impl out of the
/// list. But the other cases are considered *candidates*.
///
/// For selection to succeed, there must be exactly one matching
/// candidate. If the obligation is fully known, this is guaranteed
/// by coherence. However, if the obligation contains type parameters
/// or variables, there may be multiple such impls.
///
/// It is not a real problem if multiple matching impls exist because
/// of type variables - it just means the obligation isn't sufficiently
/// elaborated. In that case we report an ambiguity, and the caller can
/// try again after more type information has been gathered or report a
/// "type annotations required" error.
///
/// However, with type parameters, this can be a real problem - type
/// parameters don't unify with regular types, but they *can* unify
/// with variables from blanket impls, and (unless we know its bounds
/// will always be satisfied) picking the blanket impl will be wrong
/// for at least *some* substitutions. To make this concrete, if we have
///
/// trait AsDebug { type Out : fmt::Debug; fn debug(self) -> Self::Out; }
/// impl<T: fmt::Debug> AsDebug for T {
/// type Out = T;
/// fn debug(self) -> fmt::Debug { self }
/// }
/// fn foo<T: AsDebug>(t: T) { println!("{:?}", <T as AsDebug>::debug(t)); }
///
/// we can't just use the impl to resolve the <T as AsDebug> obligation
/// - a type from another crate (that doesn't implement fmt::Debug) could
/// implement AsDebug.
///
/// Because where-clauses match the type exactly, multiple clauses can
/// only match if there are unresolved variables, and we can mostly just
/// report this ambiguity in that case. This is still a problem - we can't
/// *do anything* with ambiguities that involve only regions. This is issue
/// #21974.
///
/// If a single where-clause matches and there are no inference
/// variables left, then it definitely matches and we can just select
/// it.
///
/// In fact, we even select the where-clause when the obligation contains
/// inference variables. The can lead to inference making "leaps of logic",
/// for example in this situation:
///
/// pub trait Foo<T> { fn foo(&self) -> T; }
/// impl<T> Foo<()> for T { fn foo(&self) { } }
/// impl Foo<bool> for bool { fn foo(&self) -> bool { *self } }
///
/// pub fn foo<T>(t: T) where T: Foo<bool> {
/// println!("{:?}", <T as Foo<_>>::foo(&t));
/// }
/// fn main() { foo(false); }
///
/// Here the obligation <T as Foo<$0>> can be matched by both the blanket
/// impl and the where-clause. We select the where-clause and unify $0=bool,
/// so the program prints "false". However, if the where-clause is omitted,
/// the blanket impl is selected, we unify $0=(), and the program prints
/// "()".
///
/// Exactly the same issues apply to projection and object candidates, except
/// that we can have both a projection candidate and a where-clause candidate
/// for the same obligation. In that case either would do (except that
/// different "leaps of logic" would occur if inference variables are
/// present), and we just pick the projection. This is, for example,
/// required for associated types to work in default impls, as the bounds
/// are visible both as projection bounds and as where-clauses from the
/// parameter environment.
#[derive(PartialEq,Eq,Debug,Clone)]
enum SelectionCandidate<'tcx> {
PhantomFnCandidate,
BuiltinCandidate(ty::BuiltinBound),
ParamCandidate(ty::PolyTraitRef<'tcx>),
ImplCandidate(ast::DefId),
DefaultImplCandidate(ast::DefId),
DefaultImplObjectCandidate(ast::DefId),
/// This is a trait matching with a projected type as `Self`, and
/// we found an applicable bound in the trait definition.
ProjectionCandidate,
/// Implementation of a `Fn`-family trait by one of the
/// anonymous types generated for a `||` expression.
ClosureCandidate(/* closure */ ast::DefId, &'tcx ty::ClosureSubsts<'tcx>),
/// Implementation of a `Fn`-family trait by one of the anonymous
/// types generated for a fn pointer type (e.g., `fn(int)->int`)
FnPointerCandidate,
ObjectCandidate,
BuiltinObjectCandidate,
BuiltinUnsizeCandidate,
ErrorCandidate,
}
struct SelectionCandidateSet<'tcx> {
// a list of candidates that definitely apply to the current
// obligation (meaning: types unify).
vec: Vec<SelectionCandidate<'tcx>>,
// if this is true, then there were candidates that might or might
// not have applied, but we couldn't tell. This occurs when some
// of the input types are type variables, in which case there are
// various "builtin" rules that might or might not trigger.
ambiguous: bool,
}
enum BuiltinBoundConditions<'tcx> {
If(ty::Binder<Vec<Ty<'tcx>>>),
ParameterBuiltin,
AmbiguousBuiltin
}
#[derive(Debug)]
enum EvaluationResult<'tcx> {
EvaluatedToOk,
EvaluatedToAmbig,
EvaluatedToErr(SelectionError<'tcx>),
}
impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
pub fn new(infcx: &'cx InferCtxt<'cx, 'tcx>)
-> SelectionContext<'cx, 'tcx> {
SelectionContext {
infcx: infcx,
freshener: infcx.freshener(),
intercrate: false,
}
}
pub fn intercrate(infcx: &'cx InferCtxt<'cx, 'tcx>)
-> SelectionContext<'cx, 'tcx> {
SelectionContext {
infcx: infcx,
freshener: infcx.freshener(),
intercrate: true,
}
}
pub fn infcx(&self) -> &'cx InferCtxt<'cx, 'tcx> {
self.infcx
}
pub fn tcx(&self) -> &'cx ty::ctxt<'tcx> {
self.infcx.tcx
}
pub fn param_env(&self) -> &'cx ty::ParameterEnvironment<'cx, 'tcx> {
self.infcx.param_env()
}
pub fn closure_typer(&self) -> &'cx InferCtxt<'cx, 'tcx> {
self.infcx
}
///////////////////////////////////////////////////////////////////////////
// Selection
//
// The selection phase tries to identify *how* an obligation will
// be resolved. For example, it will identify which impl or
// parameter bound is to be used. The process can be inconclusive
// if the self type in the obligation is not fully inferred. Selection
// can result in an error in one of two ways:
//
// 1. If no applicable impl or parameter bound can be found.
// 2. If the output type parameters in the obligation do not match
// those specified by the impl/bound. For example, if the obligation
// is `Vec<Foo>:Iterable<Bar>`, but the impl specifies
// `impl<T> Iterable<T> for Vec<T>`, than an error would result.
/// Attempts to satisfy the obligation. If successful, this will affect the surrounding
/// type environment by performing unification.
pub fn select(&mut self, obligation: &TraitObligation<'tcx>)
-> SelectionResult<'tcx, Selection<'tcx>> {
debug!("select({:?})", obligation);
assert!(!obligation.predicate.has_escaping_regions());
let stack = self.push_stack(TraitObligationStackList::empty(), obligation);
match try!(self.candidate_from_obligation(&stack)) {
None => {
self.consider_unification_despite_ambiguity(obligation);
Ok(None)
}
Some(candidate) => Ok(Some(try!(self.confirm_candidate(obligation, candidate)))),
}
}
/// In the particular case of unboxed closure obligations, we can
/// sometimes do some amount of unification for the
/// argument/return types even though we can't yet fully match obligation.
/// The particular case we are interesting in is an obligation of the form:
///
/// C : FnFoo<A>
///
/// where `C` is an unboxed closure type and `FnFoo` is one of the
/// `Fn` traits. Because we know that users cannot write impls for closure types
/// themselves, the only way that `C : FnFoo` can fail to match is under two
/// conditions:
///
/// 1. The closure kind for `C` is not yet known, because inference isn't complete.
/// 2. The closure kind for `C` *is* known, but doesn't match what is needed.
/// For example, `C` may be a `FnOnce` closure, but a `Fn` closure is needed.
///
/// In either case, we always know what argument types are
/// expected by `C`, no matter what kind of `Fn` trait it
/// eventually matches. So we can go ahead and unify the argument
/// types, even though the end result is ambiguous.
///
/// Note that this is safe *even if* the trait would never be
/// matched (case 2 above). After all, in that case, an error will
/// result, so it kind of doesn't matter what we do --- unifying
/// the argument types can only be helpful to the user, because
/// once they patch up the kind of closure that is expected, the
/// argment types won't really change.
fn consider_unification_despite_ambiguity(&mut self, obligation: &TraitObligation<'tcx>) {
// Is this a `C : FnFoo(...)` trait reference for some trait binding `FnFoo`?
match self.tcx().lang_items.fn_trait_kind(obligation.predicate.0.def_id()) {
Some(_) => { }
None => { return; }
}
// Is the self-type a closure type? We ignore bindings here
// because if it is a closure type, it must be a closure type from
// within this current fn, and hence none of the higher-ranked
// lifetimes can appear inside the self-type.
let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
let (closure_def_id, substs) = match self_ty.sty {
ty::TyClosure(id, ref substs) => (id, substs),
_ => { return; }
};
assert!(!substs.has_escaping_regions());
// It is OK to call the unnormalized variant here - this is only
// reached for TyClosure: Fn inputs where the closure kind is
// still unknown, which should only occur in typeck where the
// closure type is already normalized.
let closure_trait_ref = self.closure_trait_ref_unnormalized(obligation,
closure_def_id,
substs);
match self.confirm_poly_trait_refs(obligation.cause.clone(),
obligation.predicate.to_poly_trait_ref(),
closure_trait_ref) {
Ok(()) => { }
Err(_) => { /* Silently ignore errors. */ }
}
}
///////////////////////////////////////////////////////////////////////////
// EVALUATION
//
// Tests whether an obligation can be selected or whether an impl
// can be applied to particular types. It skips the "confirmation"
// step and hence completely ignores output type parameters.
//
// The result is "true" if the obligation *may* hold and "false" if
// we can be sure it does not.
/// Evaluates whether the obligation `obligation` can be satisfied (by any means).
pub fn evaluate_obligation(&mut self,
obligation: &PredicateObligation<'tcx>)
-> bool
{
debug!("evaluate_obligation({:?})",
obligation);
self.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation)
.may_apply()
}
fn evaluate_builtin_bound_recursively<'o>(&mut self,
bound: ty::BuiltinBound,
previous_stack: &TraitObligationStack<'o, 'tcx>,
ty: Ty<'tcx>)
-> EvaluationResult<'tcx>
{
let obligation =
util::predicate_for_builtin_bound(
self.tcx(),
previous_stack.obligation.cause.clone(),
bound,
previous_stack.obligation.recursion_depth + 1,
ty);
match obligation {
Ok(obligation) => {
self.evaluate_predicate_recursively(previous_stack.list(), &obligation)
}
Err(ErrorReported) => {
EvaluatedToOk
}
}
}
fn evaluate_predicates_recursively<'a,'o,I>(&mut self,
stack: TraitObligationStackList<'o, 'tcx>,
predicates: I)
-> EvaluationResult<'tcx>
where I : Iterator<Item=&'a PredicateObligation<'tcx>>, 'tcx:'a
{
let mut result = EvaluatedToOk;
for obligation in predicates {
match self.evaluate_predicate_recursively(stack, obligation) {
EvaluatedToErr(e) => { return EvaluatedToErr(e); }
EvaluatedToAmbig => { result = EvaluatedToAmbig; }
EvaluatedToOk => { }
}
}
result
}
fn evaluate_predicate_recursively<'o>(&mut self,
previous_stack: TraitObligationStackList<'o, 'tcx>,
obligation: &PredicateObligation<'tcx>)
-> EvaluationResult<'tcx>
{
debug!("evaluate_predicate_recursively({:?})",
obligation);
// Check the cache from the tcx of predicates that we know
// have been proven elsewhere. This cache only contains
// predicates that are global in scope and hence unaffected by
// the current environment.
if self.tcx().fulfilled_predicates.borrow().is_duplicate(&obligation.predicate) {
return EvaluatedToOk;
}
match obligation.predicate {
ty::Predicate::Trait(ref t) => {
assert!(!t.has_escaping_regions());
let obligation = obligation.with(t.clone());
self.evaluate_obligation_recursively(previous_stack, &obligation)
}
ty::Predicate::Equate(ref p) => {
let result = self.infcx.probe(|_| {
self.infcx.equality_predicate(obligation.cause.span, p)
});
match result {
Ok(()) => EvaluatedToOk,
Err(_) => EvaluatedToErr(Unimplemented),
}
}
ty::Predicate::TypeOutlives(..) | ty::Predicate::RegionOutlives(..) => {
// we do not consider region relationships when
// evaluating trait matches
EvaluatedToOk
}
ty::Predicate::Projection(ref data) => {
self.infcx.probe(|_| {
let project_obligation = obligation.with(data.clone());
match project::poly_project_and_unify_type(self, &project_obligation) {
Ok(Some(subobligations)) => {
self.evaluate_predicates_recursively(previous_stack,
subobligations.iter())
}
Ok(None) => {
EvaluatedToAmbig
}
Err(_) => {
EvaluatedToErr(Unimplemented)
}
}
})
}
}
}
fn evaluate_obligation_recursively<'o>(&mut self,
previous_stack: TraitObligationStackList<'o, 'tcx>,
obligation: &TraitObligation<'tcx>)
-> EvaluationResult<'tcx>
{
debug!("evaluate_obligation_recursively({:?})",
obligation);
let stack = self.push_stack(previous_stack, obligation);
let result = self.evaluate_stack(&stack);
debug!("result: {:?}", result);
result
}
fn evaluate_stack<'o>(&mut self,
stack: &TraitObligationStack<'o, 'tcx>)
-> EvaluationResult<'tcx>
{
// In intercrate mode, whenever any of the types are unbound,
// there can always be an impl. Even if there are no impls in
// this crate, perhaps the type would be unified with
// something from another crate that does provide an impl.
//
// In intracrate mode, we must still be conservative. The reason is
// that we want to avoid cycles. Imagine an impl like:
//
// impl<T:Eq> Eq for Vec<T>
//
// and a trait reference like `$0 : Eq` where `$0` is an
// unbound variable. When we evaluate this trait-reference, we
// will unify `$0` with `Vec<$1>` (for some fresh variable
// `$1`), on the condition that `$1 : Eq`. We will then wind
// up with many candidates (since that are other `Eq` impls
// that apply) and try to winnow things down. This results in
// a recursive evaluation that `$1 : Eq` -- as you can
// imagine, this is just where we started. To avoid that, we
// check for unbound variables and return an ambiguous (hence possible)
// match if we've seen this trait before.
//
// This suffices to allow chains like `FnMut` implemented in
// terms of `Fn` etc, but we could probably make this more
// precise still.
let input_types = stack.fresh_trait_ref.0.input_types();
let unbound_input_types = input_types.iter().any(|ty| ty.is_fresh());
if
unbound_input_types &&
(self.intercrate ||
stack.iter().skip(1).any(
|prev| self.match_fresh_trait_refs(&stack.fresh_trait_ref,
&prev.fresh_trait_ref)))
{
debug!("evaluate_stack({:?}) --> unbound argument, recursion --> ambiguous",
stack.fresh_trait_ref);
return EvaluatedToAmbig;
}
// If there is any previous entry on the stack that precisely
// matches this obligation, then we can assume that the
// obligation is satisfied for now (still all other conditions
// must be met of course). One obvious case this comes up is
// marker traits like `Send`. Think of a linked list:
//
// struct List<T> { data: T, next: Option<Box<List<T>>> {
//
// `Box<List<T>>` will be `Send` if `T` is `Send` and
// `Option<Box<List<T>>>` is `Send`, and in turn
// `Option<Box<List<T>>>` is `Send` if `Box<List<T>>` is
// `Send`.
//
// Note that we do this comparison using the `fresh_trait_ref`
// fields. Because these have all been skolemized using
// `self.freshener`, we can be sure that (a) this will not
// affect the inferencer state and (b) that if we see two
// skolemized types with the same index, they refer to the
// same unbound type variable.
if
stack.iter()
.skip(1) // skip top-most frame
.any(|prev| stack.fresh_trait_ref == prev.fresh_trait_ref)
{
debug!("evaluate_stack({:?}) --> recursive",
stack.fresh_trait_ref);
return EvaluatedToOk;
}
match self.candidate_from_obligation(stack) {
Ok(Some(c)) => self.winnow_candidate(stack, &c),
Ok(None) => EvaluatedToAmbig,
Err(e) => EvaluatedToErr(e),
}
}
/// Evaluates whether the impl with id `impl_def_id` could be applied to the self type
/// `obligation_self_ty`. This can be used either for trait or inherent impls.
pub fn evaluate_impl(&mut self,
impl_def_id: ast::DefId,
obligation: &TraitObligation<'tcx>)
-> bool
{
debug!("evaluate_impl(impl_def_id={:?}, obligation={:?})",
impl_def_id,
obligation);
self.infcx.probe(|snapshot| {
match self.match_impl(impl_def_id, obligation, snapshot) {
Ok((substs, skol_map)) => {
let vtable_impl = self.vtable_impl(impl_def_id,
substs,
obligation.cause.clone(),
obligation.recursion_depth + 1,
skol_map,
snapshot);
self.winnow_selection(TraitObligationStackList::empty(),
VtableImpl(vtable_impl)).may_apply()
}
Err(()) => {
false
}
}
})
}
///////////////////////////////////////////////////////////////////////////
// CANDIDATE ASSEMBLY
//
// The selection process begins by examining all in-scope impls,
// caller obligations, and so forth and assembling a list of
// candidates. See `README.md` and the `Candidate` type for more
// details.
fn candidate_from_obligation<'o>(&mut self,
stack: &TraitObligationStack<'o, 'tcx>)
-> SelectionResult<'tcx, SelectionCandidate<'tcx>>
{
// Watch out for overflow. This intentionally bypasses (and does
// not update) the cache.
let recursion_limit = self.infcx.tcx.sess.recursion_limit.get();
if stack.obligation.recursion_depth >= recursion_limit {
report_overflow_error(self.infcx(), &stack.obligation);
}
// Check the cache. Note that we skolemize the trait-ref
// separately rather than using `stack.fresh_trait_ref` -- this
// is because we want the unbound variables to be replaced
// with fresh skolemized types starting from index 0.
let cache_fresh_trait_pred =
self.infcx.freshen(stack.obligation.predicate.clone());
debug!("candidate_from_obligation(cache_fresh_trait_pred={:?}, obligation={:?})",
cache_fresh_trait_pred,
stack);
assert!(!stack.obligation.predicate.has_escaping_regions());
match self.check_candidate_cache(&cache_fresh_trait_pred) {
Some(c) => {
debug!("CACHE HIT: cache_fresh_trait_pred={:?}, candidate={:?}",
cache_fresh_trait_pred,
c);
return c;
}
None => { }
}
// If no match, compute result and insert into cache.
let candidate = self.candidate_from_obligation_no_cache(stack);
if self.should_update_candidate_cache(&cache_fresh_trait_pred, &candidate) {
debug!("CACHE MISS: cache_fresh_trait_pred={:?}, candidate={:?}",
cache_fresh_trait_pred, candidate);
self.insert_candidate_cache(cache_fresh_trait_pred, candidate.clone());
}
candidate
}
fn candidate_from_obligation_no_cache<'o>(&mut self,
stack: &TraitObligationStack<'o, 'tcx>)
-> SelectionResult<'tcx, SelectionCandidate<'tcx>>
{
if stack.obligation.predicate.0.self_ty().references_error() {
return Ok(Some(ErrorCandidate));
}
if !self.is_knowable(stack) {
debug!("intercrate not knowable");
return Ok(None);
}
let candidate_set = try!(self.assemble_candidates(stack));
if candidate_set.ambiguous {
debug!("candidate set contains ambig");
return Ok(None);
}
let mut candidates = candidate_set.vec;
debug!("assembled {} candidates for {:?}: {:?}",
candidates.len(),
stack,
candidates);
// At this point, we know that each of the entries in the
// candidate set is *individually* applicable. Now we have to
// figure out if they contain mutual incompatibilities. This
// frequently arises if we have an unconstrained input type --
// for example, we are looking for $0:Eq where $0 is some
// unconstrained type variable. In that case, we'll get a
// candidate which assumes $0 == int, one that assumes $0 ==
// usize, etc. This spells an ambiguity.
// If there is more than one candidate, first winnow them down
// by considering extra conditions (nested obligations and so
// forth). We don't winnow if there is exactly one
// candidate. This is a relatively minor distinction but it
// can lead to better inference and error-reporting. An
// example would be if there was an impl:
//
// impl<T:Clone> Vec<T> { fn push_clone(...) { ... } }
//
// and we were to see some code `foo.push_clone()` where `boo`
// is a `Vec<Bar>` and `Bar` does not implement `Clone`. If
// we were to winnow, we'd wind up with zero candidates.
// Instead, we select the right impl now but report `Bar does
// not implement Clone`.
if candidates.len() > 1 {
candidates.retain(|c| self.winnow_candidate(stack, c).may_apply())
}
// If there are STILL multiple candidate, we can further reduce
// the list by dropping duplicates.
if candidates.len() > 1 {
let mut i = 0;
while i < candidates.len() {
let is_dup =
(0..candidates.len())
.filter(|&j| i != j)
.any(|j| self.candidate_should_be_dropped_in_favor_of(&candidates[i],
&candidates[j]));
if is_dup {
debug!("Dropping candidate #{}/{}: {:?}",
i, candidates.len(), candidates[i]);
candidates.swap_remove(i);
} else {
debug!("Retaining candidate #{}/{}: {:?}",
i, candidates.len(), candidates[i]);
i += 1;
}
}
}
// If there are *STILL* multiple candidates, give up and
// report ambiguity.
if candidates.len() > 1 {
debug!("multiple matches, ambig");
return Ok(None);
}
// If there are *NO* candidates, that there are no impls --
// that we know of, anyway. Note that in the case where there
// are unbound type variables within the obligation, it might
// be the case that you could still satisfy the obligation
// from another crate by instantiating the type variables with
// a type from another crate that does have an impl. This case
// is checked for in `evaluate_stack` (and hence users
// who might care about this case, like coherence, should use
// that function).
if candidates.is_empty() {
return Err(Unimplemented);
}
// Just one candidate left.
let candidate = candidates.pop().unwrap();
match candidate {
ImplCandidate(def_id) => {
match self.tcx().trait_impl_polarity(def_id) {
Some(ast::ImplPolarity::Negative) => return Err(Unimplemented),
_ => {}
}
}
_ => {}
}
Ok(Some(candidate))
}
fn is_knowable<'o>(&mut self,
stack: &TraitObligationStack<'o, 'tcx>)
-> bool
{
debug!("is_knowable(intercrate={})", self.intercrate);
if !self.intercrate {
return true;
}
let obligation = &stack.obligation;
let predicate = self.infcx().resolve_type_vars_if_possible(&obligation.predicate);
// ok to skip binder because of the nature of the
// trait-ref-is-knowable check, which does not care about
// bound regions
let trait_ref = &predicate.skip_binder().trait_ref;
coherence::trait_ref_is_knowable(self.tcx(), trait_ref)
}
fn pick_candidate_cache(&self) -> &SelectionCache<'tcx> {
// If there are any where-clauses in scope, then we always use
// a cache local to this particular scope. Otherwise, we
// switch to a global cache. We used to try and draw
// finer-grained distinctions, but that led to a serious of
// annoying and weird bugs like #22019 and #18290. This simple
// rule seems to be pretty clearly safe and also still retains
// a very high hit rate (~95% when compiling rustc).
if !self.param_env().caller_bounds.is_empty() {
return &self.param_env().selection_cache;
}
// Avoid using the master cache during coherence and just rely
// on the local cache. This effectively disables caching
// during coherence. It is really just a simplification to
// avoid us having to fear that coherence results "pollute"
// the master cache. Since coherence executes pretty quickly,
// it's not worth going to more trouble to increase the
// hit-rate I don't think.
if self.intercrate {
return &self.param_env().selection_cache;
}
// Otherwise, we can use the global cache.
&self.tcx().selection_cache
}
fn check_candidate_cache(&mut self,
cache_fresh_trait_pred: &ty::PolyTraitPredicate<'tcx>)
-> Option<SelectionResult<'tcx, SelectionCandidate<'tcx>>>
{
let cache = self.pick_candidate_cache();
let hashmap = cache.hashmap.borrow();
hashmap.get(&cache_fresh_trait_pred.0.trait_ref).cloned()
}
fn insert_candidate_cache(&mut self,
cache_fresh_trait_pred: ty::PolyTraitPredicate<'tcx>,
candidate: SelectionResult<'tcx, SelectionCandidate<'tcx>>)
{
let cache = self.pick_candidate_cache();
let mut hashmap = cache.hashmap.borrow_mut();
hashmap.insert(cache_fresh_trait_pred.0.trait_ref.clone(), candidate);
}
fn should_update_candidate_cache(&mut self,
cache_fresh_trait_pred: &ty::PolyTraitPredicate<'tcx>,
candidate: &SelectionResult<'tcx, SelectionCandidate<'tcx>>)
-> bool
{
// In general, it's a good idea to cache results, even
// ambiguous ones, to save us some trouble later. But we have
// to be careful not to cache results that could be
// invalidated later by advances in inference. Normally, this
// is not an issue, because any inference variables whose
// types are not yet bound are "freshened" in the cache key,
// which means that if we later get the same request once that
// type variable IS bound, we'll have a different cache key.
// For example, if we have `Vec<_#0t> : Foo`, and `_#0t` is
// not yet known, we may cache the result as `None`. But if
// later `_#0t` is bound to `Bar`, then when we freshen we'll
// have `Vec<Bar> : Foo` as the cache key.
//
// HOWEVER, it CAN happen that we get an ambiguity result in
// one particular case around closures where the cache key
// would not change. That is when the precise types of the
// upvars that a closure references have not yet been figured
// out (i.e., because it is not yet known if they are captured
// by ref, and if by ref, what kind of ref). In these cases,
// when matching a builtin bound, we will yield back an
// ambiguous result. But the *cache key* is just the closure type,
// it doesn't capture the state of the upvar computation.
//
// To avoid this trap, just don't cache ambiguous results if
// the self-type contains no inference byproducts (that really
// shouldn't happen in other circumstances anyway, given
// coherence).
match *candidate {
Ok(Some(_)) | Err(_) => true,
Ok(None) => {
cache_fresh_trait_pred.0.input_types().has_infer_types()
}
}
}
    /// Assembles the full set of candidates (impls, builtin bounds, closure
    /// impls, caller where-clauses, ...) that might satisfy
    /// `stack.obligation`, without committing to any of them.
    ///
    /// The dispatch below depends on which (if any) builtin bound the
    /// obligation's trait maps to; the order of the `try!` calls encodes
    /// precedence and should not be reordered casually.
    fn assemble_candidates<'o>(&mut self,
                               stack: &TraitObligationStack<'o, 'tcx>)
                               -> Result<SelectionCandidateSet<'tcx>, SelectionError<'tcx>>
    {
        let TraitObligationStack { obligation, .. } = *stack;
        let mut candidates = SelectionCandidateSet {
            vec: Vec::new(),
            ambiguous: false
        };
        // Other bounds. Consider both in-scope bounds from fn decl
        // and applicable impls. There is a certain set of precedence rules here.
        match self.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) {
            Some(ty::BoundCopy) => {
                debug!("obligation self ty is {:?}",
                       obligation.predicate.0.self_ty());
                // User-defined copy impls are permitted, but only for
                // structs and enums.
                try!(self.assemble_candidates_from_impls(obligation, &mut candidates));
                // For other types, we'll use the builtin rules.
                try!(self.assemble_builtin_bound_candidates(ty::BoundCopy,
                                                            stack,
                                                            &mut candidates));
            }
            Some(bound @ ty::BoundSized) => {
                // Sized is never implementable by end-users, it is
                // always automatically computed.
                try!(self.assemble_builtin_bound_candidates(bound, stack, &mut candidates));
            }
            None if self.tcx().lang_items.unsize_trait() ==
                    Some(obligation.predicate.def_id()) => {
                // `Unsize` has its own dedicated candidate assembly.
                self.assemble_candidates_for_unsizing(obligation, &mut candidates);
            }
            Some(ty::BoundSend) |
            Some(ty::BoundSync) |
            None => {
                // Ordinary (non-builtin-special-cased) traits: consider
                // closure impls, fn-pointer impls, user impls, and object types.
                try!(self.assemble_closure_candidates(obligation, &mut candidates));
                try!(self.assemble_fn_pointer_candidates(obligation, &mut candidates));
                try!(self.assemble_candidates_from_impls(obligation, &mut candidates));
                self.assemble_candidates_from_object_ty(obligation, &mut candidates);
            }
        }
        self.assemble_candidates_from_projected_tys(obligation, &mut candidates);
        try!(self.assemble_candidates_from_caller_bounds(stack, &mut candidates));
        // Default implementations have lower priority, so we only
        // consider triggering a default if there is no other impl that can apply.
        if candidates.vec.is_empty() {
            try!(self.assemble_candidates_from_default_impls(obligation, &mut candidates));
        }
        debug!("candidate list size: {}", candidates.vec.len());
        Ok(candidates)
    }
    /// If the obligation's self-type is a projection (`<T as Trait>::Assoc`),
    /// checks whether the bounds declared on the trait can satisfy the
    /// obligation and, if so, pushes a `ProjectionCandidate`. An unresolved
    /// inference variable as self-type induces ambiguity instead, since it
    /// may yet become a projection.
    fn assemble_candidates_from_projected_tys(&mut self,
                                              obligation: &TraitObligation<'tcx>,
                                              candidates: &mut SelectionCandidateSet<'tcx>)
    {
        let poly_trait_predicate =
            self.infcx().resolve_type_vars_if_possible(&obligation.predicate);
        debug!("assemble_candidates_for_projected_tys({:?},{:?})",
               obligation,
               poly_trait_predicate);
        // FIXME(#20297) -- just examining the self-type is very simplistic
        // before we go into the whole skolemization thing, just
        // quickly check if the self-type is a projection at all.
        let trait_def_id = match poly_trait_predicate.0.trait_ref.self_ty().sty {
            ty::TyProjection(ref data) => data.trait_ref.def_id,
            ty::TyInfer(ty::TyVar(_)) => {
                // If the self-type is an inference variable, then it MAY wind up
                // being a projected type, so induce an ambiguity.
                //
                // FIXME(#20297) -- being strict about this can cause
                // inference failures with BorrowFrom, which is
                // unfortunate. Can we do better here?
                debug!("assemble_candidates_for_projected_tys: ambiguous self-type");
                candidates.ambiguous = true;
                return;
            }
            _ => { return; }
        };
        debug!("assemble_candidates_for_projected_tys: trait_def_id={:?}",
               trait_def_id);
        // Probe so that a failed match leaves no trace in the inference state.
        let result = self.infcx.probe(|snapshot| {
            self.match_projection_obligation_against_bounds_from_trait(obligation,
                                                                       snapshot)
        });
        if result {
            candidates.vec.push(ProjectionCandidate);
        }
    }
    /// Given an obligation whose self-type is a projection
    /// `<T as SomeTrait>::Assoc`, searches the (elaborated) bounds declared
    /// on `SomeTrait` for one that matches the obligation. Returns true on
    /// a match — and, on success, repeats the match *outside* of a probe so
    /// its side-effects on inference persist.
    fn match_projection_obligation_against_bounds_from_trait(
        &mut self,
        obligation: &TraitObligation<'tcx>,
        snapshot: &infer::CombinedSnapshot)
        -> bool
    {
        let poly_trait_predicate =
            self.infcx().resolve_type_vars_if_possible(&obligation.predicate);
        // Replace late-bound regions with skolemized placeholders so we can
        // work with the predicate's interior.
        let (skol_trait_predicate, skol_map) =
            self.infcx().skolemize_late_bound_regions(&poly_trait_predicate, snapshot);
        debug!("match_projection_obligation_against_bounds_from_trait: \
                skol_trait_predicate={:?} skol_map={:?}",
               skol_trait_predicate,
               skol_map);
        let projection_trait_ref = match skol_trait_predicate.trait_ref.self_ty().sty {
            ty::TyProjection(ref data) => &data.trait_ref,
            _ => {
                // The caller guarantees the self-type is a projection.
                self.tcx().sess.span_bug(
                    obligation.cause.span,
                    &format!("match_projection_obligation_against_bounds_from_trait() called \
                              but self-ty not a projection: {:?}",
                             skol_trait_predicate.trait_ref.self_ty()));
            }
        };
        debug!("match_projection_obligation_against_bounds_from_trait: \
                projection_trait_ref={:?}",
               projection_trait_ref);
        // Instantiate the trait's declared predicates with the projection's
        // substitutions, then elaborate (include implied supertrait bounds).
        let trait_predicates = self.tcx().lookup_predicates(projection_trait_ref.def_id);
        let bounds = trait_predicates.instantiate(self.tcx(), projection_trait_ref.substs);
        debug!("match_projection_obligation_against_bounds_from_trait: \
                bounds={:?}",
               bounds);
        let matching_bound =
            util::elaborate_predicates(self.tcx(), bounds.predicates.into_vec())
            .filter_to_traits()
            .find(
                |bound| self.infcx.probe(
                    |_| self.match_projection(obligation,
                                              bound.clone(),
                                              skol_trait_predicate.trait_ref.clone(),
                                              &skol_map,
                                              snapshot)));
        debug!("match_projection_obligation_against_bounds_from_trait: \
                matching_bound={:?}",
               matching_bound);
        match matching_bound {
            None => false,
            Some(bound) => {
                // Repeat the successful match, if any, this time outside of a probe.
                let result = self.match_projection(obligation,
                                                   bound,
                                                   skol_trait_predicate.trait_ref.clone(),
                                                   &skol_map,
                                                   snapshot);
                // A match that succeeded inside the probe must succeed again.
                assert!(result);
                true
            }
        }
    }
fn match_projection(&mut self,
obligation: &TraitObligation<'tcx>,
trait_bound: ty::PolyTraitRef<'tcx>,
skol_trait_ref: ty::TraitRef<'tcx>,
skol_map: &infer::SkolemizationMap,
snapshot: &infer::CombinedSnapshot)
-> bool
{
assert!(!skol_trait_ref.has_escaping_regions());
let origin = infer::RelateOutputImplTypes(obligation.cause.span);
match self.infcx.sub_poly_trait_refs(false,
origin,
trait_bound.clone(),
ty::Binder(skol_trait_ref.clone())) {
Ok(()) => { }
Err(_) => { return false; }
}
self.infcx.leak_check(skol_map, snapshot).is_ok()
}
/// Given an obligation like `<SomeTrait for T>`, search the obligations that the caller
/// supplied to find out whether it is listed among them.
///
/// Never affects inference environment.
fn assemble_candidates_from_caller_bounds<'o>(&mut self,
stack: &TraitObligationStack<'o, 'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>)
-> Result<(),SelectionError<'tcx>>
{
debug!("assemble_candidates_from_caller_bounds({:?})",
stack.obligation);
let all_bounds =
self.param_env().caller_bounds
.iter()
.filter_map(|o| o.to_opt_poly_trait_ref());
let matching_bounds =
all_bounds.filter(
|bound| self.evaluate_where_clause(stack, bound.clone()).may_apply());
let param_candidates =
matching_bounds.map(|bound| ParamCandidate(bound));
candidates.vec.extend(param_candidates);
Ok(())
}
fn evaluate_where_clause<'o>(&mut self,
stack: &TraitObligationStack<'o, 'tcx>,
where_clause_trait_ref: ty::PolyTraitRef<'tcx>)
-> EvaluationResult<'tcx>
{
self.infcx().probe(move |_| {
match self.match_where_clause_trait_ref(stack.obligation, where_clause_trait_ref) {
Ok(obligations) => {
self.evaluate_predicates_recursively(stack.list(), obligations.iter())
}
Err(()) => {
EvaluatedToErr(Unimplemented)
}
}
})
}
    /// Check for the artificial impl that the compiler will create for an obligation like `X :
    /// FnMut<..>` where `X` is a closure type.
    ///
    /// Note: the type parameters on a closure candidate are modeled as *output* type
    /// parameters and hence do not affect whether this trait is a match or not. They will be
    /// unified during the confirmation step.
    fn assemble_closure_candidates(&mut self,
                                   obligation: &TraitObligation<'tcx>,
                                   candidates: &mut SelectionCandidateSet<'tcx>)
                                   -> Result<(),SelectionError<'tcx>>
    {
        // Only relevant when the obligation's trait is one of Fn/FnMut/FnOnce.
        let kind = match self.tcx().lang_items.fn_trait_kind(obligation.predicate.0.def_id()) {
            Some(k) => k,
            None => { return Ok(()); }
        };
        // ok to skip binder because the substs on closure types never
        // touch bound regions, they just capture the in-scope
        // type/region parameters
        let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
        let (closure_def_id, substs) = match self_ty.sty {
            ty::TyClosure(id, ref substs) => (id, substs),
            ty::TyInfer(ty::TyVar(_)) => {
                // Unresolved self-type: it might still turn out to be a closure.
                debug!("assemble_unboxed_closure_candidates: ambiguous self-type");
                candidates.ambiguous = true;
                return Ok(());
            }
            _ => { return Ok(()); }
        };
        debug!("assemble_unboxed_candidates: self_ty={:?} kind={:?} obligation={:?}",
               self_ty,
               kind,
               obligation);
        match self.infcx.closure_kind(closure_def_id) {
            Some(closure_kind) => {
                debug!("assemble_unboxed_candidates: closure_kind = {:?}", closure_kind);
                // e.g. an `Fn` closure also satisfies `FnMut`/`FnOnce`.
                if closure_kind.extends(kind) {
                    candidates.vec.push(ClosureCandidate(closure_def_id, substs));
                }
            }
            None => {
                // The closure's kind hasn't been inferred yet, so we can't
                // tell whether it matches; record ambiguity.
                debug!("assemble_unboxed_candidates: closure_kind not yet known");
                candidates.ambiguous = true;
            }
        }
        Ok(())
    }
    /// Implement one of the `Fn()` family for a fn pointer.
    fn assemble_fn_pointer_candidates(&mut self,
                                      obligation: &TraitObligation<'tcx>,
                                      candidates: &mut SelectionCandidateSet<'tcx>)
                                      -> Result<(),SelectionError<'tcx>>
    {
        // We provide impl of all fn traits for fn pointers.
        if self.tcx().lang_items.fn_trait_kind(obligation.predicate.def_id()).is_none() {
            return Ok(());
        }
        // ok to skip binder because what we are inspecting doesn't involve bound regions
        let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder())
        ;
        match self_ty.sty {
            ty::TyInfer(ty::TyVar(_)) => {
                debug!("assemble_fn_pointer_candidates: ambiguous self-type");
                candidates.ambiguous = true; // could wind up being a fn() type
            }
            // provide an impl, but only for suitable `fn` pointers: safe,
            // Rust-ABI, non-variadic, with a converging (non-`!`) return type.
            ty::TyBareFn(_, &ty::BareFnTy {
                unsafety: ast::Unsafety::Normal,
                abi: abi::Rust,
                sig: ty::Binder(ty::FnSig {
                    inputs: _,
                    output: ty::FnConverging(_),
                    variadic: false
                })
            }) => {
                candidates.vec.push(FnPointerCandidate);
            }
            _ => { }
        }
        Ok(())
    }
/// Search for impls that might apply to `obligation`.
fn assemble_candidates_from_impls(&mut self,
obligation: &TraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>)
-> Result<(), SelectionError<'tcx>>
{
debug!("assemble_candidates_from_impls(obligation={:?})", obligation);
let def = self.tcx().lookup_trait_def(obligation.predicate.def_id());
def.for_each_relevant_impl(
self.tcx(),
obligation.predicate.0.trait_ref.self_ty(),
|impl_def_id| {
self.infcx.probe(|snapshot| {
if let Ok(_) = self.match_impl(impl_def_id, obligation, snapshot) {
candidates.vec.push(ImplCandidate(impl_def_id));
}
});
}
);
Ok(())
}
    /// For traits that declare a default (`impl Trait for ..`) impl,
    /// records a default-impl candidate depending on the shape of the
    /// self-type. Object types, type parameters and projections are handled
    /// specially since their constituent types are not knowable here.
    fn assemble_candidates_from_default_impls(&mut self,
                                              obligation: &TraitObligation<'tcx>,
                                              candidates: &mut SelectionCandidateSet<'tcx>)
                                              -> Result<(), SelectionError<'tcx>>
    {
        // OK to skip binder here because the tests we do below do not involve bound regions
        let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
        debug!("assemble_candidates_from_default_impls(self_ty={:?})", self_ty);
        let def_id = obligation.predicate.def_id();
        if self.tcx().trait_has_default_impl(def_id) {
            match self_ty.sty {
                ty::TyTrait(..) => {
                    // For object types, we don't know what the closed
                    // over types are. For most traits, this means we
                    // conservatively say nothing; a candidate may be
                    // added by `assemble_candidates_from_object_ty`.
                    // However, for the kind of magic reflect trait,
                    // we consider it to be implemented even for
                    // object types, because it just lets you reflect
                    // onto the object type, not into the object's
                    // interior.
                    if self.tcx().has_attr(def_id, "rustc_reflect_like") {
                        candidates.vec.push(DefaultImplObjectCandidate(def_id));
                    }
                }
                ty::TyParam(..) |
                ty::TyProjection(..) => {
                    // In these cases, we don't know what the actual
                    // type is.  Therefore, we cannot break it down
                    // into its constituent types. So we don't
                    // consider the `..` impl but instead just add no
                    // candidates: this means that typeck will only
                    // succeed if there is another reason to believe
                    // that this obligation holds. That could be a
                    // where-clause or, in the case of an object type,
                    // it could be that the object type lists the
                    // trait (e.g. `Foo+Send : Send`). See
                    // `compile-fail/typeck-default-trait-impl-send-param.rs`
                    // for an example of a test case that exercises
                    // this path.
                }
                ty::TyInfer(ty::TyVar(_)) => {
                    // the defaulted impl might apply, we don't know
                    candidates.ambiguous = true;
                }
                _ => {
                    // Any other concrete type: the default impl applies
                    // (its nested obligations are checked at confirmation).
                    candidates.vec.push(DefaultImplCandidate(def_id.clone()))
                }
            }
        }
        Ok(())
    }
    /// Search for impls that might apply to `obligation`.
    fn assemble_candidates_from_object_ty(&mut self,
                                          obligation: &TraitObligation<'tcx>,
                                          candidates: &mut SelectionCandidateSet<'tcx>)
    {
        debug!("assemble_candidates_from_object_ty(self_ty={:?})",
               self.infcx.shallow_resolve(*obligation.self_ty().skip_binder()));
        // Object-safety candidates are only applicable to object-safe
        // traits. Including this check is useful because it helps
        // inference in cases of traits like `BorrowFrom`, which are
        // not object-safe, and which rely on being able to infer the
        // self-type from one of the other inputs. Without this check,
        // these cases wind up being considered ambiguous due to a
        // (spurious) ambiguity introduced here.
        let predicate_trait_ref = obligation.predicate.to_poly_trait_ref();
        if !object_safety::is_object_safe(self.tcx(), predicate_trait_ref.def_id()) {
            return;
        }
        self.infcx.commit_if_ok(|snapshot| {
            let bound_self_ty =
                self.infcx.resolve_type_vars_if_possible(&obligation.self_ty());
            let (self_ty, _) =
                self.infcx().skolemize_late_bound_regions(&bound_self_ty, snapshot);
            let poly_trait_ref = match self_ty.sty {
                ty::TyTrait(ref data) => {
                    // If the obligation is a builtin Send/Sync bound that the
                    // object type explicitly lists, that is a direct match.
                    match self.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) {
                        Some(bound @ ty::BoundSend) | Some(bound @ ty::BoundSync) => {
                            if data.bounds.builtin_bounds.contains(&bound) {
                                debug!("assemble_candidates_from_object_ty: matched builtin bound, \
                                        pushing candidate");
                                candidates.vec.push(BuiltinObjectCandidate);
                                return Ok(());
                            }
                        }
                        _ => {}
                    }
                    data.principal_trait_ref_with_self_ty(self.tcx(), self_ty)
                }
                ty::TyInfer(ty::TyVar(_)) => {
                    debug!("assemble_candidates_from_object_ty: ambiguous");
                    candidates.ambiguous = true; // could wind up being an object type
                    return Ok(());
                }
                _ => {
                    return Ok(());
                }
            };
            debug!("assemble_candidates_from_object_ty: poly_trait_ref={:?}",
                   poly_trait_ref);
            // Count only those upcast versions that match the trait-ref
            // we are looking for. Specifically, do not only check for the
            // correct trait, but also the correct type parameters.
            // For example, we may be trying to upcast `Foo` to `Bar<i32>`,
            // but `Foo` is declared as `trait Foo : Bar<u32>`.
            let upcast_trait_refs =
                util::supertraits(self.tcx(), poly_trait_ref)
                .filter(|upcast_trait_ref| {
                    self.infcx.probe(|_| {
                        let upcast_trait_ref = upcast_trait_ref.clone();
                        self.match_poly_trait_ref(obligation, upcast_trait_ref).is_ok()
                    })
                })
                .count();
            if upcast_trait_refs > 1 {
                // can be upcast in many ways; need more type information
                candidates.ambiguous = true;
            } else if upcast_trait_refs == 1 {
                candidates.vec.push(ObjectCandidate);
            }
            // Annotate so `commit_if_ok` type-checks; we never return Err here.
            Ok::<(),()>(())
        }).unwrap();
    }
/// Search for unsizing that might apply to `obligation`.
fn assemble_candidates_for_unsizing(&mut self,
obligation: &TraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>) {
// We currently never consider higher-ranked obligations e.g.
// `for<'a> &'a T: Unsize<Trait+'a>` to be implemented. This is not
// because they are a priori invalid, and we could potentially add support
// for them later, it's just that there isn't really a strong need for it.
// A `T: Unsize<U>` obligation is always used as part of a `T: CoerceUnsize<U>`
// impl, and those are generally applied to concrete types.
//
// That said, one might try to write a fn with a where clause like
// for<'a> Foo<'a, T>: Unsize<Foo<'a, Trait>>
// where the `'a` is kind of orthogonal to the relevant part of the `Unsize`.
// Still, you'd be more likely to write that where clause as
// T: Trait
// so it seems ok if we (conservatively) fail to accept that `Unsize`
// obligation above. Should be possible to extend this in the future.
let self_ty = match self.tcx().no_late_bound_regions(&obligation.self_ty()) {
Some(t) => t,
None => {
// Don't add any candidates if there are bound regions.
return;
}
};
let source = self.infcx.shallow_resolve(self_ty);
let target = self.infcx.shallow_resolve(obligation.predicate.0.input_types()[0]);
debug!("assemble_candidates_for_unsizing(source={:?}, target={:?})",
source, target);
let may_apply = match (&source.sty, &target.sty) {
// Trait+Kx+'a -> Trait+Ky+'b (upcasts).
(&ty::TyTrait(ref data_a), &ty::TyTrait(ref data_b)) => {
// Upcasts permit two things:
//
// 1. Dropping builtin bounds, e.g. `Foo+Send` to `Foo`
// 2. Tightening the region bound, e.g. `Foo+'a` to `Foo+'b` if `'a : 'b`
//
// Note that neither of these changes requires any
// change at runtime. Eventually this will be
// generalized.
//
// We always upcast when we can because of reason
// #2 (region bounds).
data_a.principal.def_id() == data_a.principal.def_id() &&
data_a.bounds.builtin_bounds.is_superset(&data_b.bounds.builtin_bounds)
}
// T -> Trait.
(_, &ty::TyTrait(_)) => true,
// Ambiguous handling is below T -> Trait, because inference
// variables can still implement Unsize<Trait> and nested
// obligations will have the final say (likely deferred).
(&ty::TyInfer(ty::TyVar(_)), _) |
(_, &ty::TyInfer(ty::TyVar(_))) => {
debug!("assemble_candidates_for_unsizing: ambiguous");
candidates.ambiguous = true;
false
}
// [T; n] -> [T].
(&ty::TyArray(_, _), &ty::TySlice(_)) => true,
// Struct<T> -> Struct<U>.
(&ty::TyStruct(def_id_a, _), &ty::TyStruct(def_id_b, _)) => {
def_id_a == def_id_b
}
_ => false
};
if may_apply {
candidates.vec.push(BuiltinUnsizeCandidate);
}
}
///////////////////////////////////////////////////////////////////////////
// WINNOW
//
// Winnowing is the process of attempting to resolve ambiguity by
// probing further. During the winnowing process, we unify all
// type variables (ignoring skolemization) and then we also
// attempt to evaluate recursive bounds to see if they are
// satisfied.
/// Further evaluate `candidate` to decide whether all type parameters match and whether nested
/// obligations are met. Returns true if `candidate` remains viable after this further
/// scrutiny.
fn winnow_candidate<'o>(&mut self,
stack: &TraitObligationStack<'o, 'tcx>,
candidate: &SelectionCandidate<'tcx>)
-> EvaluationResult<'tcx>
{
debug!("winnow_candidate: candidate={:?}", candidate);
let result = self.infcx.probe(|_| {
let candidate = (*candidate).clone();
match self.confirm_candidate(stack.obligation, candidate) {
Ok(selection) => self.winnow_selection(stack.list(),
selection),
Err(error) => EvaluatedToErr(error),
}
});
debug!("winnow_candidate depth={} result={:?}",
stack.obligation.recursion_depth, result);
result
}
fn winnow_selection<'o>(&mut self,
stack: TraitObligationStackList<'o,'tcx>,
selection: Selection<'tcx>)
-> EvaluationResult<'tcx>
{
self.evaluate_predicates_recursively(stack,
selection.nested_obligations().iter())
}
    /// Returns true if `victim` should be dropped in favor of
    /// `other`. Generally speaking we will drop duplicate
    /// candidates and prefer where-clause candidates.
    ///
    /// See the comment for "SelectionCandidate" for more details.
    fn candidate_should_be_dropped_in_favor_of<'o>(&mut self,
                                                   victim: &SelectionCandidate<'tcx>,
                                                   other: &SelectionCandidate<'tcx>)
                                                   -> bool
    {
        // Exact duplicates: keep only one.
        if victim == other {
            return true;
        }
        match other {
            // Object, param (where-clause) and projection candidates take
            // precedence over most other candidate kinds.
            &ObjectCandidate(..) |
            &ParamCandidate(_) | &ProjectionCandidate => match victim {
                &DefaultImplCandidate(..) => {
                    self.tcx().sess.bug(
                        "default implementations shouldn't be recorded \
                         when there are other valid candidates");
                }
                &PhantomFnCandidate => {
                    self.tcx().sess.bug("PhantomFn didn't short-circuit selection");
                }
                &ImplCandidate(..) |
                &ClosureCandidate(..) |
                &FnPointerCandidate(..) |
                &BuiltinObjectCandidate(..) |
                &BuiltinUnsizeCandidate(..) |
                &DefaultImplObjectCandidate(..) |
                &BuiltinCandidate(..) => {
                    // We have a where-clause so don't go around looking
                    // for impls.
                    true
                }
                &ObjectCandidate(..) |
                &ProjectionCandidate => {
                    // Arbitrarily give param candidates priority
                    // over projection and object candidates.
                    true
                },
                &ParamCandidate(..) => false,
                &ErrorCandidate => false // propagate errors
            },
            _ => false
        }
    }
///////////////////////////////////////////////////////////////////////////
// BUILTIN BOUNDS
//
// These cover the traits that are built-in to the language
// itself. This includes `Copy` and `Sized` for sure. For the
// moment, it also includes `Send` / `Sync` and a few others, but
// those will hopefully change to library-defined traits in the
// future.
fn assemble_builtin_bound_candidates<'o>(&mut self,
bound: ty::BuiltinBound,
stack: &TraitObligationStack<'o, 'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>)
-> Result<(),SelectionError<'tcx>>
{
match self.builtin_bound(bound, stack.obligation) {
Ok(If(..)) => {
debug!("builtin_bound: bound={:?}",
bound);
candidates.vec.push(BuiltinCandidate(bound));
Ok(())
}
Ok(ParameterBuiltin) => { Ok(()) }
Ok(AmbiguousBuiltin) => {
debug!("assemble_builtin_bound_candidates: ambiguous builtin");
Ok(candidates.ambiguous = true)
}
Err(e) => { Err(e) }
}
}
    /// Determines, per self-type, whether the builtin `bound` can hold:
    /// `If(tys)` means "holds if all of `tys` satisfy the bound",
    /// `ParameterBuiltin` means "depends on where-clauses / user impls",
    /// `AmbiguousBuiltin` means "not enough inference info yet", and
    /// `Err(Unimplemented)` means the bound definitely fails.
    fn builtin_bound(&mut self,
                     bound: ty::BuiltinBound,
                     obligation: &TraitObligation<'tcx>)
                     -> Result<BuiltinBoundConditions<'tcx>,SelectionError<'tcx>>
    {
        // Note: these tests operate on types that may contain bound
        // regions. To be proper, we ought to skolemize here, but we
        // forego the skolemization and defer it until the
        // confirmation step.
        let self_ty = self.infcx.shallow_resolve(obligation.predicate.0.self_ty());
        return match self_ty.sty {
            ty::TyInfer(ty::IntVar(_)) |
            ty::TyInfer(ty::FloatVar(_)) |
            ty::TyUint(_) |
            ty::TyInt(_) |
            ty::TyBool |
            ty::TyFloat(_) |
            ty::TyBareFn(..) |
            ty::TyChar => {
                // safe for everything
                ok_if(Vec::new())
            }
            ty::TyBox(_) => {  // Box<T>
                match bound {
                    ty::BoundCopy => Err(Unimplemented),
                    ty::BoundSized => ok_if(Vec::new()),
                    ty::BoundSync | ty::BoundSend => {
                        self.tcx().sess.bug("Send/Sync shouldn't occur in builtin_bounds()");
                    }
                }
            }
            ty::TyRawPtr(..) => {     // *const T, *mut T
                match bound {
                    ty::BoundCopy | ty::BoundSized => ok_if(Vec::new()),
                    ty::BoundSync | ty::BoundSend => {
                        self.tcx().sess.bug("Send/Sync shouldn't occur in builtin_bounds()");
                    }
                }
            }
            ty::TyTrait(ref data) => {
                match bound {
                    // Object types are never statically `Sized`.
                    ty::BoundSized => Err(Unimplemented),
                    ty::BoundCopy => {
                        if data.bounds.builtin_bounds.contains(&bound) {
                            ok_if(Vec::new())
                        } else {
                            // Recursively check all supertraits to find out if any further
                            // bounds are required and thus we must fulfill.
                            let principal =
                                data.principal_trait_ref_with_self_ty(self.tcx(),
                                                                      self.tcx().types.err);
                            let copy_def_id = obligation.predicate.def_id();
                            for tr in util::supertraits(self.tcx(), principal) {
                                if tr.def_id() == copy_def_id {
                                    return ok_if(Vec::new())
                                }
                            }
                            Err(Unimplemented)
                        }
                    }
                    ty::BoundSync | ty::BoundSend => {
                        self.tcx().sess.bug("Send/Sync shouldn't occur in builtin_bounds()");
                    }
                }
            }
            ty::TyRef(_, ty::TypeAndMut { ty: _, mutbl }) => {
                // &mut T or &T
                match bound {
                    ty::BoundCopy => {
                        match mutbl {
                            // &mut T is affine and hence never `Copy`
                            ast::MutMutable => Err(Unimplemented),
                            // &T is always copyable
                            ast::MutImmutable => ok_if(Vec::new()),
                        }
                    }
                    ty::BoundSized => ok_if(Vec::new()),
                    ty::BoundSync | ty::BoundSend => {
                        self.tcx().sess.bug("Send/Sync shouldn't occur in builtin_bounds()");
                    }
                }
            }
            ty::TyArray(element_ty, _) => {
                // [T; n]
                match bound {
                    // Copy iff the element type is Copy.
                    ty::BoundCopy => ok_if(vec![element_ty]),
                    ty::BoundSized => ok_if(Vec::new()),
                    ty::BoundSync | ty::BoundSend => {
                        self.tcx().sess.bug("Send/Sync shouldn't occur in builtin_bounds()");
                    }
                }
            }
            ty::TyStr | ty::TySlice(_) => {
                // Dynamically-sized types: neither Copy nor Sized.
                match bound {
                    ty::BoundSync | ty::BoundSend => {
                        self.tcx().sess.bug("Send/Sync shouldn't occur in builtin_bounds()");
                    }
                    ty::BoundCopy | ty::BoundSized => Err(Unimplemented),
                }
            }
            // (T1, ..., Tn) -- meets any bound that all of T1...Tn meet
            ty::TyTuple(ref tys) => ok_if(tys.clone()),
            ty::TyClosure(def_id, ref substs) => {
                // FIXME -- This case is tricky. In the case of by-ref
                // closures particularly, we need the results of
                // inference to decide how to reflect the type of each
                // upvar (the upvar may have type `T`, but the runtime
                // type could be `&mut`, `&`, or just `T`). For now,
                // though, we'll do this unsoundly and assume that all
                // captures are by value. Really what we ought to do
                // is reserve judgement and then intertwine this
                // analysis with closure inference.
                assert_eq!(def_id.krate, ast::LOCAL_CRATE);
                // Unboxed closures shouldn't be
                // implicitly copyable
                if bound == ty::BoundCopy {
                    return Ok(ParameterBuiltin);
                }
                // Upvars are always local variables or references to
                // local variables, and local variables cannot be
                // unsized, so the closure struct as a whole must be
                // Sized.
                if bound == ty::BoundSized {
                    return ok_if(Vec::new());
                }
                ok_if(substs.upvar_tys.clone())
            }
            ty::TyStruct(def_id, substs) => {
                // Holds iff all field types satisfy the bound (via `nominal`).
                let types: Vec<Ty> =
                    self.tcx().struct_fields(def_id, substs).iter()
                                                            .map(|f| f.mt.ty)
                                                            .collect();
                nominal(bound, types)
            }
            ty::TyEnum(def_id, substs) => {
                // Holds iff all variant argument types satisfy the bound.
                let types: Vec<Ty> =
                    self.tcx().substd_enum_variants(def_id, substs)
                        .iter()
                        .flat_map(|variant| &variant.args)
                        .cloned()
                        .collect();
                nominal(bound, types)
            }
            ty::TyProjection(_) | ty::TyParam(_) => {
                // Note: A type parameter is only considered to meet a
                // particular bound if there is a where clause telling
                // us that it does, and that case is handled by
                // `assemble_candidates_from_caller_bounds()`.
                Ok(ParameterBuiltin)
            }
            ty::TyInfer(ty::TyVar(_)) => {
                // Unbound type variable. Might or might not have
                // applicable impls and so forth, depending on what
                // those type variables wind up being bound to.
                debug!("assemble_builtin_bound_candidates: ambiguous builtin");
                Ok(AmbiguousBuiltin)
            }
            ty::TyError => ok_if(Vec::new()),
            ty::TyInfer(ty::FreshTy(_))
            | ty::TyInfer(ty::FreshIntTy(_))
            | ty::TyInfer(ty::FreshFloatTy(_)) => {
                self.tcx().sess.bug(
                    &format!(
                        "asked to assemble builtin bounds of unexpected type: {:?}",
                        self_ty));
            }
        };
        // Helper: the bound holds conditionally on `v` all satisfying it.
        fn ok_if<'tcx>(v: Vec<Ty<'tcx>>)
                       -> Result<BuiltinBoundConditions<'tcx>, SelectionError<'tcx>> {
            Ok(If(ty::Binder(v)))
        }
        // Helper for nominal types (structs/enums): per-bound policy over
        // the collected component types.
        fn nominal<'cx, 'tcx>(bound: ty::BuiltinBound,
                              types: Vec<Ty<'tcx>>)
                              -> Result<BuiltinBoundConditions<'tcx>, SelectionError<'tcx>>
        {
            // First check for markers and other nonsense.
            match bound {
                // Fallback to whatever user-defined impls exist in this case.
                ty::BoundCopy => Ok(ParameterBuiltin),
                // Sized if all the component types are sized.
                ty::BoundSized => ok_if(types),
                // Shouldn't be coming through here.
                ty::BoundSend | ty::BoundSync => unreachable!(),
            }
        }
    }
    /// For default impls, we need to break apart a type into its
    /// "constituent types" -- meaning, the types that it contains.
    ///
    /// Here are some (simple) examples:
    ///
    /// ```
    /// (i32, u32) -> [i32, u32]
    /// Foo where struct Foo { x: i32, y: u32 } -> [i32, u32]
    /// Bar<i32> where struct Bar<T> { x: T, y: u32 } -> [i32, u32]
    /// Zed<i32> where enum Zed { A(T), B(u32) } -> [i32, u32]
    /// ```
    fn constituent_types_for_ty(&self, t: Ty<'tcx>) -> Vec<Ty<'tcx>> {
        match t.sty {
            // Scalar-like types contain nothing further.
            ty::TyUint(_) |
            ty::TyInt(_) |
            ty::TyBool |
            ty::TyFloat(_) |
            ty::TyBareFn(..) |
            ty::TyStr |
            ty::TyError |
            ty::TyInfer(ty::IntVar(_)) |
            ty::TyInfer(ty::FloatVar(_)) |
            ty::TyChar => {
                Vec::new()
            }
            // Callers must handle objects/params/projections/unresolved
            // inference variables before asking for constituent types.
            ty::TyTrait(..) |
            ty::TyParam(..) |
            ty::TyProjection(..) |
            ty::TyInfer(ty::TyVar(_)) |
            ty::TyInfer(ty::FreshTy(_)) |
            ty::TyInfer(ty::FreshIntTy(_)) |
            ty::TyInfer(ty::FreshFloatTy(_)) => {
                self.tcx().sess.bug(
                    &format!(
                        "asked to assemble constituent types of unexpected type: {:?}",
                        t));
            }
            ty::TyBox(referent_ty) => {  // Box<T>
                vec![referent_ty]
            }
            ty::TyRawPtr(ty::TypeAndMut { ty: element_ty, ..}) |
            ty::TyRef(_, ty::TypeAndMut { ty: element_ty, ..}) => {
                vec![element_ty]
            },
            ty::TyArray(element_ty, _) | ty::TySlice(element_ty) => {
                vec![element_ty]
            }
            ty::TyTuple(ref tys) => {
                // (T1, ..., Tn) -- meets any bound that all of T1...Tn meet
                tys.clone()
            }
            ty::TyClosure(def_id, ref substs) => {
                // FIXME(#27086). We are invariant w/r/t our
                // substs.func_substs, but we don't see them as
                // constituent types; this seems RIGHT but also like
                // something that a normal type couldn't simulate. Is
                // this just a gap with the way that PhantomData and
                // OIBIT interact? That is, there is no way to say
                // "make me invariant with respect to this TYPE, but
                // do not act as though I can reach it"
                assert_eq!(def_id.krate, ast::LOCAL_CRATE);
                substs.upvar_tys.clone()
            }
            // for `PhantomData<T>`, we pass `T`
            ty::TyStruct(def_id, substs)
                if Some(def_id) == self.tcx().lang_items.phantom_data() =>
            {
                substs.types.get_slice(TypeSpace).to_vec()
            }
            ty::TyStruct(def_id, substs) => {
                self.tcx().struct_fields(def_id, substs)
                          .iter()
                          .map(|f| f.mt.ty)
                          .collect()
            }
            ty::TyEnum(def_id, substs) => {
                self.tcx().substd_enum_variants(def_id, substs)
                          .iter()
                          .flat_map(|variant| &variant.args)
                          .map(|&ty| ty)
                          .collect()
            }
        }
    }
    /// For each type in `types`, builds the obligation `ty: TraitDefId`
    /// (with an appropriate derived cause), normalizing projections along
    /// the way. Handles late-bound regions by skolemizing each type,
    /// producing obligations on the skolemized form, then plugging the
    /// skolemized regions back into bound form.
    fn collect_predicates_for_types(&mut self,
                                    obligation: &TraitObligation<'tcx>,
                                    trait_def_id: ast::DefId,
                                    types: ty::Binder<Vec<Ty<'tcx>>>)
                                    -> Vec<PredicateObligation<'tcx>>
    {
        // Builtin bounds get a BuiltinDerivedObligation cause; everything
        // else an ImplDerivedObligation cause.
        let derived_cause = match self.tcx().lang_items.to_builtin_kind(trait_def_id) {
            Some(_) => {
                self.derived_cause(obligation, BuiltinDerivedObligation)
            },
            None => {
                self.derived_cause(obligation, ImplDerivedObligation)
            }
        };
        // Because the types were potentially derived from
        // higher-ranked obligations they may reference late-bound
        // regions. For example, `for<'a> Foo<&'a int> : Copy` would
        // yield a type like `for<'a> &'a int`. In general, we
        // maintain the invariant that we never manipulate bound
        // regions, so we have to process these bound regions somehow.
        //
        // The strategy is to:
        //
        // 1. Instantiate those regions to skolemized regions (e.g.,
        //    `for<'a> &'a int` becomes `&0 int`.
        // 2. Produce something like `&'0 int : Copy`
        // 3. Re-bind the regions back to `for<'a> &'a int : Copy`
        // Move the binder into the individual types
        let bound_types: Vec<ty::Binder<Ty<'tcx>>> =
            types.skip_binder()
                 .iter()
                 .map(|&nested_ty| ty::Binder(nested_ty))
                 .collect();
        // For each type, produce a vector of resulting obligations
        let obligations: Result<Vec<Vec<_>>, _> = bound_types.iter().map(|nested_ty| {
            self.infcx.commit_if_ok(|snapshot| {
                let (skol_ty, skol_map) =
                    self.infcx().skolemize_late_bound_regions(nested_ty, snapshot);
                // Normalizing may itself yield additional obligations.
                let Normalized { value: normalized_ty, mut obligations } =
                    project::normalize_with_depth(self,
                                                  obligation.cause.clone(),
                                                  obligation.recursion_depth + 1,
                                                  &skol_ty);
                let skol_obligation =
                    util::predicate_for_trait_def(self.tcx(),
                                                  derived_cause.clone(),
                                                  trait_def_id,
                                                  obligation.recursion_depth + 1,
                                                  normalized_ty,
                                                  vec![]);
                obligations.push(skol_obligation);
                // Re-bind the skolemized regions (step 3 above).
                Ok(self.infcx().plug_leaks(skol_map, snapshot, &obligations))
            })
        }).collect();
        // Flatten those vectors (couldn't do it above due `collect`)
        match obligations {
            Ok(obligations) => obligations.into_iter().flat_map(|o| o).collect(),
            Err(ErrorReported) => Vec::new(),
        }
    }
///////////////////////////////////////////////////////////////////////////
// CONFIRMATION
//
// Confirmation unifies the output type parameters of the trait
// with the values found in the obligation, possibly yielding a
// type error. See `README.md` for more details.
    /// Dispatches on the kind of candidate that was selected, unifying it
    /// with the obligation and producing the final `Selection` together
    /// with any nested obligations that remain to be proven.
    fn confirm_candidate(&mut self,
                         obligation: &TraitObligation<'tcx>,
                         candidate: SelectionCandidate<'tcx>)
                         -> Result<Selection<'tcx>,SelectionError<'tcx>>
    {
        debug!("confirm_candidate({:?}, {:?})",
               obligation,
               candidate);

        match candidate {
            BuiltinCandidate(builtin_bound) => {
                Ok(VtableBuiltin(
                    try!(self.confirm_builtin_candidate(obligation, builtin_bound))))
            }

            PhantomFnCandidate |
            ErrorCandidate => {
                // Nothing further to prove in these cases.
                Ok(VtableBuiltin(VtableBuiltinData { nested: vec![] }))
            }

            ParamCandidate(param) => {
                let obligations = self.confirm_param_candidate(obligation, param);
                Ok(VtableParam(obligations))
            }

            DefaultImplCandidate(trait_def_id) => {
                let data = self.confirm_default_impl_candidate(obligation, trait_def_id);
                Ok(VtableDefaultImpl(data))
            }

            DefaultImplObjectCandidate(trait_def_id) => {
                let data = self.confirm_default_impl_object_candidate(obligation, trait_def_id);
                Ok(VtableDefaultImpl(data))
            }

            ImplCandidate(impl_def_id) => {
                let vtable_impl =
                    try!(self.confirm_impl_candidate(obligation, impl_def_id));
                Ok(VtableImpl(vtable_impl))
            }

            ClosureCandidate(closure_def_id, substs) => {
                let vtable_closure =
                    try!(self.confirm_closure_candidate(obligation, closure_def_id, substs));
                Ok(VtableClosure(vtable_closure))
            }

            BuiltinObjectCandidate => {
                // This indicates something like `(Trait+Send) :
                // Send`. In this case, we know that this holds
                // because that's what the object type is telling us,
                // and there's really no additional obligations to
                // prove and no types in particular to unify etc.
                Ok(VtableParam(Vec::new()))
            }

            ObjectCandidate => {
                let data = self.confirm_object_candidate(obligation);
                Ok(VtableObject(data))
            }

            FnPointerCandidate => {
                let fn_type =
                    try!(self.confirm_fn_pointer_candidate(obligation));
                Ok(VtableFnPointer(fn_type))
            }

            ProjectionCandidate => {
                self.confirm_projection_candidate(obligation);
                Ok(VtableParam(Vec::new()))
            }

            BuiltinUnsizeCandidate => {
                let data = try!(self.confirm_builtin_unsize_candidate(obligation));
                Ok(VtableBuiltin(data))
            }
        }
    }
    /// Confirms a candidate satisfied by a projection bound from the trait
    /// definition itself, re-running the match (outside a probe) so its
    /// unification side effects persist in the inference context.
    fn confirm_projection_candidate(&mut self,
                                    obligation: &TraitObligation<'tcx>)
    {
        // The match already succeeded during candidate assembly, so a
        // failure here would be a compiler bug (hence the assert below).
        let _: Result<(),()> =
            self.infcx.commit_if_ok(|snapshot| {
                let result =
                    self.match_projection_obligation_against_bounds_from_trait(obligation,
                                                                               snapshot);
                assert!(result);
                Ok(())
            });
    }
    /// Confirms a candidate backed by a where-clause (`param`) in scope.
    fn confirm_param_candidate(&mut self,
                               obligation: &TraitObligation<'tcx>,
                               param: ty::PolyTraitRef<'tcx>)
                               -> Vec<PredicateObligation<'tcx>>
    {
        debug!("confirm_param_candidate({:?},{:?})",
               obligation,
               param);

        // During evaluation, we already checked that this
        // where-clause trait-ref could be unified with the obligation
        // trait-ref. Repeat that unification now without any
        // transactional boundary; it should not fail.
        match self.match_where_clause_trait_ref(obligation, param.clone()) {
            Ok(obligations) => obligations,
            Err(()) => {
                self.tcx().sess.bug(
                    &format!("Where clause `{:?}` was applicable to `{:?}` but now is not",
                             param,
                             obligation));
            }
        }
    }
    /// Confirms a builtin bound (e.g. `Send`, `Sized`). By this point the
    /// bound must hold unconditionally; an ambiguous or parameter-dependent
    /// result indicates a compiler bug.
    fn confirm_builtin_candidate(&mut self,
                                 obligation: &TraitObligation<'tcx>,
                                 bound: ty::BuiltinBound)
                                 -> Result<VtableBuiltinData<PredicateObligation<'tcx>>,
                                           SelectionError<'tcx>>
    {
        debug!("confirm_builtin_candidate({:?})",
               obligation);

        match try!(self.builtin_bound(bound, obligation)) {
            If(nested) => Ok(self.vtable_builtin_data(obligation, bound, nested)),
            AmbiguousBuiltin | ParameterBuiltin => {
                self.tcx().sess.span_bug(
                    obligation.cause.span,
                    &format!("builtin bound for {:?} was ambig",
                             obligation));
            }
        }
    }
    /// Builds the vtable data for a builtin bound: each nested constituent
    /// type becomes an obligation that it, too, satisfies the bound.
    fn vtable_builtin_data(&mut self,
                           obligation: &TraitObligation<'tcx>,
                           bound: ty::BuiltinBound,
                           nested: ty::Binder<Vec<Ty<'tcx>>>)
                           -> VtableBuiltinData<PredicateObligation<'tcx>>
    {
        // Map the builtin bound back to its trait `DefId` via lang items.
        let trait_def = match self.tcx().lang_items.from_builtin_kind(bound) {
            Ok(def_id) => def_id,
            Err(_) => {
                self.tcx().sess.bug("builtin trait definition not found");
            }
        };

        let obligations = self.collect_predicates_for_types(obligation, trait_def, nested);

        debug!("vtable_builtin_data: obligations={:?}",
               obligations);

        VtableBuiltinData { nested: obligations }
    }
/// This handles the case where a `impl Foo for ..` impl is being used.
/// The idea is that the impl applies to `X : Foo` if the following conditions are met:
///
/// 1. For each constituent type `Y` in `X`, `Y : Foo` holds
/// 2. For each where-clause `C` declared on `Foo`, `[Self => X] C` holds.
fn confirm_default_impl_candidate(&mut self,
obligation: &TraitObligation<'tcx>,
trait_def_id: ast::DefId)
-> VtableDefaultImplData<PredicateObligation<'tcx>>
{
debug!("confirm_default_impl_candidate({:?}, {:?})",
obligation,
trait_def_id);
// binder is moved below
let self_ty = self.infcx.shallow_resolve(obligation.predicate.skip_binder().self_ty());<|fim▁hole|> let types = self.constituent_types_for_ty(self_ty);
self.vtable_default_impl(obligation, trait_def_id, ty::Binder(types))
}
    /// Confirms a default-impl candidate whose self type is a trait object
    /// marked `#[rustc_reflect_like]`: the nested obligations are drawn
    /// from the object's type parameters and associated-type bindings
    /// rather than from constituent types.
    fn confirm_default_impl_object_candidate(&mut self,
                                             obligation: &TraitObligation<'tcx>,
                                             trait_def_id: ast::DefId)
                                             -> VtableDefaultImplData<PredicateObligation<'tcx>>
    {
        debug!("confirm_default_impl_object_candidate({:?}, {:?})",
               obligation,
               trait_def_id);

        assert!(self.tcx().has_attr(trait_def_id, "rustc_reflect_like"));

        // OK to skip binder, it is reintroduced below
        let self_ty = self.infcx.shallow_resolve(obligation.predicate.skip_binder().self_ty());
        match self_ty.sty {
            ty::TyTrait(ref data) => {
                // OK to skip the binder, it is reintroduced below
                let input_types = data.principal.skip_binder().substs.types.get_slice(TypeSpace);
                let assoc_types = data.bounds.projection_bounds
                                             .iter()
                                             .map(|pb| pb.skip_binder().ty);
                let all_types: Vec<_> = input_types.iter().cloned()
                                                  .chain(assoc_types)
                                                  .collect();

                // reintroduce the two binding levels we skipped, then flatten into one
                let all_types = ty::Binder(ty::Binder(all_types));
                let all_types = self.tcx().flatten_late_bound_regions(&all_types);

                self.vtable_default_impl(obligation, trait_def_id, all_types)
            }
            _ => {
                self.tcx().sess.bug(
                    &format!(
                        "asked to confirm default object implementation for non-object type: {:?}",
                        self_ty));
            }
        }
    }
    /// See `confirm_default_impl_candidate`.
    ///
    /// Combines the per-constituent-type obligations (condition 1) with
    /// the trait's own where-clauses instantiated for this self type
    /// (condition 2).
    fn vtable_default_impl(&mut self,
                           obligation: &TraitObligation<'tcx>,
                           trait_def_id: ast::DefId,
                           nested: ty::Binder<Vec<Ty<'tcx>>>)
                           -> VtableDefaultImplData<PredicateObligation<'tcx>>
    {
        debug!("vtable_default_impl_data: nested={:?}", nested);

        let mut obligations = self.collect_predicates_for_types(obligation,
                                                                trait_def_id,
                                                                nested);

        // Instantiate the trait's where-clauses with this self type,
        // skolemizing (and later plugging back) any late-bound regions.
        let trait_obligations: Result<Vec<_>,()> = self.infcx.commit_if_ok(|snapshot| {
            let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
            let (trait_ref, skol_map) =
                self.infcx().skolemize_late_bound_regions(&poly_trait_ref, snapshot);
            Ok(self.impl_or_trait_obligations(obligation.cause.clone(),
                                              obligation.recursion_depth + 1,
                                              trait_def_id,
                                              &trait_ref.substs,
                                              skol_map,
                                              snapshot))
        });

        // no Errors in that code above
        obligations.append(&mut trait_obligations.unwrap());

        debug!("vtable_default_impl_data: obligations={:?}", obligations);

        VtableDefaultImplData {
            trait_def_id: trait_def_id,
            nested: obligations
        }
    }
    /// Confirms a user-written impl: re-matches the impl (outside of a
    /// probe this time, so unifications persist) and builds the resulting
    /// vtable data.
    fn confirm_impl_candidate(&mut self,
                              obligation: &TraitObligation<'tcx>,
                              impl_def_id: ast::DefId)
                              -> Result<VtableImplData<'tcx, PredicateObligation<'tcx>>,
                                        SelectionError<'tcx>>
    {
        debug!("confirm_impl_candidate({:?},{:?})",
               obligation,
               impl_def_id);

        // First, create the substitutions by matching the impl again,
        // this time not in a probe.
        self.infcx.commit_if_ok(|snapshot| {
            let (substs, skol_map) =
                self.rematch_impl(impl_def_id, obligation,
                                  snapshot);
            debug!("confirm_impl_candidate substs={:?}", substs);
            Ok(self.vtable_impl(impl_def_id, substs, obligation.cause.clone(),
                                obligation.recursion_depth + 1, skol_map, snapshot))
        })
    }
    /// Builds the `VtableImplData` for a confirmed impl: the impl's
    /// where-clause obligations (instantiated with `substs`) plus any
    /// obligations produced while normalizing the substitutions.
    fn vtable_impl(&mut self,
                   impl_def_id: ast::DefId,
                   mut substs: Normalized<'tcx, Substs<'tcx>>,
                   cause: ObligationCause<'tcx>,
                   recursion_depth: usize,
                   skol_map: infer::SkolemizationMap,
                   snapshot: &infer::CombinedSnapshot)
                   -> VtableImplData<'tcx, PredicateObligation<'tcx>>
    {
        debug!("vtable_impl(impl_def_id={:?}, substs={:?}, recursion_depth={}, skol_map={:?})",
               impl_def_id,
               substs,
               recursion_depth,
               skol_map);

        let mut impl_obligations =
            self.impl_or_trait_obligations(cause,
                                           recursion_depth,
                                           impl_def_id,
                                           &substs.value,
                                           skol_map,
                                           snapshot);

        debug!("vtable_impl: impl_def_id={:?} impl_obligations={:?}",
               impl_def_id,
               impl_obligations);

        // Normalization obligations from the substitutions ride along as
        // nested obligations of the selected impl.
        impl_obligations.append(&mut substs.obligations);

        VtableImplData { impl_def_id: impl_def_id,
                         substs: substs.value,
                         nested: impl_obligations }
    }
    /// Confirms that a trait object satisfies an obligation for its
    /// principal trait (or one of its supertraits), and computes the
    /// offset of the selected supertrait's methods within the object's
    /// vtable.
    fn confirm_object_candidate(&mut self,
                                obligation: &TraitObligation<'tcx>)
                                -> VtableObjectData<'tcx>
    {
        debug!("confirm_object_candidate({:?})",
               obligation);

        // FIXME skipping binder here seems wrong -- we should
        // probably flatten the binder from the obligation and the
        // binder from the object. Have to try to make a broken test
        // case that results. -nmatsakis
        let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
        let poly_trait_ref = match self_ty.sty {
            ty::TyTrait(ref data) => {
                data.principal_trait_ref_with_self_ty(self.tcx(), self_ty)
            }
            _ => {
                self.tcx().sess.span_bug(obligation.cause.span,
                                         "object candidate with non-object");
            }
        };

        let mut upcast_trait_ref = None;
        let vtable_base;

        {
            // We want to find the first supertrait in the list of
            // supertraits that we can unify with, and do that
            // unification. We know that there is exactly one in the list
            // where we can unify because otherwise select would have
            // reported an ambiguity. (When we do find a match, also
            // record it for later.)
            let nonmatching =
                util::supertraits(self.tcx(), poly_trait_ref)
                .take_while(|&t| {
                    match
                        self.infcx.commit_if_ok(
                            |_| self.match_poly_trait_ref(obligation, t))
                    {
                        Ok(_) => { upcast_trait_ref = Some(t); false }
                        Err(_) => { true }
                    }
                });

            // Additionally, for each of the nonmatching predicates that
            // we pass over, we sum up the set of number of vtable
            // entries, so that we can compute the offset for the selected
            // trait.
            vtable_base =
                nonmatching.map(|t| util::count_own_vtable_entries(self.tcx(), t))
                           .sum();
        }

        VtableObjectData {
            upcast_trait_ref: upcast_trait_ref.unwrap(),
            vtable_base: vtable_base,
        }
    }
    /// Confirms that a fn pointer type implements the `Fn`-family trait it
    /// was selected for, by equating the fn signature (tupled) with the
    /// trait's input/output types.
    fn confirm_fn_pointer_candidate(&mut self,
                                    obligation: &TraitObligation<'tcx>)
                                    -> Result<ty::Ty<'tcx>,SelectionError<'tcx>>
    {
        debug!("confirm_fn_pointer_candidate({:?})",
               obligation);

        // ok to skip binder; it is reintroduced below
        let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
        let sig = self_ty.fn_sig();
        let trait_ref =
            util::closure_trait_ref_and_return_type(self.tcx(),
                                                    obligation.predicate.def_id(),
                                                    self_ty,
                                                    sig,
                                                    util::TupleArgumentsFlag::Yes)
            .map_bound(|(trait_ref, _)| trait_ref);

        try!(self.confirm_poly_trait_refs(obligation.cause.clone(),
                                          obligation.predicate.to_poly_trait_ref(),
                                          trait_ref));
        Ok(self_ty)
    }
    /// Confirms that a closure implements the `Fn`/`FnMut`/`FnOnce` trait
    /// it was selected for, unifying the closure's (normalized) trait
    /// reference with the obligation's.
    fn confirm_closure_candidate(&mut self,
                                 obligation: &TraitObligation<'tcx>,
                                 closure_def_id: ast::DefId,
                                 substs: &ty::ClosureSubsts<'tcx>)
                                 -> Result<VtableClosureData<'tcx, PredicateObligation<'tcx>>,
                                           SelectionError<'tcx>>
    {
        debug!("confirm_closure_candidate({:?},{:?},{:?})",
               obligation,
               closure_def_id,
               substs);

        // Normalizing the closure's trait-ref may yield extra obligations;
        // these become the `nested` obligations of the vtable.
        let Normalized {
            value: trait_ref,
            obligations
        } = self.closure_trait_ref(obligation, closure_def_id, substs);

        debug!("confirm_closure_candidate(closure_def_id={:?}, trait_ref={:?}, obligations={:?})",
               closure_def_id,
               trait_ref,
               obligations);

        try!(self.confirm_poly_trait_refs(obligation.cause.clone(),
                                          obligation.predicate.to_poly_trait_ref(),
                                          trait_ref));

        Ok(VtableClosureData {
            closure_def_id: closure_def_id,
            substs: substs.clone(),
            nested: obligations
        })
    }
/// In the case of closure types and fn pointers,
/// we currently treat the input type parameters on the trait as
/// outputs. This means that when we have a match we have only
/// considered the self type, so we have to go back and make sure
/// to relate the argument types too. This is kind of wrong, but
/// since we control the full set of impls, also not that wrong,
/// and it DOES yield better error messages (since we don't report
/// errors as if there is no applicable impl, but rather report
/// errors are about mismatched argument types.
///
/// Here is an example. Imagine we have an closure expression
/// and we desugared it so that the type of the expression is
/// `Closure`, and `Closure` expects an int as argument. Then it
/// is "as if" the compiler generated this impl:
///
/// impl Fn(int) for Closure { ... }
///
/// Now imagine our obligation is `Fn(usize) for Closure`. So far
/// we have matched the self-type `Closure`. At this point we'll
/// compare the `int` to `usize` and generate an error.
///
/// Note that this checking occurs *after* the impl has selected,
/// because these output type parameters should not affect the
/// selection of the impl. Therefore, if there is a mismatch, we
/// report an error to the user.
fn confirm_poly_trait_refs(&mut self,
obligation_cause: ObligationCause,
obligation_trait_ref: ty::PolyTraitRef<'tcx>,
expected_trait_ref: ty::PolyTraitRef<'tcx>)
-> Result<(), SelectionError<'tcx>>
{
let origin = infer::RelateOutputImplTypes(obligation_cause.span);
let obligation_trait_ref = obligation_trait_ref.clone();
match self.infcx.sub_poly_trait_refs(false,
origin,
expected_trait_ref.clone(),
obligation_trait_ref.clone()) {
Ok(()) => Ok(()),
Err(e) => Err(OutputTypeParameterMismatch(expected_trait_ref, obligation_trait_ref, e))
}
}
    /// Confirms a built-in `Unsize` coercion. Handles the four supported
    /// shapes: trait-object upcasts, `T -> Trait` object creation,
    /// `[T; n] -> [T]` array unsizing, and `Struct<T> -> Struct<U>` where
    /// only the (unsized) tail field changes.
    fn confirm_builtin_unsize_candidate(&mut self,
                                        obligation: &TraitObligation<'tcx>,)
                                        -> Result<VtableBuiltinData<PredicateObligation<'tcx>>,
                                                  SelectionError<'tcx>> {
        let tcx = self.tcx();

        // assemble_candidates_for_unsizing should ensure there are no late bound
        // regions here. See the comment there for more details.
        let source = self.infcx.shallow_resolve(
            tcx.no_late_bound_regions(&obligation.self_ty()).unwrap());
        let target = self.infcx.shallow_resolve(obligation.predicate.0.input_types()[0]);

        debug!("confirm_builtin_unsize_candidate(source={:?}, target={:?})",
               source, target);

        let mut nested = vec![];
        match (&source.sty, &target.sty) {
            // Trait+Kx+'a -> Trait+Ky+'b (upcasts).
            (&ty::TyTrait(ref data_a), &ty::TyTrait(ref data_b)) => {
                // See assemble_candidates_for_unsizing for more info.
                let bounds = ty::ExistentialBounds {
                    region_bound: data_b.bounds.region_bound,
                    builtin_bounds: data_b.bounds.builtin_bounds,
                    projection_bounds: data_a.bounds.projection_bounds.clone(),
                };

                let new_trait = tcx.mk_trait(data_a.principal.clone(), bounds);
                let origin = infer::Misc(obligation.cause.span);
                if self.infcx.sub_types(false, origin, new_trait, target).is_err() {
                    return Err(Unimplemented);
                }

                // Register one obligation for 'a: 'b.
                let cause = ObligationCause::new(obligation.cause.span,
                                                 obligation.cause.body_id,
                                                 ObjectCastObligation(target));
                let outlives = ty::OutlivesPredicate(data_a.bounds.region_bound,
                                                     data_b.bounds.region_bound);
                nested.push(Obligation::with_depth(cause,
                                                   obligation.recursion_depth + 1,
                                                   ty::Binder(outlives).to_predicate()));
            }

            // T -> Trait.
            (_, &ty::TyTrait(ref data)) => {
                let object_did = data.principal_def_id();
                if !object_safety::is_object_safe(tcx, object_did) {
                    return Err(TraitNotObjectSafe(object_did));
                }

                let cause = ObligationCause::new(obligation.cause.span,
                                                 obligation.cause.body_id,
                                                 ObjectCastObligation(target));
                let mut push = |predicate| {
                    nested.push(Obligation::with_depth(cause.clone(),
                                                       obligation.recursion_depth + 1,
                                                       predicate));
                };

                // Create the obligation for casting from T to Trait.
                push(data.principal_trait_ref_with_self_ty(tcx, source).to_predicate());

                // We can only make objects from sized types.
                let mut builtin_bounds = data.bounds.builtin_bounds;
                builtin_bounds.insert(ty::BoundSized);

                // Create additional obligations for all the various builtin
                // bounds attached to the object cast. (In other words, if the
                // object type is Foo+Send, this would create an obligation
                // for the Send check.)
                for bound in &builtin_bounds {
                    if let Ok(tr) = util::trait_ref_for_builtin_bound(tcx, bound, source) {
                        push(tr.to_predicate());
                    } else {
                        return Err(Unimplemented);
                    }
                }

                // Create obligations for the projection predicates.
                for bound in data.projection_bounds_with_self_ty(tcx, source) {
                    push(bound.to_predicate());
                }

                // If the type is `Foo+'a`, ensures that the type
                // being cast to `Foo+'a` outlives `'a`:
                let outlives = ty::OutlivesPredicate(source,
                                                     data.bounds.region_bound);
                push(ty::Binder(outlives).to_predicate());
            }

            // [T; n] -> [T].
            (&ty::TyArray(a, _), &ty::TySlice(b)) => {
                let origin = infer::Misc(obligation.cause.span);
                if self.infcx.sub_types(false, origin, a, b).is_err() {
                    return Err(Unimplemented);
                }
            }

            // Struct<T> -> Struct<U>.
            (&ty::TyStruct(def_id, substs_a), &ty::TyStruct(_, substs_b)) => {
                let fields = tcx.lookup_struct_fields(def_id).iter().map(|f| {
                    tcx.lookup_field_type_unsubstituted(def_id, f.id)
                }).collect::<Vec<_>>();

                // The last field of the structure has to exist and contain type parameters.
                let field = if let Some(&field) = fields.last() {
                    field
                } else {
                    return Err(Unimplemented);
                };
                let mut ty_params = vec![];
                for ty in field.walk() {
                    if let ty::TyParam(p) = ty.sty {
                        assert!(p.space == TypeSpace);
                        let idx = p.idx as usize;
                        if !ty_params.contains(&idx) {
                            ty_params.push(idx);
                        }
                    }
                }
                if ty_params.is_empty() {
                    return Err(Unimplemented);
                }

                // Replace type parameters used in unsizing with
                // TyError and ensure they do not affect any other fields.
                // This could be checked after type collection for any struct
                // with a potentially unsized trailing field.
                let mut new_substs = substs_a.clone();
                for &i in &ty_params {
                    new_substs.types.get_mut_slice(TypeSpace)[i] = tcx.types.err;
                }
                for &ty in fields.split_last().unwrap().1 {
                    if ty.subst(tcx, &new_substs).references_error() {
                        return Err(Unimplemented);
                    }
                }

                // Extract Field<T> and Field<U> from Struct<T> and Struct<U>.
                let inner_source = field.subst(tcx, substs_a);
                let inner_target = field.subst(tcx, substs_b);

                // Check that the source structure with the target's
                // type parameters is a subtype of the target.
                for &i in &ty_params {
                    let param_b = *substs_b.types.get(TypeSpace, i);
                    new_substs.types.get_mut_slice(TypeSpace)[i] = param_b;
                }
                let new_struct = tcx.mk_struct(def_id, tcx.mk_substs(new_substs));
                let origin = infer::Misc(obligation.cause.span);
                if self.infcx.sub_types(false, origin, new_struct, target).is_err() {
                    return Err(Unimplemented);
                }

                // Construct the nested Field<T>: Unsize<Field<U>> predicate.
                nested.push(util::predicate_for_trait_def(tcx,
                                                          obligation.cause.clone(),
                                                          obligation.predicate.def_id(),
                                                          obligation.recursion_depth + 1,
                                                          inner_source,
                                                          vec![inner_target]));
            }

            _ => unreachable!()
        };

        Ok(VtableBuiltinData { nested: nested })
    }
///////////////////////////////////////////////////////////////////////////
// Matching
//
// Matching is a common path used for both evaluation and
// confirmation. It basically unifies types that appear in impls
// and traits. This does affect the surrounding environment;
// therefore, when used during evaluation, match routines must be
// run inside of a `probe()` so that their side-effects are
// contained.
    /// Re-runs `match_impl` for an impl that is already known to match
    /// (from candidate assembly); failure here indicates a compiler bug.
    fn rematch_impl(&mut self,
                    impl_def_id: ast::DefId,
                    obligation: &TraitObligation<'tcx>,
                    snapshot: &infer::CombinedSnapshot)
                    -> (Normalized<'tcx, Substs<'tcx>>, infer::SkolemizationMap)
    {
        match self.match_impl(impl_def_id, obligation, snapshot) {
            Ok((substs, skol_map)) => (substs, skol_map),
            Err(()) => {
                self.tcx().sess.bug(
                    &format!("Impl {:?} was matchable against {:?} but now is not",
                             impl_def_id,
                             obligation));
            }
        }
    }
    /// Attempts to unify the impl's trait reference (instantiated with
    /// fresh type variables) with the (skolemized) obligation. On success,
    /// returns the inferred substitutions, any normalization obligations,
    /// and the skolemization map; `Err(())` means the impl does not apply.
    fn match_impl(&mut self,
                  impl_def_id: ast::DefId,
                  obligation: &TraitObligation<'tcx>,
                  snapshot: &infer::CombinedSnapshot)
                  -> Result<(Normalized<'tcx, Substs<'tcx>>,
                             infer::SkolemizationMap), ()>
    {
        let impl_trait_ref = self.tcx().impl_trait_ref(impl_def_id).unwrap();

        // Before we create the substitutions and everything, first
        // consider a "quick reject". This avoids creating more types
        // and so forth that we need to.
        if self.fast_reject_trait_refs(obligation, &impl_trait_ref) {
            return Err(());
        }

        let (skol_obligation, skol_map) = self.infcx().skolemize_late_bound_regions(
            &obligation.predicate,
            snapshot);
        let skol_obligation_trait_ref = skol_obligation.trait_ref;

        let impl_substs = util::fresh_type_vars_for_impl(self.infcx,
                                                         obligation.cause.span,
                                                         impl_def_id);

        let impl_trait_ref = impl_trait_ref.subst(self.tcx(),
                                                  &impl_substs);

        // Associated types in the impl's trait-ref must be normalized
        // before unification.
        let impl_trait_ref =
            project::normalize_with_depth(self,
                                          obligation.cause.clone(),
                                          obligation.recursion_depth + 1,
                                          &impl_trait_ref);

        debug!("match_impl(impl_def_id={:?}, obligation={:?}, \
               impl_trait_ref={:?}, skol_obligation_trait_ref={:?})",
               impl_def_id,
               obligation,
               impl_trait_ref,
               skol_obligation_trait_ref);

        let origin = infer::RelateOutputImplTypes(obligation.cause.span);
        if let Err(e) = self.infcx.sub_trait_refs(false,
                                                  origin,
                                                  impl_trait_ref.value.clone(),
                                                  skol_obligation_trait_ref) {
            debug!("match_impl: failed sub_trait_refs due to `{}`", e);
            return Err(());
        }

        // Ensure no skolemized region escaped into the inferred substs.
        if let Err(e) = self.infcx.leak_check(&skol_map, snapshot) {
            debug!("match_impl: failed leak check due to `{}`", e);
            return Err(());
        }

        debug!("match_impl: success impl_substs={:?}", impl_substs);
        Ok((Normalized {
            value: impl_substs,
            obligations: impl_trait_ref.obligations
        }, skol_map))
    }
fn fast_reject_trait_refs(&mut self,
obligation: &TraitObligation,
impl_trait_ref: &ty::TraitRef)
-> bool
{
// We can avoid creating type variables and doing the full
// substitution if we find that any of the input types, when
// simplified, do not match.
obligation.predicate.0.input_types().iter()
.zip(impl_trait_ref.input_types())
.any(|(&obligation_ty, &impl_ty)| {
let simplified_obligation_ty =
fast_reject::simplify_type(self.tcx(), obligation_ty, true);
let simplified_impl_ty =
fast_reject::simplify_type(self.tcx(), impl_ty, false);
simplified_obligation_ty.is_some() &&
simplified_impl_ty.is_some() &&
simplified_obligation_ty != simplified_impl_ty
})
}
/// Normalize `where_clause_trait_ref` and try to match it against
/// `obligation`. If successful, return any predicates that
/// result from the normalization. Normalization is necessary
/// because where-clauses are stored in the parameter environment
/// unnormalized.
fn match_where_clause_trait_ref(&mut self,
obligation: &TraitObligation<'tcx>,
where_clause_trait_ref: ty::PolyTraitRef<'tcx>)
-> Result<Vec<PredicateObligation<'tcx>>,()>
{
try!(self.match_poly_trait_ref(obligation, where_clause_trait_ref));
Ok(Vec::new())
}
/// Returns `Ok` if `poly_trait_ref` being true implies that the
/// obligation is satisfied.
fn match_poly_trait_ref(&self,
obligation: &TraitObligation<'tcx>,
poly_trait_ref: ty::PolyTraitRef<'tcx>)
-> Result<(),()>
{
debug!("match_poly_trait_ref: obligation={:?} poly_trait_ref={:?}",
obligation,
poly_trait_ref);
let origin = infer::RelateOutputImplTypes(obligation.cause.span);
match self.infcx.sub_poly_trait_refs(false,
origin,
poly_trait_ref,
obligation.predicate.to_poly_trait_ref()) {
Ok(()) => Ok(()),
Err(_) => Err(()),
}
}
    /// Determines whether the self type declared against
    /// `impl_def_id` matches `obligation_self_ty`. If successful,
    /// returns the substitutions used to make them match. See
    /// `match_impl()`. For example, if `impl_def_id` is declared
    /// as:
    ///
    ///     impl<T:Copy> Foo for Box<T> { ... }
    ///
    /// and `obligation_self_ty` is `int`, we'd get back an `Err(_)`
    /// result. But if `obligation_self_ty` were `Box<int>`, we'd get
    /// back `Ok(T=int)`.
    fn match_inherent_impl(&mut self,
                           impl_def_id: ast::DefId,
                           obligation_cause: &ObligationCause,
                           obligation_self_ty: Ty<'tcx>)
                           -> Result<Substs<'tcx>,()>
    {
        // Create fresh type variables for each type parameter declared
        // on the impl etc.
        let impl_substs = util::fresh_type_vars_for_impl(self.infcx,
                                                         obligation_cause.span,
                                                         impl_def_id);

        // Find the self type for the impl.
        let impl_self_ty = self.tcx().lookup_item_type(impl_def_id).ty;
        let impl_self_ty = impl_self_ty.subst(self.tcx(), &impl_substs);

        debug!("match_impl_self_types(obligation_self_ty={:?}, impl_self_ty={:?})",
               obligation_self_ty,
               impl_self_ty);

        // Unifying the two self types fills in the fresh variables in
        // `impl_substs`, which we then return.
        match self.match_self_types(obligation_cause,
                                    impl_self_ty,
                                    obligation_self_ty) {
            Ok(()) => {
                debug!("Matched impl_substs={:?}", impl_substs);
                Ok(impl_substs)
            }
            Err(()) => {
                debug!("NoMatch");
                Err(())
            }
        }
    }
fn match_self_types(&mut self,
cause: &ObligationCause,
// The self type provided by the impl/caller-obligation:
provided_self_ty: Ty<'tcx>,
// The self type the obligation is for:
required_self_ty: Ty<'tcx>)
-> Result<(),()>
{
// FIXME(#5781) -- equating the types is stronger than
// necessary. Should consider variance of trait w/r/t Self.
let origin = infer::RelateSelfType(cause.span);
match self.infcx.eq_types(false,
origin,
provided_self_ty,
required_self_ty) {
Ok(()) => Ok(()),
Err(_) => Err(()),
}
}
///////////////////////////////////////////////////////////////////////////
// Miscellany
fn match_fresh_trait_refs(&self,
previous: &ty::PolyTraitRef<'tcx>,
current: &ty::PolyTraitRef<'tcx>)
-> bool
{
let mut matcher = ty_match::Match::new(self.tcx());
matcher.relate(previous, current).is_ok()
}
    /// Pushes `obligation` onto the obligation stack, freshening its
    /// trait-ref so that structurally identical obligations can later be
    /// recognized (see `match_fresh_trait_refs`).
    fn push_stack<'o,'s:'o>(&mut self,
                            previous_stack: TraitObligationStackList<'s, 'tcx>,
                            obligation: &'o TraitObligation<'tcx>)
                            -> TraitObligationStack<'o, 'tcx>
    {
        let fresh_trait_ref =
            obligation.predicate.to_poly_trait_ref().fold_with(&mut self.freshener);

        TraitObligationStack {
            obligation: obligation,
            fresh_trait_ref: fresh_trait_ref,
            previous: previous_stack,
        }
    }
    /// Computes the trait reference a closure would satisfy for the
    /// obligation's trait (e.g. `Closure : FnMut(A) -> B`), without
    /// normalizing any associated types it may contain.
    fn closure_trait_ref_unnormalized(&mut self,
                                      obligation: &TraitObligation<'tcx>,
                                      closure_def_id: ast::DefId,
                                      substs: &ty::ClosureSubsts<'tcx>)
                                      -> ty::PolyTraitRef<'tcx>
    {
        let closure_type = self.infcx.closure_type(closure_def_id, substs);
        let ty::Binder((trait_ref, _)) =
            util::closure_trait_ref_and_return_type(self.tcx(),
                                                    obligation.predicate.def_id(),
                                                    obligation.predicate.0.self_ty(), // (1)
                                                    &closure_type.sig,
                                                    util::TupleArgumentsFlag::No);

        // (1) Feels icky to skip the binder here, but OTOH we know
        // that the self-type is an unboxed closure type and hence is
        // in fact unparameterized (or at least does not reference any
        // regions bound in the obligation). Still probably some
        // refactoring could make this nicer.

        ty::Binder(trait_ref)
    }
    /// Like `closure_trait_ref_unnormalized`, but also normalizes the
    /// resulting trait reference (closure signatures may contain
    /// associated types), returning any obligations the normalization
    /// produced.
    fn closure_trait_ref(&mut self,
                         obligation: &TraitObligation<'tcx>,
                         closure_def_id: ast::DefId,
                         substs: &ty::ClosureSubsts<'tcx>)
                         -> Normalized<'tcx, ty::PolyTraitRef<'tcx>>
    {
        let trait_ref = self.closure_trait_ref_unnormalized(
            obligation, closure_def_id, substs);

        // A closure signature can contain associated types which
        // must be normalized.
        normalize_with_depth(self,
                             obligation.cause.clone(),
                             obligation.recursion_depth+1,
                             &trait_ref)
    }
    /// Returns the obligations that are implied by instantiating an
    /// impl or trait. The obligations are substituted and fully
    /// normalized. This is used when confirming an impl or default
    /// impl.
    fn impl_or_trait_obligations(&mut self,
                                 cause: ObligationCause<'tcx>,
                                 recursion_depth: usize,
                                 def_id: ast::DefId, // of impl or trait
                                 substs: &Substs<'tcx>, // for impl or trait
                                 skol_map: infer::SkolemizationMap,
                                 snapshot: &infer::CombinedSnapshot)
                                 -> Vec<PredicateObligation<'tcx>>
    {
        debug!("impl_or_trait_obligations(def_id={:?})", def_id);

        let predicates = self.tcx().lookup_predicates(def_id);
        let predicates = predicates.instantiate(self.tcx(), substs);
        let predicates = normalize_with_depth(self, cause.clone(), recursion_depth, &predicates);
        // Replace skolemized regions by their original bound regions.
        let mut predicates = self.infcx().plug_leaks(skol_map, snapshot, &predicates);
        let mut obligations =
            util::predicates_for_generics(cause,
                                          recursion_depth,
                                          &predicates.value);
        // Include any obligations that arose from normalization itself.
        obligations.append(&mut predicates.obligations);
        obligations
    }
    // `recursion_depth >= 0` below is trivially true for a usize; the
    // allow silences the lint while keeping the (currently always taken)
    // branch structure described in the NOTE inside.
    #[allow(unused_comparisons)]
    fn derived_cause(&self,
                     obligation: &TraitObligation<'tcx>,
                     variant: fn(DerivedObligationCause<'tcx>) -> ObligationCauseCode<'tcx>)
                     -> ObligationCause<'tcx>
    {
        /*!
         * Creates a cause for obligations that are derived from
         * `obligation` by a recursive search (e.g., for a builtin
         * bound, or eventually a `impl Foo for ..`). If `obligation`
         * is itself a derived obligation, this is just a clone, but
         * otherwise we create a "derived obligation" cause so as to
         * keep track of the original root obligation for error
         * reporting.
         */

        // NOTE(flaper87): As of now, it keeps track of the whole error
        // chain. Ideally, we should have a way to configure this either
        // by using -Z verbose or just a CLI argument.
        if obligation.recursion_depth >= 0 {
            let derived_cause = DerivedObligationCause {
                parent_trait_ref: obligation.predicate.to_poly_trait_ref(),
                parent_code: Rc::new(obligation.cause.code.clone()),
            };
            ObligationCause::new(obligation.cause.span,
                                 obligation.cause.body_id,
                                 variant(derived_cause))
        } else {
            obligation.cause.clone()
        }
    }
}
impl<'tcx> SelectionCache<'tcx> {
    /// Creates an empty selection cache.
    pub fn new() -> SelectionCache<'tcx> {
        let hashmap = RefCell::new(FnvHashMap());
        SelectionCache { hashmap: hashmap }
    }
}
impl<'o,'tcx> TraitObligationStack<'o,'tcx> {
    /// Returns a list view whose head is this stack frame.
    fn list(&'o self) -> TraitObligationStackList<'o,'tcx> {
        TraitObligationStackList::with(self)
    }

    /// Alias for `list()`: the list type is itself an iterator over the
    /// stack frames, from innermost outward.
    fn iter(&'o self) -> TraitObligationStackList<'o,'tcx> {
        TraitObligationStackList::with(self)
    }
}
/// A (possibly empty) view onto a linked `TraitObligationStack`; doubles
/// as an iterator over the stack frames, from innermost outward.
#[derive(Copy, Clone)]
struct TraitObligationStackList<'o,'tcx:'o> {
    // `None` means the list is empty (no enclosing obligations).
    head: Option<&'o TraitObligationStack<'o,'tcx>>
}
impl<'o,'tcx> TraitObligationStackList<'o,'tcx> {
    /// An empty list (no enclosing obligations).
    fn empty() -> TraitObligationStackList<'o,'tcx> {
        TraitObligationStackList { head: None }
    }

    /// A list whose first element is `r`.
    fn with(r: &'o TraitObligationStack<'o,'tcx>) -> TraitObligationStackList<'o,'tcx> {
        TraitObligationStackList { head: Some(r) }
    }
}
impl<'o,'tcx> Iterator for TraitObligationStackList<'o,'tcx> {
    type Item = &'o TraitObligationStack<'o,'tcx>;

    /// Yields the current head and advances to the previous stack frame.
    fn next(&mut self) -> Option<&'o TraitObligationStack<'o,'tcx>> {
        if let Some(o) = self.head {
            *self = o.previous;
            Some(o)
        } else {
            None
        }
    }
}
impl<'o,'tcx> fmt::Debug for TraitObligationStack<'o,'tcx> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // Only the obligation itself is printed; following the `previous`
        // links would make the output unreadably long.
        write!(formatter, "TraitObligationStack({:?})", self.obligation)
    }
}
impl<'tcx> EvaluationResult<'tcx> {
    /// Whether this evaluation result leaves the candidate viable: only a
    /// definite `Unimplemented` rules it out; success, ambiguity, and the
    /// other error kinds all keep it in play.
    fn may_apply(&self) -> bool {
        match *self {
            EvaluatedToErr(Unimplemented) =>
                false,
            EvaluatedToOk |
            EvaluatedToAmbig |
            EvaluatedToErr(OutputTypeParameterMismatch(..)) |
            EvaluatedToErr(TraitNotObjectSafe(_)) =>
                true,
        }
    }
}
impl MethodMatchResult {
pub fn may_apply(&self) -> bool {
match *self {
MethodMatched(_) => true,
MethodAmbiguous(_) => true,
MethodDidNotMatch => false,
}
}
}<|fim▁end|> | |
package ch.bisi.koukan.job;
import ch.bisi.koukan.provider.XMLExchangeRatesProvider;
import ch.bisi.koukan.repository.DataAccessException;
import ch.bisi.koukan.repository.ExchangeRatesRepository;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
/**
* Executes scheduled tasks for updating the in memory exchange rates
* by querying the European Central Bank endpoints.
*/
@Component
public class ECBDataLoaderScheduler {
private static final Logger logger = LoggerFactory.getLogger(ECBDataLoaderScheduler.class);
private final XMLExchangeRatesProvider xmlExchangeRatesProvider;
private final ExchangeRatesRepository exchangeRatesRepository;
private final URL dailyEndpoint;
private final URL pastDaysEndpoint;
/**
* Instantiates a new {@link ECBDataLoaderScheduler}.
*
* @param xmlExchangeRatesProvider the provider of exchange rates
* @param exchangeRatesRepository the repository
* @param dailyEndpoint the ECB daily endpoint {@link URL}
* @param pastDaysEndpoint the ECB endpoint {@link URL} for retrieving past days data
*/
  @Autowired
  public ECBDataLoaderScheduler(
      @Qualifier("ECBProvider") final XMLExchangeRatesProvider xmlExchangeRatesProvider,
      final ExchangeRatesRepository exchangeRatesRepository,
      @Qualifier("dailyEndpoint") final URL dailyEndpoint,
      @Qualifier("pastDaysEndpoint") final URL pastDaysEndpoint) {
    // Collaborators are injected by Spring; the qualifiers disambiguate the
    // provider bean and the two endpoint URL beans.
    this.xmlExchangeRatesProvider = xmlExchangeRatesProvider;
    this.exchangeRatesRepository = exchangeRatesRepository;
    this.dailyEndpoint = dailyEndpoint;
    this.pastDaysEndpoint = pastDaysEndpoint;
  }
/**
* Retrieves the whole exchange rates daily data.
*
* @throws IOException in case of the problems accessing the ECB endpoint<|fim▁hole|> * @throws XMLStreamException in case of problems parsing the ECB XML
* @throws DataAccessException in case of problems accessing the underlying data
*/
  @Scheduled(initialDelay = 0, fixedRateString = "${daily.rates.update.rate}")
  public void loadDailyData() throws IOException, XMLStreamException, DataAccessException {
    // try-with-resources guarantees the HTTP stream is closed even on failure.
    try (final InputStream inputStream = dailyEndpoint.openStream()) {
      logger.info("Updating ECB daily exchange rates data");
      loadData(inputStream);
    }
  }
/**
* Retrieves the whole exchange rates data for past days.
*
* @throws IOException in case of the problems accessing the ECB endpoint
* @throws XMLStreamException in case of problems parsing the ECB XML
* @throws DataAccessException in case of problems accessing the underlying data
*/
  @Scheduled(initialDelay = 0, fixedRateString = "${past.days.rates.update.rate}")
  public void loadPastDaysData() throws IOException, XMLStreamException, DataAccessException {
    // try-with-resources guarantees the HTTP stream is closed even on failure.
    try (final InputStream inputStream = pastDaysEndpoint.openStream()) {
      logger.info("Updating ECB exchange rates data for the past 90 days");
      loadData(inputStream);
    }
  }
/**
* Loads exchange rates data from the given {@link InputStream}.
*
* @param inputStream the {@link InputStream}
* @throws XMLStreamException in case of problems parsing the ECB XML
* @throws DataAccessException in case of problems accessing the underlying data
*/
  private void loadData(final InputStream inputStream)
      throws XMLStreamException, DataAccessException {
    // NOTE(review): the XMLStreamReader is never closed explicitly; callers
    // close the underlying InputStream instead — confirm that is sufficient
    // for the StAX implementation in use.
    final XMLStreamReader xmlStreamReader = XMLInputFactory.newInstance()
        .createXMLStreamReader(inputStream);
    exchangeRatesRepository.save(xmlExchangeRatesProvider.retrieveAll(xmlStreamReader));
  }
}<|fim▁end|> | |
<|file_name|>viewport.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! An adapter which makes widgets scrollable
use gtk::ShadowType;
use gtk::cast::GTK_VIEWPORT;
use gtk::{mod, ffi};
/// GtkViewport — An adapter which makes widgets scrollable
struct_Widget!(Viewport)
impl Viewport {
pub fn new(hadjustment: >k::Adjustment, vadjustment: >k::Adjustment) -> Option<Viewport> {
let tmp_pointer = unsafe { ffi::gtk_viewport_new(hadjustment.get_pointer(), vadjustment.get_pointer()) };
check_pointer!(tmp_pointer, Viewport)
}
pub fn get_shadow_type(&self) -> gtk::ShadowType {
unsafe {
ffi::gtk_viewport_get_shadow_type(GTK_VIEWPORT(self.pointer))<|fim▁hole|>
    /// Sets the shadow style drawn around the viewport's contents.
    pub fn set_shadow_type(&mut self, ty: gtk::ShadowType) {
        unsafe {
            ffi::gtk_viewport_set_shadow_type(GTK_VIEWPORT(self.pointer), ty)
        }
    }
}
impl_drop!(Viewport)
impl_TraitWidget!(Viewport)
impl gtk::ContainerTrait for Viewport {}
impl gtk::BinTrait for Viewport {}
impl gtk::ScrollableTrait for Viewport {}
impl_widget_events!(Viewport)<|fim▁end|> | }
} |
<|file_name|>1A.py<|end_file_name|><|fim▁begin|>num = input("What is the numerator")
dem = input("What is the denominator")
counta = 2
countb = 2
def math (num,dem):
remainsa = 1
remainsb = 1
remains = remainsa - remainsb
while remains > 0:
a = num / counta
b = dem / countb<|fim▁hole|> if remains =<|fim▁end|> | remainsa = num % counta
remainsb = num % countb
remains = remainsa - remainsb |
<|file_name|>nested.py<|end_file_name|><|fim▁begin|>def g(x):
import random
return int(x * random.random())
def h(x):
    """Sum of ``g`` applied to every element of ``x``."""
    # NOTE(review): relies on the global `tmap` bound in the __main__ block;
    # calling this before that binding exists raises NameError.
    return sum(tmap(g, x))
def f(x,y):
return x*y<|fim▁hole|>
x = range(10)
y = range(5)
if __name__ == '__main__':
from pathos.helpers import freeze_support
freeze_support()
from pathos.pools import ProcessPool, ThreadPool
amap = ProcessPool().amap
tmap = ThreadPool().map
print amap(f, [h(x),h(x),h(x),h(x),h(x)], y).get()
def _f(m, g, x, y):
return sum(m(g,x))*y
print amap(_f, [tmap]*len(y), [g]*len(y), [x]*len(y), y).get()
from math import sin, cos
print amap(tmap, [sin,cos], [range(10),range(10)]).get()<|fim▁end|> | |
<|file_name|>ckanutils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanutils
~~~~~~~~~
Provides methods for interacting with a CKAN instance
Examples:
literal blocks::
python example_google.py
Attributes:
CKAN_KEYS (List[str]): available CKAN keyword arguments.
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import requests
import ckanapi
import itertools as it
from os import environ, path as p
from datetime import datetime as dt
from operator import itemgetter
from pprint import pprint
from ckanapi import NotFound, NotAuthorized, ValidationError
from tabutils import process as pr, io, fntools as ft, convert as cv
__version__ = '0.14.9'
__title__ = 'ckanutils'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility library'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
CKAN_KEYS = ['hash_table', 'remote', 'api_key', 'ua', 'force', 'quiet']
API_KEY_ENV = 'CKAN_API_KEY'
REMOTE_ENV = 'CKAN_REMOTE_URL'
UA_ENV = 'CKAN_USER_AGENT'
DEF_USER_AGENT = 'ckanutils/%s' % __version__
DEF_HASH_PACK = 'hash-table'
DEF_HASH_RES = 'hash-table.csv'
CHUNKSIZE_ROWS = 10 ** 3
CHUNKSIZE_BYTES = 2 ** 20
ENCODING = 'utf-8'
class CKAN(object):
"""Interacts with a CKAN instance.
Attributes:
force (bool): Force.
verbose (bool): Print debug statements.
quiet (bool): Suppress debug statements.
address (str): CKAN url.
hash_table (str): The hash table package id.
keys (List[str]):
"""
def __init__(self, **kwargs):
"""Initialization method.
Args:
**kwargs: Keyword arguments.
Kwargs:
hash_table (str): The hash table package id.
remote (str): The remote ckan url.
api_key (str): The ckan api key.
ua (str): The user agent.
force (bool): Force (default: True).
quiet (bool): Suppress debug statements (default: False).
Returns:
New instance of :class:`CKAN`
Examples:
>>> CKAN() #doctest: +ELLIPSIS
<ckanutils.CKAN object at 0x...>
"""
default_ua = environ.get(UA_ENV, DEF_USER_AGENT)
def_remote = environ.get(REMOTE_ENV)
def_api_key = environ.get(API_KEY_ENV)
remote = kwargs.get('remote', def_remote)
self.api_key = kwargs.get('api_key', def_api_key)
self.force = kwargs.get('force', True)
self.quiet = kwargs.get('quiet')
self.user_agent = kwargs.get('ua', default_ua)
self.verbose = not self.quiet
self.hash_table = kwargs.get('hash_table', DEF_HASH_PACK)
ckan_kwargs = {'apikey': self.api_key, 'user_agent': self.user_agent}
attr = 'RemoteCKAN' if remote else 'LocalCKAN'
ckan = getattr(ckanapi, attr)(remote, **ckan_kwargs)
self.address = ckan.address
self.package_show = ckan.action.package_show
try:
self.hash_table_pack = self.package_show(id=self.hash_table)
except NotFound:
self.hash_table_pack = None
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
self.hash_table_pack = None
else:
raise err
try:
self.hash_table_id = self.hash_table_pack['resources'][0]['id']
except (IndexError, TypeError):
self.hash_table_id = None
# shortcuts
self.datastore_search = ckan.action.datastore_search
self.datastore_create = ckan.action.datastore_create
self.datastore_delete = ckan.action.datastore_delete
self.datastore_upsert = ckan.action.datastore_upsert
self.datastore_search = ckan.action.datastore_search
self.resource_show = ckan.action.resource_show
self.resource_create = ckan.action.resource_create
self.package_create = ckan.action.package_create
self.package_update = ckan.action.package_update
self.package_privatize = ckan.action.bulk_update_private
self.revision_show = ckan.action.revision_show
self.organization_list = ckan.action.organization_list_for_user
self.organization_show = ckan.action.organization_show
self.license_list = ckan.action.license_list
self.group_list = ckan.action.group_list
self.user = ckan.action.get_site_user()
def create_table(self, resource_id, fields, **kwargs):
"""Creates a datastore table for an existing filestore resource.
Args:
resource_id (str): The filestore resource id.
fields (List[dict]): fields/columns and their extra metadata.
**kwargs: Keyword arguments that are passed to datastore_create.
Kwargs:
force (bool): Create resource even if read-only.
aliases (List[str]): name(s) for read only alias(es) of the
resource.
primary_key (List[str]): field(s) that represent a unique key.
indexes (List[str]): index(es) on table.
Returns:
dict: The newly created data object.
Raises:
ValidationError: If unable to validate user on ckan site.
NotFound: If unable to find resource.
Examples:
>>> CKAN(quiet=True).create_table('rid', fields=[{'id': 'field', \
'type': 'text'}])
Traceback (most recent call last):
NotFound: Resource `rid` was not found in filestore.
"""
kwargs.setdefault('force', self.force)
kwargs['resource_id'] = resource_id
kwargs['fields'] = fields
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
if self.verbose:
print('Creating table `%s` in datastore...' % resource_id)
try:
return self.datastore_create(**kwargs)
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
raise NotFound(err_msg)
else:
raise
def delete_table(self, resource_id, **kwargs):
"""Deletes a datastore table.
Args:
resource_id (str): The datastore resource id.
**kwargs: Keyword arguments that are passed to datastore_create.
Kwargs:
force (bool): Delete resource even if read-only.
filters (dict): Filters to apply before deleting, e.g.,
{"name": "fred"}. If missing delete whole table and all
dependent views.
Returns:
dict: Original filters sent if table was found, `None` otherwise.
Raises:
ValidationError: If unable to validate user on ckan site.
Examples:
>>> CKAN(quiet=True).delete_table('rid')
Can't delete. Table `rid` was not found in datastore.
"""
kwargs.setdefault('force', self.force)
kwargs['resource_id'] = resource_id
init_msg = "Can't delete. Table `%s`" % resource_id
err_msg = '%s was not found in datastore.' % init_msg
read_msg = '%s is read only.' % init_msg
if self.verbose:
print('Deleting table `%s` from datastore...' % resource_id)
try:
result = self.datastore_delete(**kwargs)
except NotFound:
print(err_msg)
result = None
except ValidationError as err:
if 'read-only' in err.error_dict:
print(read_msg)
print("Set 'force' to True and try again.")
result = None
elif err.error_dict.get('resource_id') == ['Not found: Resource']:
print(err_msg)
result = None
else:
raise err
return result
def insert_records(self, resource_id, records, **kwargs):
"""Inserts records into a datastore table.
Args:
resource_id (str): The datastore resource id.
records (List[dict]): The records to insert.
**kwargs: Keyword arguments that are passed to datastore_create.
Kwargs:
method (str): Insert method. One of ['update, 'insert', 'upsert']
(default: 'insert').
force (bool): Create resource even if read-only.
start (int): Row number to start from (zero indexed).
stop (int): Row number to stop at (zero indexed).
chunksize (int): Number of rows to write at a time.
Returns:
int: Number of records inserted.
Raises:
NotFound: If unable to find the resource.
Examples:
>>> CKAN(quiet=True).insert_records('rid', [{'field': 'value'}])
Traceback (most recent call last):
NotFound: Resource `rid` was not found in filestore.
"""
recoded = pr.json_recode(records)
chunksize = kwargs.pop('chunksize', 0)
start = kwargs.pop('start', 0)
stop = kwargs.pop('stop', None)
kwargs.setdefault('force', self.force)
kwargs.setdefault('method', 'insert')
kwargs['resource_id'] = resource_id
count = 1
for chunk in ft.chunk(recoded, chunksize, start=start, stop=stop):
length = len(chunk)
if self.verbose:
print(
'Adding records %i - %i to resource %s...' % (
count, count + length - 1, resource_id))
kwargs['records'] = chunk
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
try:
self.datastore_upsert(**kwargs)
except requests.exceptions.ConnectionError as err:
if 'Broken pipe' in err.message[1]:
print('Chunksize too large. Try using a smaller chunksize.')
return 0
else:
raise err
except NotFound:
# Keep exception message consistent with the others
raise NotFound(err_msg)
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
raise NotFound(err_msg)
else:
raise err
count += length
return count
    def get_hash(self, resource_id):
        """Gets the hash of a datastore table.
        Args:
            resource_id (str): The datastore resource id.
        Returns:
            str: The datastore resource hash, or None if the resource has no
                entry in the hash table.
        Raises:
            NotFound: If `hash_table_id` isn't set or not in datastore.
            NotAuthorized: If unable to authorize ckan user.
        """
        # Fail fast when the hash-table package/resource was never resolved
        # during __init__.
        if not self.hash_table_pack:
            message = 'Package `%s` was not found!' % self.hash_table
            raise NotFound({'message': message, 'item': 'package'})
        if not self.hash_table_id:
            message = 'No resources found in package `%s`!' % self.hash_table
            raise NotFound({'message': message, 'item': 'resource'})
        # Look up at most one hash row keyed by the target resource id.
        kwargs = {
            'resource_id': self.hash_table_id,
            'filters': {'datastore_id': resource_id},
            'fields': 'hash',
            'limit': 1
        }
        err_msg = 'Resource `%s` was not found' % resource_id
        alt_msg = 'Hash table `%s` was not found' % self.hash_table_id
        try:
            result = self.datastore_search(**kwargs)
            resource_hash = result['records'][0]['hash']
        except NotFound:
            message = '%s in datastore!' % alt_msg
            raise NotFound({'message': message, 'item': 'datastore'})
        except ValidationError as err:
            if err.error_dict.get('resource_id') == ['Not found: Resource']:
                raise NotFound('%s in filestore.' % err_msg)
            else:
                raise err
        except IndexError:
            # No row for this resource id: report and fall through with None.
            print('%s in hash table.' % err_msg)
            resource_hash = None
        if self.verbose:
            print('Resource `%s` hash is `%s`.' % (resource_id, resource_hash))
        return resource_hash
"""Fetches a single resource from filestore.
Args:
resource_id (str): The filestore resource id.
Kwargs:
user_agent (str): The user agent.
stream (bool): Stream content (default: True).
Returns:
obj: requests.Response object.
Raises:
NotFound: If unable to find the resource.
NotAuthorized: If access to fetch resource is denied.
Examples:
>>> CKAN(quiet=True).fetch_resource('rid')
Traceback (most recent call last):
NotFound: Resource `rid` was not found in filestore.
"""
user_agent = user_agent or self.user_agent
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
try:
resource = self.resource_show(id=resource_id)
except NotFound:
raise NotFound(err_msg)
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
raise NotFound(err_msg)
else:
raise err
url = resource.get('perma_link') or resource.get('url')
if self.verbose:
print('Downloading url %s...' % url)
headers = {'User-Agent': user_agent}
r = requests.get(url, stream=stream, headers=headers)
err_msg = 'Access to fetch resource %s was denied.' % resource_id
if any('403' in h.headers.get('x-ckan-error', '') for h in r.history):
raise NotAuthorized(err_msg)
elif r.status_code == 401:
raise NotAuthorized(err_msg)
else:
return r
def get_filestore_update_func(self, resource, **kwargs):
"""Returns the function to create or update a single resource on
filestore. To create a resource, you must supply either `url`,
`filepath`, or `fileobj`.
Args:
resource (dict): The resource passed to resource_create.
**kwargs: Keyword arguments that are passed to resource_create.
Kwargs:
url (str): New file url (for file link, requires `format`).
format (str): New file format (for file link, requires `url`).
fileobj (obj): New file like object (for file upload).
filepath (str): New file path (for file upload).
post (bool): Post data using requests instead of ckanapi.
name (str): The resource name.
description (str): The resource description.
hash (str): The resource hash.
Returns:
tuple: (func, args, data)
where func is `requests.post` if `post` option is specified,
`self.resource_create` otherwise. `args` and `data` should be
passed as *args and **kwargs respectively.
See also:
ckanutils._update_filestore
Examples:
>>> ckan = CKAN(quiet=True)
>>> resource = {
... 'name': 'name', 'package_id': 'pid', 'resource_id': 'rid',
... 'description': 'description', 'hash': 'hash'}
>>> kwargs = {'url': 'http://example.com/file', 'format': 'csv'}
>>> res = ckan.get_filestore_update_func(resource, **kwargs)
>>> func, args, kwargs = res
>>> func(*args, **kwargs)
Traceback (most recent call last):
NotFound: Not found
"""
post = kwargs.pop('post', None)
filepath = kwargs.pop('filepath', None)
fileobj = kwargs.pop('fileobj', None)
f = open(filepath, 'rb') if filepath else fileobj
resource.update(kwargs)
if post:
args = ['%s/api/action/resource_create' % self.address]
hdrs = {
'X-CKAN-API-Key': self.api_key, 'User-Agent': self.user_agent}
data = {'data': resource, 'headers': hdrs}
data.update({'files': {'upload': f}}) if f else None
func = requests.post
else:
args = []
resource.update({'upload': f}) if f else None
data = {
k: v for k, v in resource.items() if not isinstance(v, dict)}
func = self.resource_create
return (func, args, data)
def _update_filestore(self, func, *args, **kwargs):
"""Helps create or update a single resource on filestore.
To create a resource, you must supply either `url`, `filepath`, or
`fileobj`.
Args:
func (func): The resource passed to resource_create.
*args: Postional arguments that are passed to `func`
**kwargs: Keyword arguments that are passed to `func`.
Kwargs:
url (str): New file url (for file link).
fileobj (obj): New file like object (for file upload).
filepath (str): New file path (for file upload).
name (str): The resource name.
description (str): The resource description.
hash (str): The resource hash.
Returns:
obj: requests.Response object if `post` option is specified,
ckan resource object otherwise.
See also:
ckanutils.get_filestore_update_func
Examples:
>>> ckan = CKAN(quiet=True)
>>> url = 'http://example.com/file'
>>> resource = {'package_id': 'pid'}
>>> kwargs = {'name': 'name', 'url': url, 'format': 'csv'}
>>> res = ckan.get_filestore_update_func(resource, **kwargs)
>>> ckan._update_filestore(res[0], *res[1], **res[2])
Package `pid` was not found.
>>> resource['resource_id'] = 'rid'
>>> res = ckan.get_filestore_update_func(resource, **kwargs)
>>> ckan._update_filestore(res[0], *res[1], **res[2])
Resource `rid` was not found in filestore.
"""
data = kwargs.get('data', {})
files = kwargs.get('files', {})
resource_id = kwargs.get('resource_id', data.get('resource_id'))
package_id = kwargs.get('package_id', data.get('package_id'))
f = kwargs.get('upload', files.get('upload'))
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
try:
r = func(*args, **kwargs) or {'id': None}
except NotFound:
pck_msg = 'Package `%s` was not found.' % package_id
print(err_msg if resource_id else pck_msg)
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
print(err_msg)
r = None
else:
raise err
except requests.exceptions.ConnectionError as err:
if 'Broken pipe' in err.message[1]:
print('File size too large. Try uploading a smaller file.')
r = None
else:
raise err
else:
return r
finally:
f.close() if f else None
def create_resource(self, package_id, **kwargs):
"""Creates a single resource on filestore. You must supply either
`url`, `filepath`, or `fileobj`.
Args:
package_id (str): The filestore package id.
**kwargs: Keyword arguments that are passed to resource_create.
Kwargs:
url (str): New file url (for file link).
filepath (str): New file path (for file upload).
fileobj (obj): New file like object (for file upload).
post (bool): Post data using requests instead of ckanapi.
name (str): The resource name (defaults to the filename).
description (str): The resource description.
hash (str): The resource hash.
Returns:
obj: requests.Response object if `post` option is specified,
ckan resource object otherwise.
Raises:
TypeError: If neither `url`, `filepath`, nor `fileobj` are supplied.
Examples:
>>> ckan = CKAN(quiet=True)
>>> ckan.create_resource('pid')
Traceback (most recent call last):
TypeError: You must specify either a `url`, `filepath`, or `fileobj`
>>> ckan.create_resource('pid', url='http://example.com/file')
Package `pid` was not found.
"""
if not any(map(kwargs.get, ['url', 'filepath', 'fileobj'])):
raise TypeError(
'You must specify either a `url`, `filepath`, or `fileobj`')
path = filter(None, map(kwargs.get, ['url', 'filepath', 'fileobj']))[0]
try:
if 'docs.google.com' in path:
def_name = path.split('gid=')[1].split('&')[0]
else:
def_name = p.basename(path)
except AttributeError:
def_name = None
file_format = 'csv'
else:
# copy/pasted from utils... fix later
if 'format=' in path:
file_format = path.split('format=')[1].split('&')[0]
else:
file_format = p.splitext(path)[1].lstrip('.')
kwargs.setdefault('name', def_name)
# Will get `ckan.logic.ValidationError` if url isn't set
kwargs.setdefault('url', 'http://example.com')
kwargs['format'] = file_format
resource = {'package_id': package_id}
if self.verbose:
print('Creating new resource in package %s...' % package_id)
func, args, data = self.get_filestore_update_func(resource, **kwargs)
return self._update_filestore(func, *args, **data)
    def update_filestore(self, resource_id, **kwargs):
        """Updates a single resource on filestore.
        Args:
            resource_id (str): The filestore resource id.
            **kwargs: Keyword arguments that are passed to resource_create.
        Kwargs:
            url (str): New file url (for file link).
            filepath (str): New file path (for file upload).
            fileobj (obj): New file like object (for file upload).
            post (bool): Post data using requests instead of ckanapi.
            name (str): The resource name.
            description (str): The resource description.
            hash (str): The resource hash.
        Returns:
            obj: requests.Response object if `post` option is specified,
                ckan resource object otherwise; None if the resource is
                missing.
        """
        err_msg = 'Resource `%s` was not found in filestore.' % resource_id
        try:
            resource = self.resource_show(id=resource_id)
        except NotFound:
            # NOTE(review): a direct NotFound prints and returns None, but the
            # equivalent ValidationError below raises — confirm the asymmetry
            # is intentional.
            print(err_msg)
            return None
        except ValidationError as err:
            if err.error_dict.get('resource_id') == ['Not found: Resource']:
                raise NotFound(err_msg)
            else:
                raise err
        else:
            resource['package_id'] = self.get_package_id(resource_id)
            if self.verbose:
                print('Updating resource %s...' % resource_id)
            f, args, data = self.get_filestore_update_func(resource, **kwargs)
            return self._update_filestore(f, *args, **data)
def update_datastore(self, resource_id, filepath, **kwargs):
verbose = not kwargs.get('quiet')
chunk_rows = kwargs.get('chunksize_rows')
primary_key = kwargs.get('primary_key')
content_type = kwargs.get('content_type')
type_cast = kwargs.get('type_cast')
method = 'upsert' if primary_key else 'insert'
keys = ['aliases', 'primary_key', 'indexes']
try:
extension = p.splitext(filepath)[1].split('.')[1]
except (IndexError, AttributeError):
# no file extension given, e.g., a tempfile
extension = cv.ctype2ext(content_type)
try:
reader = io.get_reader(extension)
except TypeError:
print('Error: plugin for extension `%s` not found!' % extension)
return False
else:
records = reader(filepath, **kwargs)
first = records.next()
keys = first.keys()<|fim▁hole|> records = it.chain([first], records)
if type_cast:
records, results = pr.detect_types(records)
types = results['types']
casted_records = pr.type_cast(records, types)
else:
types = [{'id': key, 'type': 'text'} for key in keys]
casted_records = records
if verbose:
print('Parsed types:')
pprint(types)
create_kwargs = {k: v for k, v in kwargs.items() if k in keys}
if not primary_key:
self.delete_table(resource_id)
insert_kwargs = {'chunksize': chunk_rows, 'method': method}
self.create_table(resource_id, types, **create_kwargs)
args = [resource_id, casted_records]
return self.insert_records(*args, **insert_kwargs)
def find_ids(self, packages, **kwargs):
default = {'rid': '', 'pname': ''}
kwargs.update({'method': self.query, 'default': default})
return pr.find(packages, **kwargs)
def get_package_id(self, resource_id):
"""Gets the package id of a single resource on filestore.
Args:
resource_id (str): The filestore resource id.
Returns:
str: The package id.
Examples:
>>> CKAN(quiet=True).get_package_id('rid')
Resource `rid` was not found in filestore.
"""
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
try:
resource = self.resource_show(id=resource_id)
except NotFound:
print(err_msg)
return None
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
raise NotFound(err_msg)
else:
raise err
else:
revision = self.revision_show(id=resource['revision_id'])
return revision['packages'][0]
def create_hash_table(self, verbose=False):
kwargs = {
'resource_id': self.hash_table_id,
'fields': [
{'id': 'datastore_id', 'type': 'text'},
{'id': 'hash', 'type': 'text'}],
'primary_key': 'datastore_id'
}
if verbose:
print('Creating hash table...')
self.create_table(**kwargs)
def update_hash_table(self, resource_id, resource_hash, verbose=False):
records = [{'datastore_id': resource_id, 'hash': resource_hash}]
if verbose:
print('Updating hash table...')
self.insert_records(self.hash_table_id, records, method='upsert')
    def get_update_date(self, item):
        """Return the last-update datetime of a revision/resource/package.
        Args:
            item (dict): A ckan revision, resource, or package dict.
        Returns:
            datetime: Parsed from the item's timestamp field.
        Raises:
            TypeError: If no known timestamp key is present.
        """
        timestamps = {
            'revision_timestamp': 'revision',
            'last_modified': 'resource',
            'metadata_modified': 'package'
        }
        # NOTE(review): dict iteration order decides precedence if an item
        # carries more than one of these keys — confirm the keys are mutually
        # exclusive in practice.
        for key, value in timestamps.items():
            if key in item:
                timestamp = item[key]
                item_type = value
                break
        else:
            keys = timestamps.keys()
            msg = 'None of the following keys found in item: %s' % keys
            raise TypeError(msg)
        if not timestamp and item_type == 'resource':
            # print('Resource timestamp is empty. Querying revision.')
            timestamp = self.revision_show(id=item['revision_id'])['timestamp']
        return dt.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f')
def filter(self, items, tagged=None, named=None, updated=None):
for i in items:
if i['state'] != 'active':
continue
if updated and updated(self.get_update_date(i)):
yield i
continue
if named and named.lower() in i['name'].lower():
yield i
continue
tags = it.imap(itemgetter('name'), i['tags'])
is_tagged = tagged and 'tags' in i
if is_tagged and any(it.ifilter(lambda t: t == tagged, tags)):
yield i
continue
if not (named or tagged or updated):
yield i
def query(self, packages, **kwargs):
pkwargs = {
'named': kwargs.get('pnamed'),
'tagged': kwargs.get('ptagged')}
rkwargs = {
'named': kwargs.get('rnamed'),
'tagged': kwargs.get('rtagged')}
skwargs = {'key': self.get_update_date, 'reverse': True}
filtered_packages = self.filter(packages, **pkwargs)
for pack in sorted(filtered_packages, **skwargs):
package = self.package_show(id=pack['name'])
resources = self.filter(package['resources'], **rkwargs)
for resource in sorted(resources, **skwargs):
yield {'rid': resource['id'], 'pname': package['name']}<|fim▁end|> | |
<|file_name|>test_OCB.py<|end_file_name|><|fim▁begin|># ===================================================================
#
# Copyright (c) 2014, Legrandin <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
import os
import re
import unittest
from binascii import hexlify
from Cryptodome.Util.py3compat import b, tobytes, bchr, unhexlify
from Cryptodome.Util.strxor import strxor_c
from Cryptodome.Util.number import long_to_bytes
from Cryptodome.SelfTest.st_common import list_test_cases
from Cryptodome.Cipher import AES
from Cryptodome.Hash import SHAKE128
def get_tag_random(tag, length):
    """Return `length` deterministic pseudo-random bytes derived from `tag`."""
    xof = SHAKE128.new(data=tobytes(tag))
    return xof.read(length)
class OcbTests(unittest.TestCase):
key_128 = get_tag_random("key_128", 16)
nonce_96 = get_tag_random("nonce_128", 12)<|fim▁hole|> pt = get_tag_random("plaintext", 16 * 100)
ct, mac = cipher.encrypt_and_digest(pt)
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
pt2 = cipher.decrypt_and_verify(ct, mac)
self.assertEqual(pt, pt2)
    def test_nonce(self):
        """The nonce may be omitted, passed positionally, or by keyword."""
        # Nonce is optional
        AES.new(self.key_128, AES.MODE_OCB)
        cipher = AES.new(self.key_128, AES.MODE_OCB, self.nonce_96)
        ct = cipher.encrypt(self.data_128)
        # The same nonce passed by keyword must yield the same ciphertext.
        cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
        self.assertEqual(ct, cipher.encrypt(self.data_128))
    def test_nonce_must_be_bytes(self):
        """A text (str) nonce is rejected with TypeError."""
        self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce='test12345678')
    def test_nonce_length(self):
        """Valid nonces are 1 to 15 bytes; empty or 16-byte nonces raise."""
        # nonce cannot be empty
        self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce=b(""))
        # nonce can be up to 15 bytes long
        for length in range(1, 16):
            AES.new(self.key_128, AES.MODE_OCB, nonce=self.data_128[:length])
        self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB,
                          nonce=self.data_128)
def test_block_size_128(self):
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
self.assertEqual(cipher.block_size, AES.block_size)
# By default, a 15 bytes long nonce is randomly generated
nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce
nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce
self.assertEqual(len(nonce1), 15)
self.assertNotEqual(nonce1, nonce2)
def test_nonce_attribute(self):
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
self.assertEqual(cipher.nonce, self.nonce_96)
# By default, a 15 bytes long nonce is randomly generated
nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce
nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce
self.assertEqual(len(nonce1), 15)
self.assertNotEqual(nonce1, nonce2)
def test_unknown_parameters(self):
self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB,
self.nonce_96, 7)
self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB,
nonce=self.nonce_96, unknown=7)
# But some are only known by the base cipher
# (e.g. use_aesni consumed by the AES module)
AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96,
use_aesni=False)
def test_null_encryption_decryption(self):
for func in "encrypt", "decrypt":
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
result = getattr(cipher, func)(b(""))
self.assertEqual(result, b(""))
def test_either_encrypt_or_decrypt(self):
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.encrypt(b("xyz"))
self.assertRaises(TypeError, cipher.decrypt, b("xyz"))
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.decrypt(b("xyz"))
self.assertRaises(TypeError, cipher.encrypt, b("xyz"))
def test_data_must_be_bytes(self):
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
self.assertRaises(TypeError, cipher.encrypt, 'test1234567890-*')
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
self.assertRaises(TypeError, cipher.decrypt, 'test1234567890-*')
def test_mac_len(self):
# Invalid MAC length
self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB,
nonce=self.nonce_96, mac_len=7)
self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB,
nonce=self.nonce_96, mac_len=16+1)
# Valid MAC length
for mac_len in range(8, 16 + 1):
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96,
mac_len=mac_len)
_, mac = cipher.encrypt_and_digest(self.data_128)
self.assertEqual(len(mac), mac_len)
# Default MAC length
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
_, mac = cipher.encrypt_and_digest(self.data_128)
self.assertEqual(len(mac), 16)
def test_invalid_mac(self):
from Cryptodome.Util.strxor import strxor_c
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
ct, mac = cipher.encrypt_and_digest(self.data_128)
invalid_mac = strxor_c(mac, 0x01)
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
self.assertRaises(ValueError, cipher.decrypt_and_verify, ct,
invalid_mac)
def test_hex_mac(self):
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
mac_hex = cipher.hexdigest()
self.assertEqual(cipher.digest(), unhexlify(mac_hex))
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.hexverify(mac_hex)
def test_message_chunks(self):
# Validate that both associated data and plaintext/ciphertext
# can be broken up in chunks of arbitrary length
auth_data = get_tag_random("authenticated data", 127)
plaintext = get_tag_random("plaintext", 127)
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.update(auth_data)
ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext)
def break_up(data, chunk_length):
return [data[i:i+chunk_length] for i in range(0, len(data),
chunk_length)]
# Encryption
for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128:
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
for chunk in break_up(auth_data, chunk_length):
cipher.update(chunk)
pt2 = b("")
for chunk in break_up(ciphertext, chunk_length):
pt2 += cipher.decrypt(chunk)
pt2 += cipher.decrypt()
self.assertEqual(plaintext, pt2)
cipher.verify(ref_mac)
# Decryption
for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128:
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
for chunk in break_up(auth_data, chunk_length):
cipher.update(chunk)
ct2 = b("")
for chunk in break_up(plaintext, chunk_length):
ct2 += cipher.encrypt(chunk)
ct2 += cipher.encrypt()
self.assertEqual(ciphertext, ct2)
self.assertEqual(cipher.digest(), ref_mac)
class OcbFSMTests(unittest.TestCase):
key_128 = get_tag_random("key_128", 16)
nonce_96 = get_tag_random("nonce_128", 12)
data_128 = get_tag_random("data_128", 16)
def test_valid_init_encrypt_decrypt_digest_verify(self):
# No authenticated data, fixed plaintext
# Verify path INIT->ENCRYPT->ENCRYPT(NONE)->DIGEST
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
ct = cipher.encrypt(self.data_128)
ct += cipher.encrypt()
mac = cipher.digest()
# Verify path INIT->DECRYPT->DECRYPT(NONCE)->VERIFY
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
cipher.decrypt(ct)
cipher.decrypt()
cipher.verify(mac)
def test_invalid_init_encrypt_decrypt_digest_verify(self):
# No authenticated data, fixed plaintext
# Verify path INIT->ENCRYPT->DIGEST
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
ct = cipher.encrypt(self.data_128)
self.assertRaises(TypeError, cipher.digest)
# Verify path INIT->DECRYPT->VERIFY
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
cipher.decrypt(ct)
self.assertRaises(TypeError, cipher.verify)
def test_valid_init_update_digest_verify(self):
# No plaintext, fixed authenticated data
# Verify path INIT->UPDATE->DIGEST
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
cipher.update(self.data_128)
mac = cipher.digest()
# Verify path INIT->UPDATE->VERIFY
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
cipher.update(self.data_128)
cipher.verify(mac)
def test_valid_full_path(self):
# Fixed authenticated data, fixed plaintext
# Verify path INIT->UPDATE->ENCRYPT->ENCRYPT(NONE)->DIGEST
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
cipher.update(self.data_128)
ct = cipher.encrypt(self.data_128)
ct += cipher.encrypt()
mac = cipher.digest()
# Verify path INIT->UPDATE->DECRYPT->DECRYPT(NONE)->VERIFY
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
cipher.update(self.data_128)
cipher.decrypt(ct)
cipher.decrypt()
cipher.verify(mac)
def test_invalid_encrypt_after_final(self):
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
cipher.update(self.data_128)
cipher.encrypt(self.data_128)
cipher.encrypt()
self.assertRaises(TypeError, cipher.encrypt, self.data_128)
def test_invalid_decrypt_after_final(self):
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
cipher.update(self.data_128)
cipher.decrypt(self.data_128)
cipher.decrypt()
self.assertRaises(TypeError, cipher.decrypt, self.data_128)
def test_valid_init_digest(self):
# Verify path INIT->DIGEST
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.digest()
def test_valid_init_verify(self):
# Verify path INIT->VERIFY
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
mac = cipher.digest()
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.verify(mac)
def test_valid_multiple_encrypt_or_decrypt(self):
for method_name in "encrypt", "decrypt":
for auth_data in (None, b("333"), self.data_128,
self.data_128 + b("3")):
if auth_data is None:
assoc_len = None
else:
assoc_len = len(auth_data)
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
if auth_data is not None:
cipher.update(auth_data)
method = getattr(cipher, method_name)
method(self.data_128)
method(self.data_128)
method(self.data_128)
method(self.data_128)
method()
def test_valid_multiple_digest_or_verify(self):
# Multiple calls to digest
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.update(self.data_128)
first_mac = cipher.digest()
for x in range(4):
self.assertEqual(first_mac, cipher.digest())
# Multiple calls to verify
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.update(self.data_128)
for x in range(5):
cipher.verify(first_mac)
def test_valid_encrypt_and_digest_decrypt_and_verify(self):
# encrypt_and_digest
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.update(self.data_128)
ct, mac = cipher.encrypt_and_digest(self.data_128)
# decrypt_and_verify
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.update(self.data_128)
pt = cipher.decrypt_and_verify(ct, mac)
self.assertEqual(self.data_128, pt)
def test_invalid_mixing_encrypt_decrypt(self):
# Once per method, with or without assoc. data
for method1_name, method2_name in (("encrypt", "decrypt"),
("decrypt", "encrypt")):
for assoc_data_present in (True, False):
cipher = AES.new(self.key_128, AES.MODE_OCB,
nonce=self.nonce_96)
if assoc_data_present:
cipher.update(self.data_128)
getattr(cipher, method1_name)(self.data_128)
self.assertRaises(TypeError, getattr(cipher, method2_name),
self.data_128)
def test_invalid_encrypt_or_update_after_digest(self):
for method_name in "encrypt", "update":
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.encrypt(self.data_128)
cipher.encrypt()
cipher.digest()
self.assertRaises(TypeError, getattr(cipher, method_name),
self.data_128)
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.encrypt_and_digest(self.data_128)
def test_invalid_decrypt_or_update_after_verify(self):
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
ct = cipher.encrypt(self.data_128)
ct += cipher.encrypt()
mac = cipher.digest()
for method_name in "decrypt", "update":
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.decrypt(ct)
cipher.decrypt()
cipher.verify(mac)
self.assertRaises(TypeError, getattr(cipher, method_name),
self.data_128)
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96)
cipher.decrypt_and_verify(ct, mac)
self.assertRaises(TypeError, getattr(cipher, method_name),
self.data_128)
class OcbRfc7253Test(unittest.TestCase):
# Tuple with
# - nonce
# - authenticated data
# - plaintext
# - ciphertext and 16 byte MAC tag
tv1_key = "000102030405060708090A0B0C0D0E0F"
tv1 = (
(
"BBAA99887766554433221100",
"",
"",
"785407BFFFC8AD9EDCC5520AC9111EE6"
),
(
"BBAA99887766554433221101",
"0001020304050607",
"0001020304050607",
"6820B3657B6F615A5725BDA0D3B4EB3A257C9AF1F8F03009"
),
(
"BBAA99887766554433221102",
"0001020304050607",
"",
"81017F8203F081277152FADE694A0A00"
),
(
"BBAA99887766554433221103",
"",
"0001020304050607",
"45DD69F8F5AAE72414054CD1F35D82760B2CD00D2F99BFA9"
),
(
"BBAA99887766554433221104",
"000102030405060708090A0B0C0D0E0F",
"000102030405060708090A0B0C0D0E0F",
"571D535B60B277188BE5147170A9A22C3AD7A4FF3835B8C5"
"701C1CCEC8FC3358"
),
(
"BBAA99887766554433221105",
"000102030405060708090A0B0C0D0E0F",
"",
"8CF761B6902EF764462AD86498CA6B97"
),
(
"BBAA99887766554433221106",
"",
"000102030405060708090A0B0C0D0E0F",
"5CE88EC2E0692706A915C00AEB8B2396F40E1C743F52436B"
"DF06D8FA1ECA343D"
),
(
"BBAA99887766554433221107",
"000102030405060708090A0B0C0D0E0F1011121314151617",
"000102030405060708090A0B0C0D0E0F1011121314151617",
"1CA2207308C87C010756104D8840CE1952F09673A448A122"
"C92C62241051F57356D7F3C90BB0E07F"
),
(
"BBAA99887766554433221108",
"000102030405060708090A0B0C0D0E0F1011121314151617",
"",
"6DC225A071FC1B9F7C69F93B0F1E10DE"
),
(
"BBAA99887766554433221109",
"",
"000102030405060708090A0B0C0D0E0F1011121314151617",
"221BD0DE7FA6FE993ECCD769460A0AF2D6CDED0C395B1C3C"
"E725F32494B9F914D85C0B1EB38357FF"
),
(
"BBAA9988776655443322110A",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F",
"BD6F6C496201C69296C11EFD138A467ABD3C707924B964DE"
"AFFC40319AF5A48540FBBA186C5553C68AD9F592A79A4240"
),
(
"BBAA9988776655443322110B",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F",
"",
"FE80690BEE8A485D11F32965BC9D2A32"
),
(
"BBAA9988776655443322110C",
"",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F",
"2942BFC773BDA23CABC6ACFD9BFD5835BD300F0973792EF4"
"6040C53F1432BCDFB5E1DDE3BC18A5F840B52E653444D5DF"
),
(
"BBAA9988776655443322110D",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F2021222324252627",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F2021222324252627",
"D5CA91748410C1751FF8A2F618255B68A0A12E093FF45460"
"6E59F9C1D0DDC54B65E8628E568BAD7AED07BA06A4A69483"
"A7035490C5769E60"
),
(
"BBAA9988776655443322110E",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F2021222324252627",
"",
"C5CD9D1850C141E358649994EE701B68"
),
(
"BBAA9988776655443322110F",
"",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F2021222324252627",
"4412923493C57D5DE0D700F753CCE0D1D2D95060122E9F15"
"A5DDBFC5787E50B5CC55EE507BCB084E479AD363AC366B95"
"A98CA5F3000B1479"
)
)
# Tuple with
# - key
# - nonce
# - authenticated data
# - plaintext
# - ciphertext and 12 byte MAC tag
tv2 = (
"0F0E0D0C0B0A09080706050403020100",
"BBAA9988776655443322110D",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F2021222324252627",
"000102030405060708090A0B0C0D0E0F1011121314151617"
"18191A1B1C1D1E1F2021222324252627",
"1792A4E31E0755FB03E31B22116E6C2DDF9EFD6E33D536F1"
"A0124B0A55BAE884ED93481529C76B6AD0C515F4D1CDD4FD"
"AC4F02AA"
)
# Tuple with
# - key length
# - MAC tag length
# - Expected output
tv3 = (
(128, 128, "67E944D23256C5E0B6C61FA22FDF1EA2"),
(192, 128, "F673F2C3E7174AAE7BAE986CA9F29E17"),
(256, 128, "D90EB8E9C977C88B79DD793D7FFA161C"),
(128, 96, "77A3D8E73589158D25D01209"),
(192, 96, "05D56EAD2752C86BE6932C5E"),
(256, 96, "5458359AC23B0CBA9E6330DD"),
(128, 64, "192C9B7BD90BA06A"),
(192, 64, "0066BC6E0EF34E24"),
(256, 64, "7D4EA5D445501CBE"),
)
def test1(self):
key = unhexlify(b(self.tv1_key))
for tv in self.tv1:
nonce, aad, pt, ct = [ unhexlify(b(x)) for x in tv ]
ct, mac_tag = ct[:-16], ct[-16:]
cipher = AES.new(key, AES.MODE_OCB, nonce=nonce)
cipher.update(aad)
ct2 = cipher.encrypt(pt) + cipher.encrypt()
self.assertEqual(ct, ct2)
self.assertEqual(mac_tag, cipher.digest())
cipher = AES.new(key, AES.MODE_OCB, nonce=nonce)
cipher.update(aad)
pt2 = cipher.decrypt(ct) + cipher.decrypt()
self.assertEqual(pt, pt2)
cipher.verify(mac_tag)
def test2(self):
key, nonce, aad, pt, ct = [ unhexlify(b(x)) for x in self.tv2 ]
ct, mac_tag = ct[:-12], ct[-12:]
cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12)
cipher.update(aad)
ct2 = cipher.encrypt(pt) + cipher.encrypt()
self.assertEqual(ct, ct2)
self.assertEqual(mac_tag, cipher.digest())
cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12)
cipher.update(aad)
pt2 = cipher.decrypt(ct) + cipher.decrypt()
self.assertEqual(pt, pt2)
cipher.verify(mac_tag)
def test3(self):
for keylen, taglen, result in self.tv3:
key = bchr(0) * (keylen // 8 - 1) + bchr(taglen)
C = b("")
for i in range(128):
S = bchr(0) * i
N = long_to_bytes(3 * i + 1, 12)
cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8)
cipher.update(S)
C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest()
N = long_to_bytes(3 * i + 2, 12)
cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8)
C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest()
N = long_to_bytes(3 * i + 3, 12)
cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8)
cipher.update(S)
C += cipher.encrypt() + cipher.digest()
N = long_to_bytes(385, 12)
cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8)
cipher.update(C)
result2 = cipher.encrypt() + cipher.digest()
self.assertEqual(unhexlify(b(result)), result2)
def get_tests(config={}):
tests = []
tests += list_test_cases(OcbTests)
tests += list_test_cases(OcbFSMTests)
tests += list_test_cases(OcbRfc7253Test)
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')<|fim▁end|> | data_128 = get_tag_random("data_128", 16)
def test_loopback_128(self):
cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) |
<|file_name|>marathon.py<|end_file_name|><|fim▁begin|>import contextlib
import pytest
import logging
from distutils.version import LooseVersion
from .service import service_available_predicate
from ..clients import marathon
from ..matcher import assert_that, eventually, has_len
logger = logging.getLogger(__name__)
marathon_1_3 = pytest.mark.skipif('marathon_version_less_than("1.3")')
marathon_1_4 = pytest.mark.skipif('marathon_version_less_than("1.4")')
marathon_1_5 = pytest.mark.skipif('marathon_version_less_than("1.5")')
def marathon_version(client=None):
client = client or marathon.create_client()
about = client.get_about()
# 1.3.9 or 1.4.0-RC8
return LooseVersion(about.get("version"))
def marathon_version_less_than(version):
return marathon_version() < LooseVersion(version)
def mom_version(name='marathon-user'):
"""Returns the version of marathon on marathon.
"""
if service_available_predicate(name):
with marathon_on_marathon(name) as client:
return marathon_version(client)
else:
# We can either skip the corresponding test by returning False
# or raise an exception.
logger.warning('{} MoM not found. Version is None'.format(name))
return None
def mom_version_less_than(version, name='marathon-user'):
""" Returns True if MoM with the given {name} exists and has a version less
than {version}. Note that if MoM does not exist False is returned.
:param version: required version
:type: string
:param name: MoM name, default is 'marathon-user'
:type: string
:return: True if version < MoM version
:rtype: bool
"""
if service_available_predicate(name):
return mom_version() < LooseVersion(version)
else:
# We can either skip the corresponding test by returning False
# or raise an exception.
logger.warning('{} MoM not found. mom_version_less_than({}) is False'.format(name, version))
return False
def deployment_predicate(app_id=None):
return len(marathon.create_client().get_deployments(app_id)) == 0
def delete_app(app_id, force=True):
marathon.create_client().remove_app(app_id, force=force)
def delete_app_wait(app_id, force=True):
delete_app(app_id, force)
deployment_wait(service=app_id)<|fim▁hole|>
def delete_all_apps(force=True, client=None):
client = client or marathon.create_client()
client.remove_group("/", force=force)
def delete_all_apps_wait(force=True):
delete_all_apps(force=force)
deployment_wait()
def is_app_healthy(app_id):
app = marathon.create_client().get_app(app_id)
if app["healthChecks"]:
return app["tasksHealthy"] == app["instances"]
else:
return app["tasksRunning"] == app["instances"]
@contextlib.contextmanager
def marathon_on_marathon(name='marathon-user'):
""" Context manager for altering the marathon client for MoM
:param name: service name of MoM to use
:type name: str
"""
client = marathon.create_client(name)
yield client
def deployments_for(service_id=None, deployment_id=None):
deployments = marathon.create_client().get_deployments()
if deployment_id:
filtered = [
deployment for deployment in deployments
if deployment_id == deployment["id"]
]
return filtered
elif service_id:
filtered = [
deployment for deployment in deployments
if service_id in deployment['affectedApps'] or service_id in deployment['affectedPods']
]
return filtered
else:
return deployments
def deployment_wait(service_id=None, deployment_id=None, wait_fixed=2000, max_attempts=60):
""" Wait for a specific app/pod to deploy successfully. If no app/pod Id passed, wait for all
current deployments to succeed. This inner matcher will retry fetching deployments
after `wait_fixed` milliseconds but give up after `max_attempts` tries.
"""
assert not all([service_id, deployment_id]), "Use either deployment_id or service_id, but not both."
if deployment_id:
logger.info("Waiting for the deployment_id {} to finish".format(deployment_id))
elif service_id:
logger.info('Waiting for {} to deploy successfully'.format(service_id))
else:
logger.info('Waiting for all current deployments to finish')
assert_that(lambda: deployments_for(service_id, deployment_id),
eventually(has_len(0), wait_fixed=wait_fixed, max_attempts=max_attempts))<|fim▁end|> | |
<|file_name|>Registry.java<|end_file_name|><|fim▁begin|>package crashreporter.api;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Registry for API provider objects.
*
* @author Richard
*/
public class Registry {
private static final Map<String, PastebinProvider> pastebinProviders = new HashMap<String, PastebinProvider>();
private static final Map<String, Class<? extends NotificationProvider>> notificationProviders = new HashMap<String, Class<? extends NotificationProvider>>();
/**
* Register a {@link PastebinProvider}.
*
* @param id ID name for the provider, used in the config file
* @param provider The provider
*/
public static void registerPastebinProvider(String id, PastebinProvider provider) {
if (pastebinProviders.containsKey(id)) throw new IllegalArgumentException("Pastebin provider " + id + " already registered by " + pastebinProviders.get(id) + " when registering " + provider);
pastebinProviders.put(id, provider);
}
/**
* Get a {@link PastebinProvider} by its ID.
*
* @param id ID name for the provider
* @return The provider, or null if there is no such provider
*/
public static PastebinProvider getPastebinProvider(String id) {
return pastebinProviders.get(id);
}
/**
* Get a list of {@link PastebinProvider}s, the first one being the user's preferred pastebin.
*
* @return List of providers
*/
public static List<PastebinProvider> getPastebinProviders() {
List<PastebinProvider> providers = new ArrayList<PastebinProvider>(pastebinProviders.size());
<|fim▁hole|> providers.addAll(pastebinProviders.values());
return providers;
}
/**
* Get a map of all {@link PastebinProvider}s, in no particular order.
*
* @return Map of providers
*/
public static Map<String, PastebinProvider> getAllPastebinProviders() {
return Collections.unmodifiableMap(pastebinProviders);
}
/**
* Register a {@link NotificationProvider} class.
*
* @param id ID name for the provider, used in the config file
* @param provider The provider class
*/
public static void registerNotificationProvider(String id, Class<? extends NotificationProvider> provider) {
if (notificationProviders.containsKey(id)) throw new IllegalArgumentException("Notification provider " + id + " already registered by " + notificationProviders.get(id) + " when registering " + provider);
notificationProviders.put(id, provider);
}
/**
* Get a {@link NotificationProvider} class by its ID.
*
* @param id ID name for the provider class
* @return The provider class, or null if there is no such provider
*/
public static Class<? extends NotificationProvider> getNotificationProvider(String id) {
return notificationProviders.get(id);
}
/**
* Get a list of {@link NotificationProvider} classes.
*
* @return List of provider classes
* @see CallHandler#getActiveNotificationProviders()
*/
public static List<Class<? extends NotificationProvider>> getNotificationProviders() {
List<Class<? extends NotificationProvider>> providers = new ArrayList<Class<? extends NotificationProvider>>(notificationProviders.size());
providers.addAll(notificationProviders.values());
return providers;
}
/**
* Get a map of all {@link NotificationProvider} classes.
*
* @return Map of provider classes
* @see CallHandler#getActiveNotificationProviders()
*/
public static Map<String, Class<? extends NotificationProvider>> getAllNotificationProviders() {
return Collections.unmodifiableMap(notificationProviders);
}
}<|fim▁end|> | // first the preferred one
PastebinProvider preferred = CallHandler.instance.getPastebin();
if (preferred != null) providers.add(preferred);
// then the rest |
<|file_name|>production.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals
from .base import *
DEBUG = False
TEMPLATE_DEBUG = False
COMPRESS_OFFLINE = True
AWS_STORAGE_BUCKET_NAME = get_env_variable("AWS_STORAGE_BUCKET_NAME")
AWS_S3_CUSTOM_DOMAIN = '{}.s3.amazonaws.com'.format(AWS_STORAGE_BUCKET_NAME)
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'<|fim▁hole|>STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = 'https://{}.s3.amazonaws.com/'.format(AWS_STORAGE_BUCKET_NAME)
STATIC_URL = 'https://{}.s3.amazonaws.com/'.format(AWS_STORAGE_BUCKET_NAME)
MEDIA_URL = 'https://{}.s3.amazonaws.com/'.format(AWS_STORAGE_BUCKET_NAME)
RAVEN_CONFIG = {
'dsn': get_env_variable('RAVEN_DSN'),
}
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
FAVICON_PATH = STATIC_URL + 'img/favicon.png'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': get_env_variable('DB_NAME'),
'USER': get_env_variable('DB_USER'),
'PASSWORD': get_env_variable('DB_PASSWORD'),
'HOST': get_env_variable('DB_HOST'),
'PORT': get_env_variable('DB_PORT'),
'CONN_MAX_AGE': 600,
}
}
try:
from .local import *
except ImportError:
pass<|fim▁end|> | |
<|file_name|>interfaces.py<|end_file_name|><|fim▁begin|>from django.utils.safestring import mark_safe
from corehq.apps.data_interfaces.dispatcher import EditDataInterfaceDispatcher
from corehq.apps.groups.models import Group
from django.core.urlresolvers import reverse
from corehq.apps.reports import util
from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn, DTSortType
from corehq.apps.reports.generic import GenericReportView
from corehq.apps.reports.models import HQUserType
from corehq.apps.reports.standard.cases.basic import CaseListMixin
from corehq.apps.reports.standard.cases.data_sources import CaseDisplay
from dimagi.utils.decorators.memoized import memoized
from django.utils.translation import ugettext_noop
from django.utils.translation import ugettext as _
class DataInterface(GenericReportView):
# overriding properties from GenericReportView
section_name = ugettext_noop("Data")
base_template = "reports/standard/base_template.html"
asynchronous = True
dispatcher = EditDataInterfaceDispatcher
exportable = False
@property
def default_report_url(self):
return reverse('data_interfaces_default', args=[self.request.project])
class CaseReassignmentInterface(CaseListMixin, DataInterface):
name = ugettext_noop("Reassign Cases")
slug = "reassign_cases"
report_template_path = 'data_interfaces/interfaces/case_management.html'
asynchronous = False
ajax_pagination = True
@property
@memoized
def all_case_sharing_groups(self):
return Group.get_case_sharing_groups(self.domain)
@property
def headers(self):
headers = DataTablesHeader(
DataTablesColumn(mark_safe('Select <a href="#" class="select-all btn btn-mini btn-inverse">all</a> <a href="#" class="select-none btn btn-mini btn-warning">none</a>'), sortable=False, span=2),
DataTablesColumn(_("Case Name"), span=3, prop_name="name.exact"),
DataTablesColumn(_("Case Type"), span=2, prop_name="type.exact"),
DataTablesColumn(_("Owner"), span=2, prop_name="owner_display", sortable=False),
DataTablesColumn(_("Last Modified"), span=3, prop_name="modified_on"),
)
return headers
@property
def rows(self):
checkbox = mark_safe('<input type="checkbox" class="selected-commcare-case" data-bind="event: {change: updateCaseSelection}" data-caseid="%(case_id)s" data-owner="%(owner)s" data-ownertype="%(owner_type)s" />')
for row in self.es_results['hits'].get('hits', []):
case = self.get_case(row)<|fim▁hole|> display.case_type,
display.owner_display,
util.format_relative_date(display.parse_date(display.case['modified_on']))['html'],
]
@property
def report_context(self):
context = super(CaseReassignmentInterface, self).report_context
active_users = self.get_all_users_by_domain(user_filter=tuple(HQUserType.use_defaults()), simplified=True)
context.update(
users=[dict(ownerid=user.get('user_id'), name=user.get('username_in_report'), type="user")
for user in active_users],
groups=[dict(ownerid=group.get_id, name=group.name, type="group")
for group in self.all_case_sharing_groups],
user_ids=self.user_ids,
)
return context<|fim▁end|> | display = CaseDisplay(self, case)
yield [
checkbox % dict(case_id=case['_id'], owner=display.owner_id, owner_type=display.owner_type),
display.case_link, |
<|file_name|>win_reward.py<|end_file_name|><|fim▁begin|># This file is part of ArcJail.
#
# ArcJail is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ArcJail is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ArcJail. If not, see <http://www.gnu.org/licenses/>.
from listeners.tick import Delay
from controlled_cvars.handlers import float_handler
from ...resource.strings import build_module_strings
from ..damage_hook import get_hook, protected_player_manager
from ..players import player_manager
from .. import build_module_config
from .base_classes.jail_game import JailGame
from . import game_event_handler, stage
strings_module = build_module_strings('lrs/win_reward')
config_manager = build_module_config('lrs/win_reward')
config_manager.controlled_cvar(
float_handler,
"duration",
default=10,
description="Duration of Win Reward"
)
config_manager.controlled_cvar(
float_handler,
"loser_speed",
default=0.5,
description="Loser's speed"
)
class WinReward(JailGame):
caption = "Win Reward"
stage_groups = {
'winreward-start': [
"equip-damage-hooks",
"set-start-status",
"winreward-entry",
],
'winreward-timed-out': ["winreward-timed-out", ],
}
def __init__(self, players, **kwargs):
super().__init__(players, **kwargs)
self._counters = {}
self._results = {
'winner': kwargs['winner'],
'loser': kwargs['loser'],
}
@stage('basegame-entry')
def stage_basegame_entry(self):
self.set_stage_group('winreward-start')
@stage('equip-damage-hooks')
def stage_equip_damage_hooks(self):
winner, loser = self._results['winner'], self._results['loser']
def hook_hurt_for_loser(counter, info):
return info.attacker == winner.index
for player in self._players:
p_player = protected_player_manager[player.index]
counter = self._counters[player.index] = p_player.new_counter()
<|fim▁hole|> if player == winner:
counter.hook_hurt = get_hook('SW')
else:
counter.hook_hurt = hook_hurt_for_loser
p_player.set_protected()
@stage('undo-equip-damage-hooks')
def stage_undo_equip_damage_hooks(self):
for player in self._players_all:
p_player = protected_player_manager[player.index]
p_player.delete_counter(self._counters[player.index])
p_player.unset_protected()
@stage('winreward-entry')
def stage_winreward_entry(self):
winner, loser = self._results['winner'], self._results['loser']
loser.speed = config_manager['loser_speed']
def timeout_callback():
self.set_stage_group('winreward-timed-out')
self._delays.append(
Delay(config_manager['duration'], timeout_callback))
@stage('winreward-timed-out')
def stage_wireward_timed_out(self):
winner, loser = self._results['winner'], self._results['loser']
loser.take_damage(loser.health, attacker_index=winner.index)
@game_event_handler('jailgame-player-death', 'player_death')
def event_jailgame_player_death(self, game_event):
player = player_manager.get_by_userid(game_event['userid'])
if player not in self._players:
return
self._players.remove(player)
winner, loser = self._results['winner'], self._results['loser']
if player == winner:
loser.take_damage(loser.health + 1, attacker_index=winner.index)
self.set_stage_group('destroy')<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
from django.db import models
from django.apps import apps
from empresa.models import Empresa
import json
import os
import tempfile
import datetime
import requests
class Parking(models.Model):
    """A car park owned by an Empresa: capacity, tariff curve and gates."""
    empresa = models.OneToOneField(Empresa)
    nombre = models.CharField(max_length=40)
    plazas = models.IntegerField()  # total number of parking spots

    def __unicode__(self):
        return "{} ({})".format(self.nombre, self.empresa)

    def tupla_tarifa(self):
        """Build the tariff table from the price-curve points.

        Returns a list of [price, start, end) rows where each row's end is
        the next row's start; the last row is capped at 24 hours.
        """
        # List of [precio, hora] pairs.
        # NOTE: relies on Python 2 map() returning a list (the file also
        # uses __unicode__); under Python 3 this would need list(map(...)).
        lista = map(list, self.tarifa_set.values_list('precio', 'hora'))
        n = len(lista)
        # Append each row's end time, taken from the following row's start.
        for i in range(n-1):
            lista[i].append(lista[i+1][1])
        # Final range is capped at 24 hours.
        lista[n-1].append(datetime.timedelta(days=1))
        # Rows are [precio, hora_start, hora_end_non_inclusive].
        return lista

    def tabla_tarifa(self):
        """Yield (start, end, price) rows formatted for display."""
        for precio, min0, min1 in self.tupla_tarifa():
            # Subtract one second so the printed end reads e.g. 01:59:59.
            t = min1 - datetime.timedelta(seconds=1)
            yield min0, t, precio

    def get_dia(self):
        """Price of one full day: the last (highest-threshold) tariff."""
        return float(self.tarifa_set.last().precio)

    def get_tarifa(self, td):
        """Price for a stay of duration *td* (a datetime.timedelta)."""
        # Whole days are billed at the daily rate...
        precio_dias = td.days * self.get_dia()
        # ...and the remaining fraction of a day is matched to a bracket.
        td = datetime.timedelta(seconds=td.seconds)
        for precio, min0, min1 in self.tupla_tarifa():
            if min0 <= td < min1:
                return precio_dias + float(precio)
        # NOTE(review): implicitly returns None when no bracket matches;
        # the 24h cap in tupla_tarifa() should make that impossible.

    def barreras_entrada(self):
        """Entry barriers of this parking."""
        return self.barrera_set.filter(entrada=True)

    def barreras_salida(self):
        """Exit barriers of this parking."""
        return self.barrera_set.filter(entrada=False)

    def nodos_remotos(self):
        """All remote (nameko) nodes attached to this parking."""
        return self.nodoremoto_set.all()

    @property
    def entrada_set(self):
        # Entrada lives in the tickets app; resolved lazily via the app
        # registry to avoid a circular import.
        Entrada = apps.get_model('tickets.Entrada')
        return Entrada.objects.por_parking(self)

    @property
    def coches_hoy(self):
        # Cars that entered today.
        return self.entrada_set.de_hoy().count()

    @property
    def coches_dentro(self):
        # Cars that entered today and are still inside.
        return self.entrada_set.de_hoy().dentro().count()
class Expendedor(models.Model):
    """Ticket dispenser at a parking entry; can optionally take a photo."""
    parking = models.ForeignKey(Parking)
    nombre = models.CharField(max_length=40)
    mac = models.CharField(max_length=17)  # device MAC address ("xx:xx:...")
    camera_command = models.CharField(max_length=255, blank=True, null=True, help_text="Comando para la camara, "
                                      "con {} donde queramos poner el output filename")

    def saca_foto(self):
        """Run camera_command and return the captured image bytes, or None.

        The command receives the output filename via the {} placeholder.
        """
        contenido = None
        if self.camera_command:
            # NOTE(review): tempfile.mktemp() is race-prone; kept because
            # the external command needs a plain path to write to.
            filename = tempfile.mktemp()
            ret = os.system(self.camera_command.format(filename))
            if ret == 0:
                contenido = open(filename).read()
            if os.path.isfile(filename):
                os.unlink(filename)
        return contenido

    def __unicode__(self):
        return "{} de {}".format(self.nombre, self.parking.nombre)

    class Meta:
        verbose_name = 'expendedor'
        verbose_name_plural = 'expendedores'
class Barrera(models.Model):
    """A physical barrier (gate) of a parking, driven over HTTP.

    Each action (open / latch open / close) has an optional URL; when the
    matching *_post field is non-empty the request is a POST whose payload
    is the JSON-decoded dict, otherwise a plain GET is issued.
    """
    parking = models.ForeignKey(Parking)
    nombre = models.CharField(max_length=40)
    slug = models.CharField(max_length=40, unique=True)
    entrada = models.BooleanField()  # True = entry barrier, False = exit
    abre_url = models.URLField(max_length=100, blank=True, null=True, help_text="si hay url es que esta activo")
    abre_post = models.CharField(max_length=100, blank=True, null=True, help_text="post data en formato json")
    abresiempre_url = models.URLField(max_length=100, blank=True, null=True, help_text="si hay url es que esta activo")
    abresiempre_post = models.CharField(max_length=100, blank=True, null=True, help_text="post data en formato json")
    cierra_url = models.URLField(max_length=100, blank=True, null=True, help_text="si hay url es que esta activo")
    cierra_post = models.CharField(max_length=100, blank=True, null=True, help_text="post data en formato json")

    def _request(self, url, post_data):
        """GET *url*, or POST the JSON-decoded *post_data* when given.

        Returns True when the device answered HTTP 200.  Shared by the
        three public actions below (previously copy-pasted three times).
        """
        if post_data:
            r = requests.post(url, data=json.loads(post_data))
        else:
            r = requests.get(url)
        return r.status_code == 200

    def abre(self):
        """Open the barrier once."""
        return self._request(self.abre_url, self.abre_post)

    def abresiempre(self):
        """Latch the barrier permanently open."""
        return self._request(self.abresiempre_url, self.abresiempre_post)

    def cierra(self):
        """Close the barrier."""
        return self._request(self.cierra_url, self.cierra_post)

    def __unicode__(self):
        return "{} ({} de {})".format(self.slug, "entrada" if self.entrada else "salida", self.parking.nombre)

    class Meta:
        verbose_name = 'barrera'
        verbose_name_plural = 'barreras'
class Tarifa(models.Model):
    """One point of a parking's price curve: *precio* applies from *hora* on."""
    parking = models.ForeignKey(Parking)
    precio = models.DecimalField(max_digits=5, decimal_places=2)
    hora = models.DurationField(help_text="hora a partir de la cual aplica este precio")

    def __unicode__(self):
        return "{} = {:.2f} €".format(self.hora, self.precio)

    class Meta:
        # Ordered by threshold so Parking.tupla_tarifa() sees ascending steps.
        ordering = ('hora', )
class NodoRemoto(models.Model):
    """A remote nameko daemon reachable from this parking."""
    parking = models.ForeignKey(Parking)
    host_name = models.CharField(max_length = 100, blank = True, null = True, help_text = 'Nombre del Host')
    url = models.CharField(max_length = 100, blank=True, null=True, help_text = ' url del demonio nameko' )
    nombre = models.CharField(max_length=100, blank=True, null=True, help_text = 'Nombre del demonio nameko')

    def __unicode__(self):
        return "{} [{}]".format(self.nombre, self.url)

    def comandos(self):
        """All remote commands registered for this node."""
        return self.comandoremoto_set.all()

    class Meta:
        verbose_name = 'Nodo Remoto'
        verbose_name_plural = 'Nodos Remotos'
class ComandoRemoto(models.Model):
    """A named command that can be invoked on a NodoRemoto daemon."""
    nombre = models.CharField(max_length = 100, blank=True, null=True, help_text = 'nombre del comando')
    comando = models.CharField(max_length = 100, blank=True, null=True, help_text= 'comando')
    nodoremoto = models.ForeignKey(NodoRemoto)

    def __unicode__(self):
        return "{}: {}.{}()".format(self.nombre, self.nodoremoto, self.comando)

    class Meta:
        verbose_name = 'comando Remoto'
        verbose_name_plural = 'Comandos Remotos'
# from django.db.models.signals import pre_save
# from django.dispatch import receiver
# @receiver(pre_save, sender=Tarifa)<|fim▁hole|># def anula_date(sender, instance, using, **kwargs):
# if isinstance(instance, datetime.datetime):
# instance.hora = instance.hora.replace(year=1970, month=1, day=1)
class Visor(models.Model):
    """A display unit that shows the amount due to the driver."""
    url = models.URLField(default="http://192.168.1.1:8000")
    descripcion = models.CharField(default="visor colocado en ...", max_length=200)
    parking = models.ForeignKey(Parking)

    def mostrar_importe(self, importe):
        """Push *importe* to the display device.

        Returns True when the device acknowledged with HTTP 200, False on
        any network error.

        Bug fixed: the original issued the POST twice -- once inside the
        try/except and then again unconditionally outside it, so every
        amount was sent two times and the second call could raise
        unguarded.  It also built an unused ``imprte_str`` string.
        """
        try:
            r = requests.post(self.url, json={"importe": importe})
        except requests.RequestException:
            # Device unreachable / timed out: report failure, don't crash.
            return False
        return r.status_code == 200

    def __str__(self):
        return self.descripcion

    class Meta:
        verbose_name_plural = 'Visores'
<|file_name|>weir.py<|end_file_name|><|fim▁begin|>import math
from .link import Link
from nimbus.reports import report as rp
from nimbus.reports import input as inp
from nimbus.network.links.sections import circle as cir
from nimbus.network.links.sections import rectangle as rct
class Weir(Link):
def __init__(self, name=None, section=None, orif_coef=None, weir_coef=None, invert=None, node1=None, node2=None):
    """A weir link between two nodes.

    orif_coef and weir_coef are the orifice and weir discharge
    coefficients; invert is the crest elevation (ft, presumably --
    confirm units against the rest of the network model).
    """
    super(Weir, self).__init__(name, node1, node2, section)
    self.orif_coef = orif_coef
    self.weir_coef = weir_coef
    self.invert = invert
    # Input report used by the reporting subsystem.
    self.report = inp.InputReport(self)
<|fim▁hole|> if stage1 > stage2: # stage 1 higher
if stage1 > crown: # orifice flow
if stage2 < self.invert: # free flow
eff_head = stage1 - center
else: # submerged flow
eff_head = stage1 - stage2
area = self.section.get_flow_area(self.section.rise)
flow = self.orif_coef * area * math.sqrt(2.0 * 32.2 * eff_head)
elif stage1 > self.invert: # weir flow
eff_head = stage1 - self.invert
flow = self.weir_coef * self.section.span / 12.0 * pow(eff_head, 1.5)
if stage2 > self.invert: # submerged flow
flow *= 1.0 - pow(pow(stage2 / stage1, 1.5), 0.385)
else:
flow = 0.0
else: # stage 2 higher
if stage2 > crown: # orifice flow
if stage1 < self.invert: # free flow
eff_head = stage2 - center
else: # submerged flow
eff_head = stage2 - stage1
area = self.section.get_flow_area(self.section.rise)
flow = -self.orif_coef * area * math.sqrt(2.0 * 32.2 * eff_head)
elif stage2 > self.invert: # weir flow
eff_head = stage2 - self.invert
flow = -self.weir_coef * self.section.span / 12.0 * pow(eff_head, 1.5)
if stage1 > self.invert: # submerged flow
flow *= 1.0 - pow(pow(stage1 / stage2, 1.5), 0.385)
else:
flow = 0.0
return flow
def get_input_strings(self):
    """Return human-readable input lines for the report subsystem."""
    if self.section:
        shape_type = rp.property_to_string(self.section.__class__, '__name__')
        shape_span = rp.float_to_string(self.section.span, 3)
        shape_rise = rp.float_to_string(self.section.rise, 3)
    else:
        # No section assigned yet: report placeholders instead of crashing.
        shape_type = 'Undefined'
        shape_span = 'Undefined'
        shape_rise = 'Undefined'
    inputs = ['Name: ' + rp.property_to_string(self, 'name'),
              'Shape Type: ' + shape_type,
              'Span (in): ' + shape_span,
              'Rise (in): ' + shape_rise,
              'Orifice Coef.: ' + rp.float_to_string(self.orif_coef, 3),
              'Weir. Coef: ' + rp.float_to_string(self.weir_coef, 3),
              'Invert: ' + rp.float_to_string(self.invert, 3)]
    return inputs
def set_shape_as_rectangle(self, span, rise, horizontal=False):
    """Give the weir a rectangular section (dimensions in inches)."""
    self.section = rct.Rectangle(span, rise, horizontal)
    return

def set_shape_as_circle(self, diameter, horizontal=False):
    """Give the weir a circular section (diameter in inches)."""
    self.section = cir.Circle(diameter, horizontal)
    return
"""Return the flow of the weir given the stages on both sides of the link."""
crown = self.invert + self.section.rise / 12.0
center = self.invert + self.section.rise / 12.0 / 2.0
|
<|file_name|>notify.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>import sys
import time
from timerasp import gmail
if __name__ == '__main__':
    # Grace period of five minutes before notifying, then e-mail the
    # command-line arguments (typically an IP address) to the owner.
    time.sleep(300)
    gmail.send_email('notify.py: IP:{}'.format(' '.join(sys.argv[1:])),
                     'Notify was run and is telling you something')
<|file_name|>inst.apk.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
<|fim▁hole|>import os
import shutil
import glob
import time
import sys
import subprocess
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARAMETERS = None
ADB_CMD = "adb"
def doCMD(cmd):
    """Run *cmd* in a shell, echoing its output line by line.

    Returns a tuple (exit_code, list_of_output_lines).
    """
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    while True:
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        # poll() stays None until the child exits; keep pumping output.
        cmd_return_code = cmd_proc.poll()
        if output_line == '' and cmd_return_code != None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)
    return (cmd_return_code, output)
def uninstPKGs():
    """Uninstall org.xwalk.<name> for every .apk found under SCRIPT_DIR.

    Returns True when no uninstall reported "Failure".
    """
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        for file in files:
            if file.endswith(".apk"):
                # Package name is derived from the apk file's base name.
                cmd = "%s -s %s uninstall org.xwalk.%s" % (
                    ADB_CMD, PARAMETERS.device, os.path.basename(os.path.splitext(file)[0]))
                (return_code, output) = doCMD(cmd)
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break
    return action_status
def instPKGs():
    """Install every .apk found under SCRIPT_DIR onto the chosen device.

    Returns True when no install reported "Failure".
    """
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        for file in files:
            if file.endswith(".apk"):
                cmd = "%s -s %s install %s" % (ADB_CMD,
                                               PARAMETERS.device, os.path.join(root, file))
                (return_code, output) = doCMD(cmd)
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break
    # Make the bundled test shell scripts executable after install.
    os.system("chmod 777 %s/stablonglast2d/*.sh" % SCRIPT_DIR)
    return action_status
def main():
    """Parse options, pick a device when none was given, then (un)install."""
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception, e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)
    if not PARAMETERS.device:
        # Default to the first device that "adb devices" reports.
        (return_code, output) = doCMD("adb devices")
        for line in output:
            if str.find(line, "\tdevice") != -1:
                PARAMETERS.device = line.split("\t")[0]
                break
    if not PARAMETERS.device:
        print "No device found"
        sys.exit(1)
    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)
    # Default action (no -u) is install.
    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        if not instPKGs():
            sys.exit(1)
if __name__ == "__main__":
    main()
    # Explicit success exit; failures exit(1) inside main().
    sys.exit(0)
<|file_name|>cachestatus.js<|end_file_name|><|fim▁begin|>/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Cache Status.
*
* The Initial Developer of the Original Code is
* Jason Purdy.
* Portions created by the Initial Developer are Copyright (C) 2005
* the Initial Developer. All Rights Reserved.
*
* Thanks to the Fasterfox Extension for some pointers
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
/**
 * Refresh the status-bar label for one cache device.
 *
 * @param type        cache device name reported by the visitor
 *                    ('memory', 'disk'; anything else is ignored)
 * @param aDeviceInfo nsICacheDeviceInfo with totalSize/maximumSize (bytes)
 * @param prefs       pref branch "extensions.cachestatus."
 */
function cs_updated_stat( type, aDeviceInfo, prefs ) {
    // Convert bytes -> MB, rounded to one decimal for display.
    var current = round_memory_usage( aDeviceInfo.totalSize/1024/1024 );
    var max = round_memory_usage( aDeviceInfo.maximumSize/1024/1024 );
    var cs_id = 'cachestatus';
    var bool_pref_key = 'auto_clear';
    var int_pref_key = 'ac';
    var clear_directive;
    if ( type == 'memory' ) {
        cs_id += '-ram-label';
        bool_pref_key += '_ram';
        int_pref_key += 'r_percent';
        clear_directive = 'ram';
        // this is some sort of random bug workaround
        if ( current > max && current == 4096 ) {
            current = 0;
        }
    } else if ( type == 'disk' ) {
        cs_id += '-hd-label';
        bool_pref_key += '_disk';
        int_pref_key += 'd_percent';
        clear_directive = 'disk';
    } else {
        // offline ... or something else we don't manage
        return;
    }
    /*
    dump( 'type: ' + type + ' - aDeviceInfo' + aDeviceInfo );
    // do we need to auto-clear?
    dump( "evaling if we need to auto_clear...\n" );
    dump( bool_pref_key + ": " + prefs.getBoolPref( bool_pref_key ) + " and " +
        (( current/max )*100) + " > " +
        prefs.getIntPref( int_pref_key ) + "\n" );
    dump( "new min level: " + prefs.getIntPref( int_pref_key )*.01*max + " > 10\n" );
    */
    /*
    This is being disabled for now:
    http://code.google.com/p/cachestatus/issues/detail?id=10
    */
    /*
    if (
        prefs.getBoolPref( bool_pref_key ) &&
        prefs.getIntPref( int_pref_key )*.01*max > 10 &&
        (( current/max )*100) > prefs.getIntPref( int_pref_key )
    ) {
        //dump( "clearing!\n" );
        cs_clear_cache( clear_directive, 1 );
        current = 0;
    }
    */
    // Now, update the status bar label...
    var wm = Components.classes["@mozilla.org/appshell/window-mediator;1"]
        .getService(Components.interfaces.nsIWindowMediator);
    var win = wm.getMostRecentWindow("navigator:browser");
    if (win) {
        win.document.getElementById(cs_id).setAttribute(
            'value', current + " MB / " + max + " MB " );
    }
}
/**
 * Poll every cache device and push fresh usage figures to the UI.
 * Per-device work happens in cs_updated_stat(); visitEntry is a
 * mandatory no-op member of the nsICacheVisitor contract.
 */
function update_cache_status() {
    // Reconstructed: the original declaration line was truncated; the
    // contract ID matches the identical lookup in cs_clear_cache().
    var cache_service = Components.classes["@mozilla.org/network/cache-service;1"]
        .getService(Components.interfaces.nsICacheService);
    var prefService = Components.classes["@mozilla.org/preferences-service;1"]
        .getService(Components.interfaces.nsIPrefService);
    var prefs = prefService.getBranch("extensions.cachestatus.");
    var cache_visitor = {
        visitEntry: function(a,b) {},
        visitDevice: function( device, aDeviceInfo ) {
            cs_updated_stat( device, aDeviceInfo, prefs );
        }
    }
    cache_service.visitEntries( cache_visitor );
}
/**
 * Round a megabyte figure to one decimal place for display.
 * Accepts anything parseFloat understands (a number or numeric string).
 * A future version could make the precision user-configurable.
 */
function round_memory_usage( memory ) {
    return Math.round( parseFloat( memory ) * 10 ) / 10;
}
// I got the cacheService code from the fasterfox extension
// http://www.xulplanet.com/references/xpcomref/ifaces/nsICacheService.html
/**
 * Evict cached entries.
 *
 * @param param    'ram' or 'disk' to clear one store; anything falsy
 *                 clears both.
 * @param noupdate when truthy, skip the status-bar refresh (used by the
 *                 auto-clear path, which updates the label itself).
 */
function cs_clear_cache( param, noupdate ) {
    var cacheService = Components.classes["@mozilla.org/network/cache-service;1"]
        .getService(Components.interfaces.nsICacheService);
    if ( param && param == 'ram' ) {
        cacheService.evictEntries(Components.interfaces.nsICache.STORE_IN_MEMORY);
    } else if ( param && param == 'disk' ) {
        cacheService.evictEntries(Components.interfaces.nsICache.STORE_ON_DISK);
    } else {
        cacheService.evictEntries(Components.interfaces.nsICache.STORE_ON_DISK);
        cacheService.evictEntries(Components.interfaces.nsICache.STORE_IN_MEMORY);
    }
    if ( ! noupdate ) {
        update_cache_status();
    }
}
/*
* Grabbed this helpful bit from:
* http://kb.mozillazine.org/On_Page_Load
* http://developer.mozilla.org/en/docs/Code_snippets:On_page_load
*/
// Singleton controller for the Cache Status extension: wires up pref
// observation, the status-bar presence, the page-load refresh hook and
// the one-time welcome page.
var csExtension = {
    // Refresh cache figures after every page load (when auto_update is on).
    onPageLoad: function(aEvent) {
        update_cache_status();
    },
    // nsISupports boilerplate: we observe prefs via a weak reference.
    QueryInterface : function (aIID) {
        if (aIID.equals(Components.interfaces.nsIObserver) ||
            aIID.equals(Components.interfaces.nsISupports) ||
            aIID.equals(Components.interfaces.nsISupportsWeakReference))
            return this;
        throw Components.results.NS_NOINTERFACE;
    },
    // Called once on chrome window load: hook prefs, build UI, greet user.
    register: function()
    {
        var prefService = Components.classes["@mozilla.org/preferences-service;1"].getService(Components.interfaces.nsIPrefService);
        this._prefs = prefService.getBranch("extensions.cachestatus.");
        if ( this._prefs.getBoolPref( 'auto_update' ) ) {
            var appcontent = document.getElementById( 'appcontent' );
            if ( appcontent )
                appcontent.addEventListener( "DOMContentLoaded", this.onPageLoad, true );
        }
        // Observe every pref under our branch ("" = whole branch).
        this._branch = this._prefs;
        this._branch.QueryInterface(Components.interfaces.nsIPrefBranch2);
        this._branch.addObserver("", this, true);
        this._hbox = this.grabHBox();
        this.rebuildPresence( this._prefs.getCharPref( 'presence' ) );
        this.welcome();
    },
    // Open the add-on's welcome page once per installed version.
    welcome: function ()
    {
        //Do not show welcome page if user has turned it off from Settings.
        if (!csExtension._prefs.getBoolPref( 'welcome' )) {
            return
        }
        //Detect Firefox version
        var version = "";
        try {
            version = (navigator.userAgent.match(/Firefox\/([\d\.]*)/) || navigator.userAgent.match(/Thunderbird\/([\d\.]*)/))[1];
        } catch (e) {}
        // Inner helper: open the tab only when the stored version differs.
        function welcome(version) {
            if (csExtension._prefs.getCharPref( 'version' ) == version) {
                return;
            }
            //Showing welcome screen
            setTimeout(function () {
                var newTab = getBrowser().addTab("http://add0n.com/cache-status.html?version=" + version);
                getBrowser().selectedTab = newTab;
            }, 5000);
            csExtension._prefs.setCharPref( 'version', version );
        }
        //FF < 4.*
        var versionComparator = Components.classes["@mozilla.org/xpcom/version-comparator;1"]
            .getService(Components.interfaces.nsIVersionComparator)
            .compare(version, "4.0");
        if (versionComparator < 0) {
            // Legacy extension manager API (pre-Gecko 2).
            var extMan = Components.classes["@mozilla.org/extensions/manager;1"].getService(Components.interfaces.nsIExtensionManager);
            var addon = extMan.getItemForID("[email protected]");
            welcome(addon.version);
        }
        //FF > 4.*
        else {
            // AddonManager is async: version arrives in a callback.
            Components.utils.import("resource://gre/modules/AddonManager.jsm");
            AddonManager.getAddonByID("[email protected]", function (addon) {
                welcome(addon.version);
            });
        }
    },
    // Locate the status-bar hbox in the most recent browser window;
    // also caches that window's document in this._doc.
    grabHBox: function()
    {
        var wm = Components.classes["@mozilla.org/appshell/window-mediator;1"]
            .getService(Components.interfaces.nsIWindowMediator);
        var win = wm.getMostRecentWindow("navigator:browser");
        var found_hbox;
        if (win) {
            this._doc = win.document;
            found_hbox = win.document.getElementById("cs_presence");
        }
        //dump( "In grabHBox(): WIN: " + win + " HB: " + found_hbox + "\n" );
        return found_hbox;
    },
    // nsIObserver: react to changes on our pref branch.
    observe: function(aSubject, aTopic, aData)
    {
        if ( aTopic != 'nsPref:changed' ) return;
        // aSubject is the nsIPrefBranch we're observing (after appropriate QI)
        // aData is the name of the pref that's been changed (relative to aSubject)
        //dump( "pref changed: S: " + aSubject + " T: " + aTopic + " D: " + aData + "\n" );
        if ( aData == 'auto_update' ) {
            var add_event_handler = this._prefs.getBoolPref( 'auto_update' );
            if ( add_event_handler ) {
                window.addEventListener( 'load', this.onPageLoad, true );
            } else {
                window.removeEventListener( 'load', this.onPageLoad, true );
            }
        } else if ( aData == 'presence' ) {
            var presence = this._prefs.getCharPref( 'presence' );
            if ( presence == 'original' || presence == 'icons' ) {
                this.rebuildPresence( presence );
            } else {
                dump( "Unknown presence value: " + presence + "\n" );
            }
        }
    },
    // Rebuild the status-bar widgets for the chosen presence style:
    // 'original' = icon + text labels, 'icons' = icons with tooltips.
    rebuildPresence: function(presence)
    {
        // Take the hbox 'cs_presence' and replace it
        if ( this._hbox == null ) {
            this._hbox = this.grabHBox();
        }
        var hbox = this._hbox;
        // Drop all current children of the hbox...
        var child_node = hbox.firstChild;
        while( child_node != null ) {
            hbox.removeChild( child_node );
            child_node = hbox.firstChild;
        }
        // ...and of the tooltip popupset.
        var popupset = this._doc.getElementById( 'cs_popupset' );
        var child_node = popupset.firstChild;
        while( child_node != null ) {
            popupset.removeChild( child_node );
            child_node = popupset.firstChild;
        }
        var string_bundle = this._doc.getElementById( 'cache-status-strings' );
        if ( presence == 'original' ) {
            var ram_image = this._doc.createElement( 'image' );
            ram_image.setAttribute(
                'tooltiptext', string_bundle.getString( 'ramcache' ) );
            ram_image.setAttribute( 'src', 'chrome://cachestatus/skin/ram.png' );
            var ram_label = this._doc.createElement( 'label' );
            ram_label.setAttribute( 'id', 'cachestatus-ram-label' );
            ram_label.setAttribute(
                'value', ': ' + string_bundle.getString( 'nly' ) );
            ram_label.setAttribute(
                'tooltiptext', string_bundle.getString( 'ramcache' ) );
            var disk_image = this._doc.createElement( 'image' );
            disk_image.setAttribute(
                'tooltiptext', string_bundle.getString( 'diskcache' ) );
            disk_image.setAttribute( 'src', 'chrome://cachestatus/skin/hd.png' );
            var disk_label = this._doc.createElement( 'label' );
            disk_label.setAttribute(
                'tooltiptext', string_bundle.getString( 'diskcache' ) );
            disk_label.setAttribute( 'id', 'cachestatus-hd-label' );
            disk_label.setAttribute(
                'value', ': ' + string_bundle.getString( 'nly' ) );
            hbox.appendChild( ram_image );
            hbox.appendChild( ram_label );
            hbox.appendChild( disk_image );
            hbox.appendChild( disk_label );
        } else if ( presence == 'icons' ) {
            // Icon-only presence: figures live in tooltips instead of labels.
            var ram_tooltip = this._doc.createElement( 'tooltip' );
            ram_tooltip.setAttribute( 'id', 'ram_tooltip' );
            ram_tooltip.setAttribute( 'orient', 'horizontal' );
            var ram_desc_prefix = this._doc.createElement( 'description' );
            ram_desc_prefix.setAttribute( 'id', 'cachestatus-ram-prefix' );
            ram_desc_prefix.setAttribute(
                'value', string_bundle.getString( 'ramcache' ) + ':' );
            ram_desc_prefix.setAttribute( 'style', 'font-weight: bold;' );
            var ram_desc = this._doc.createElement( 'description' );
            ram_desc.setAttribute( 'id', 'cachestatus-ram-label' );
            ram_desc.setAttribute(
                'value', string_bundle.getString( 'nly' ) );
            ram_tooltip.appendChild( ram_desc_prefix );
            ram_tooltip.appendChild( ram_desc );
            var hd_tooltip = this._doc.createElement( 'tooltip' );
            hd_tooltip.setAttribute( 'id', 'hd_tooltip' );
            hd_tooltip.setAttribute( 'orient', 'horizontal' );
            var hd_desc_prefix = this._doc.createElement( 'description' );
            hd_desc_prefix.setAttribute( 'id', 'cachestatus-hd-prefix' );
            hd_desc_prefix.setAttribute(
                'value', string_bundle.getString( 'diskcache' ) + ':' );
            hd_desc_prefix.setAttribute( 'style', 'font-weight: bold;' );
            var hd_desc = this._doc.createElement( 'description' );
            hd_desc.setAttribute( 'id', 'cachestatus-hd-label' );
            hd_desc.setAttribute(
                'value', string_bundle.getString( 'nly' ) );
            hd_tooltip.appendChild( hd_desc_prefix );
            hd_tooltip.appendChild( hd_desc );
            popupset.appendChild( ram_tooltip );
            popupset.appendChild( hd_tooltip );
            hbox.parentNode.insertBefore( popupset, hbox );
            var ram_image = this._doc.createElement( 'image' );
            ram_image.setAttribute( 'src', 'chrome://cachestatus/skin/ram.png' );
            ram_image.setAttribute( 'tooltip', 'ram_tooltip' );
            var disk_image = this._doc.createElement( 'image' );
            disk_image.setAttribute( 'src', 'chrome://cachestatus/skin/hd.png' );
            disk_image.setAttribute( 'tooltip', 'hd_tooltip' );
            hbox.appendChild( ram_image );
            hbox.appendChild( disk_image );
        }
    }
}
// I can't just call csExtension.register directly b/c the XUL
// might not be loaded yet.
window.addEventListener( 'load', function() { csExtension.register(); }, false );
|
<|file_name|>benchmark.py<|end_file_name|><|fim▁begin|>from djangobench.utils import run_benchmark
from query_latest.models import Book
def benchmark():
    # Measured operation: a single Model.objects.latest() query.
    Book.objects.latest()

run_benchmark(
    benchmark,
    meta = {
        'description': 'A simple Model.objects.latest() call.',
    }
)
<|file_name|>generator2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Module: generator2.py
# Purpose: sample generator classes #2
# Date: N/A
# Notes:
# 1) Test python code
# 2) Ref: https://docs.python.org/2/reference/expressions.html
import numpy as np
print "generator2.py: python generator test code #2"
# echo from the generator examples
# - any function with yield in it becomes a generator
# - methods are next(), send(), throw(), and close()
def echo(value=None):
    """Generator that echoes back whatever is send() to it.

    Demonstrates next()/send()/throw()/close(): a thrown exception is
    caught and becomes the next yielded value; the finally clause runs
    when close() is called.
    """
    print "Execution starts when 'next()' is called for the first time."
    try:
        while True:
            try:
                value = (yield value)
            except Exception, e:
                value = e
    finally:
        print "Don't forget to clean up when 'close()' is called."
# Drive the echo generator through its paces: plain iteration, throw()
# delivery, send() round-trips and an explicit close().
print "testing the echo generator from examples"
g=echo(1)
v=g.next()
print "g.next()=", v
v=g.next()
print "g.next()=", v
g.throw(TypeError, "test")
print "Sending 2: ", g.send(2)
print "g.next()=", g.next()
print "Closing g"
g.close()
# test generator, return n*gv (typ
def g1(iv=1, gv=0.9):
iiv=iv
print "g1() created, iv=%f, gv=%f" % (iv, gv)
try:
while True:
try:
# compute next value for the loop
iiv=iiv*gv
# print "iiv=", iiv
if (iiv < 1):
break
# set xv if user calls send(n)
xv = yield iiv
# if xv is sane, change the loop number
if (xv != None):
print "g1() setting to ", xv<|fim▁hole|> finally:
print "g1() done, call close()"
# Exercise g1(): full iteration, next()/send() interplay, then a numpy
# generator expression for comparison.
print "Creating g1(10) generator and testing"
g=g1(10)
for x in g:
    print "x=", x
g.close()
print "Creating g1(7) and using next() and send()"
g=g1(7)
x=g.next()
print "g.next()= ", x
print "g.next()= ", g.next()
x=g.send(3)
print "g.send(3)=", x
print "g.next()= ", g.next()
print "g.next()= ", g.next()
print "numpy generator: g=(np.cos(x) for x in np.arange(0, 10, .1))"
g=(np.cos(x) for x in np.arange(0, 10, .1))
print "g.next()=", g.next()
print "g.next()=", g.next()
print "g.next()=", g.next()<|fim▁end|> | iiv=xv
except Exception, e:
yield e |
<|file_name|>TestConfig.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.configuration;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.neo4j.graphdb.config.InvalidSettingException;
import org.neo4j.graphdb.config.Setting;
import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.neo4j.kernel.configuration.Settings.BOOLEAN;
import static org.neo4j.kernel.configuration.Settings.STRING;
import static org.neo4j.kernel.configuration.Settings.setting;
import static org.neo4j.helpers.collection.MapUtil.stringMap;
public class TestConfig
{
public static class MyMigratingSettings
{
@Migrator
public static ConfigurationMigrator migrator = new BaseConfigurationMigrator()
{
{
add( new SpecificPropertyMigration( "old", "Old has been replaced by newer!" )
{
@Override
public void setValueWithOldSetting( String value, Map<String, String> rawConfiguration )
{
rawConfiguration.put( newer.name(), value );
}
} );
}
};
<|fim▁hole|> public static Setting<String> newer = setting( "newer", STRING, "" );
}
public static class MySettingsWithDefaults
{
public static Setting<String> hello = setting( "hello", STRING, "Hello, World!" );
public static Setting<Boolean> boolSetting = setting( "bool_setting", BOOLEAN, Settings.TRUE );
}
private class ChangeCaptureListener implements ConfigurationChangeListener
{
private Set<ConfigurationChange> lastChangeSet;
@Override
public void notifyConfigurationChanges( Iterable<ConfigurationChange> change )
{
lastChangeSet = new HashSet<>();
for ( ConfigurationChange ch : change )
{
lastChangeSet.add( ch );
}
}
}
private <T> Set<T> setOf( T... objs )
{
Set<T> set = new HashSet<>();
Collections.addAll( set, objs );
return set;
}
@Test
public void shouldApplyDefaults()
{
Config config = new Config( new HashMap<String, String>(), MySettingsWithDefaults.class );
assertThat( config.get( MySettingsWithDefaults.hello ), is( "Hello, World!" ) );
}
@Test
public void shouldApplyMigrations()
{
// When
Config config = new Config( stringMap("old", "hello!"), MyMigratingSettings.class );
// Then
assertThat( config.get( MyMigratingSettings.newer ), is( "hello!" ) );
}
@Test( expected = InvalidSettingException.class )
public void shouldNotAllowSettingInvalidValues()
{
Config config = new Config( new HashMap<String, String>(), MySettingsWithDefaults.class );
Map<String, String> params = config.getParams();
params.put( MySettingsWithDefaults.boolSetting.name(), "asd" );
config.applyChanges( params );
fail( "Expected validation to fail." );
}
@Test( expected = InvalidSettingException.class )
public void shouldNotAllowInvalidValuesInConstructor()
{
new Config( stringMap( MySettingsWithDefaults.boolSetting.name(), "asd" ), MySettingsWithDefaults.class );
fail( "Expected validation to fail." );
}
@Test
public void shouldNotifyChangeListenersWhenNewSettingsAreApplied()
{
// Given
Config config = new Config( stringMap("setting", "old"), MyMigratingSettings.class );
ChangeCaptureListener listener = new ChangeCaptureListener();
config.addConfigurationChangeListener( listener );
// When
config.applyChanges( stringMap( "setting", "new" ) );
// Then
assertThat( listener.lastChangeSet,
is( setOf( new ConfigurationChange( "setting", "old", "new" ) ) ) );
}
@Test
public void shouldNotNotifyChangeListenerWhenNothingChanged()
{
// Given
Config config = new Config( stringMap("setting", "old"), MyMigratingSettings.class );
ChangeCaptureListener listener = new ChangeCaptureListener();
config.addConfigurationChangeListener( listener );
// When
config.applyChanges( stringMap( "setting", "old" ) ); // nothing really changed here
// Then
assertThat( listener.lastChangeSet, nullValue() );
}
@Test
public void settingNewPropertyMustNotAlterExistingSettings()
{
// Given
Config config = new Config( stringMap( "a", "1" ) );
// When
config.setProperty( "b", "2" );
// Then
assertThat( config.getParams(), is( stringMap( "a", "1", "b", "2" ) ) );
}
@Test
public void shouldBeAbleToRegisterSettingsClassesAfterInstantiation() throws Exception
{
// Given
Config config = new Config( stringMap( "old", "hello!" ) );
// When
config.registerSettingsClasses( asList( MySettingsWithDefaults.class, MyMigratingSettings.class ) );
// Then
assertThat( config.get( MyMigratingSettings.newer ), equalTo( "hello!" ) );
assertThat( config.get( MySettingsWithDefaults.hello ), equalTo( "Hello, World!" ) );
}
@Test
public void shouldBeAbleToAgumentConfig() throws Exception
{
// Given
Config config = new Config( stringMap( "newer", "old", "non-overlapping", "huzzah" ) );
// When
config.augment( stringMap( "newer", "new", "unrelated", "hello" ) );
// Then
assertThat( config.get( setting("newer", STRING, "") ), equalTo( "new" ) );
assertThat( config.get( setting("non-overlapping", STRING, "") ), equalTo( "huzzah" ) );
assertThat( config.get( setting("unrelated", STRING, "") ), equalTo( "hello" ) );
}
}<|fim▁end|> | |
<|file_name|>JRubyExample.java<|end_file_name|><|fim▁begin|>package me.soulmachine;
import org.jruby.embed.ScriptingContainer;
import java.util.List;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
/**
* A simple JRuby example to execute Python scripts from Java.
*/
final class JRubyExample {
private JRubyExample() {}
/**
* Main entrypoint.
*
* @param args arguments
* @throws ScriptException ScriptException
*/
public static void main(final String[] args) throws ScriptException {
listEngines();
final String rubyHelloWord = "puts 'Hello World from JRuby!'";
// First way: Use built-in ScriptEngine from JDK
{
final ScriptEngineManager mgr = new ScriptEngineManager();<|fim▁hole|>
try {
pyEngine.eval(rubyHelloWord);
} catch (ScriptException ex) {
ex.printStackTrace();
}
}
// Second way: Use ScriptingContainer() from JRuby
{
final ScriptingContainer scriptingContainer = new ScriptingContainer();
scriptingContainer.runScriptlet(rubyHelloWord);
}
// Call Ruby Methods from Java
{
final ScriptingContainer scriptingContainer = new ScriptingContainer();
final String rubyMethod = "def myAdd(a,b)\n\treturn a+b\nend";
final Object receiver = scriptingContainer.runScriptlet(rubyMethod);
final Object[] arguments = new Object[2];
arguments[0] = Integer.valueOf(6);
arguments[1] = Integer.valueOf(4);
final Integer result = scriptingContainer.callMethod(receiver, "myAdd",
arguments, Integer.class);
System.out.println("Result: " + result);
}
}
/**
* Display all script engines.
*/
public static void listEngines() {
final ScriptEngineManager mgr = new ScriptEngineManager();
final List<ScriptEngineFactory> factories =
mgr.getEngineFactories();
for (final ScriptEngineFactory factory: factories) {
System.out.println("ScriptEngineFactory Info");
final String engName = factory.getEngineName();
final String engVersion = factory.getEngineVersion();
final String langName = factory.getLanguageName();
final String langVersion = factory.getLanguageVersion();
System.out.printf("\tScript Engine: %s (%s)\n", engName, engVersion);
final List<String> engNames = factory.getNames();
for (final String name: engNames) {
System.out.printf("\tEngine Alias: %s\n", name);
}
System.out.printf("\tLanguage: %s (%s)\n", langName, langVersion);
}
}
}<|fim▁end|> | final ScriptEngine pyEngine = mgr.getEngineByName("ruby"); |
<|file_name|>barnes_two_planets.py<|end_file_name|><|fim▁begin|>#!usr/bin/env python<|fim▁hole|>from pyspace.simulator import BarnesSimulator
import numpy
x = numpy.array([0,100])
y = numpy.array([0,0])
z = numpy.array([0,0])
m = numpy.array([1000,1])
v_y = numpy.array([0,(1000/100)**0.5])
pa = PlanetArray(x, y, z, v_y=v_y, m=m)
sim = BarnesSimulator(pa, 1, 1, 0, sim_name = "two_planets")
sim.simulate(1000, dump_output = True)<|fim▁end|> | from pyspace.planet import PlanetArray |
<|file_name|>0039_auto__add_externalaccount.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: create the ``users_externalaccount`` table for the new
        ``ExternalAccount`` model and emit South's post-create signal.
        """
        # Adding model 'ExternalAccount'
        db.create_table('users_externalaccount', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['users.UserProfile'])),
            ('username', self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True)),
            # 'type' and 'privacy' are stored as bare positive integers;
            # presumably their symbolic meanings live in application code --
            # verify against the ExternalAccount model before relying on them.
            ('type', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
            ('privacy', self.gf('django.db.models.fields.PositiveIntegerField')(default=3)),
        ))
        db.send_create_signal('users', ['ExternalAccount'])
    def backwards(self, orm):
        """Revert: drop the ``users_externalaccount`` table (destroys any
        rows created since :meth:`forwards` ran).
        """
        # Deleting model 'ExternalAccount'
        db.delete_table('users_externalaccount')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'groups.group': {
'Meta': {'ordering': "['name']", 'object_name': 'Group'},
'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'irc_channel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'steward': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.UserProfile']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'system': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'wiki': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'groups.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},<|fim▁hole|> 'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'})
},
'groups.skill': {
'Meta': {'ordering': "['name']", 'object_name': 'Skill'},
'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'})
},
'users.externalaccount': {
'Meta': {'object_name': 'ExternalAccount'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'privacy': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3'}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.UserProfile']"}),
'username': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
'users.usernameblacklist': {
'Meta': {'ordering': "['value']", 'object_name': 'UsernameBlacklist'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_regex': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'users.userprofile': {
'Meta': {'ordering': "['full_name']", 'object_name': 'UserProfile', 'db_table': "'profile'"},
'allows_community_sites': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'allows_mozilla_sites': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'basket_token': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
'date_mozillian': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'date_vouched': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ircname': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'is_vouched': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Language']"}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'photo': ('sorl.thumbnail.fields.ImageField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'privacy_bio': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_city': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_country': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_date_mozillian': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_email': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_full_name': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_groups': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_ircname': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_languages': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_photo': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_region': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_skills': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_timezone': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_title': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_tshirt': ('mozillians.users.models.PrivacyField', [], {'default': '1'}),
'privacy_vouched_by': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_website': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'region': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'skills': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Skill']"}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'tshirt': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '70', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'vouched_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vouchees'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['users.UserProfile']", 'blank': 'True', 'null': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['users']<|fim▁end|> | 'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}), |
<|file_name|>bitcoin_bs.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="bs" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About phreak</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>phreak</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The phreak developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your phreak addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a phreak address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified phreak address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-58"/>
<source>phreak will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show information about phreak</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a phreak address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for phreak</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>phreak</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+180"/>
<source>&About phreak</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>phreak client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to phreak network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About phreak card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about phreak card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid phreak address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. phreak can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid phreak address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>phreak-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start phreak after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start phreak on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the phreak client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the phreak network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting phreak.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show phreak addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting phreak.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the phreak network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the phreak-Qt help message to get a list with possible phreak command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>phreak - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>phreak Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the phreak debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the phreak RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 PHR</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>123.456 PHR</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a phreak address (e.g. PHR1xGeKnTkaAotEVgs2rnUfVsFv8LVSM)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid phreak address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. PHR1xGeKnTkaAotEVgs2rnUfVsFv8LVSM)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a phreak address (e.g. PHR1xGeKnTkaAotEVgs2rnUfVsFv8LVSM)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. PHR1xGeKnTkaAotEVgs2rnUfVsFv8LVSM)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this phreak address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. PHR1xGeKnTkaAotEVgs2rnUfVsFv8LVSM)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified phreak address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a phreak address (e.g. PHR1xGeKnTkaAotEVgs2rnUfVsFv8LVSM)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter phreak signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
    <location line="+7"/>
    <source>, broadcast through %n node(s)</source>
    <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
    <location line="+7"/>
    <source>Date</source>
    <translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 110 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Sve</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Danas</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Ovaj mjesec</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Prošli mjesec</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Ove godine</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>phreak version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or phreakd</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: phreak.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: phreakd.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong phreak will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=phreakrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "phreak Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. phreak is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>phreak</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of phreak</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart phreak to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. phreak is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/> |
<|file_name|>local-store-manager.service.ts<|end_file_name|><|fim▁begin|>// =============================
// Email: [email protected]
// www.ebenmonney.com/templates
// =============================
import { Injectable } from '@angular/core';
import { Observable, Subject } from 'rxjs';
import { Utilities } from './utilities';
@Injectable()
/**
* Provides a wrapper for accessing the web storage API and synchronizing session storage across tabs/windows.
*/
export class LocalStoreManager {
private static syncListenerInitialised = false;
public static readonly DBKEY_USER_DATA = 'user_data';
private static readonly DBKEY_SYNC_KEYS = 'sync_keys';
private syncKeys: string[] = [];
private initEvent = new Subject();
private reservedKeys: string[] =
[
'sync_keys',
'addToSyncKeys',
'removeFromSyncKeys',
'getSessionStorage',
'setSessionStorage',
'addToSessionStorage',
'removeFromSessionStorage',
'clearAllSessionsStorage'
];
public initialiseStorageSyncListener() {
if (LocalStoreManager.syncListenerInitialised === true) {
return;
}
LocalStoreManager.syncListenerInitialised = true;
window.addEventListener('storage', this.sessionStorageTransferHandler, false);
this.syncSessionStorage();
}
public deinitialiseStorageSyncListener() {
window.removeEventListener('storage', this.sessionStorageTransferHandler, false);
LocalStoreManager.syncListenerInitialised = false;
}
public clearAllStorage() {
this.clearAllSessionsStorage();
this.clearLocalStorage();
}
public clearAllSessionsStorage() {
this.clearInstanceSessionStorage();
localStorage.removeItem(LocalStoreManager.DBKEY_SYNC_KEYS);
localStorage.setItem('clearAllSessionsStorage', '_dummy');
localStorage.removeItem('clearAllSessionsStorage');
}
public clearInstanceSessionStorage() {
sessionStorage.clear();
this.syncKeys = [];
}
public clearLocalStorage() {
localStorage.clear();
}
public saveSessionData(data: any, key = LocalStoreManager.DBKEY_USER_DATA) {
this.testForInvalidKeys(key);
this.removeFromSyncKeys(key);
localStorage.removeItem(key);
this.sessionStorageSetItem(key, data);
}
public saveSyncedSessionData(data: any, key = LocalStoreManager.DBKEY_USER_DATA) {
this.testForInvalidKeys(key);
localStorage.removeItem(key);
this.addToSessionStorage(data, key);
}
public savePermanentData(data: any, key = LocalStoreManager.DBKEY_USER_DATA) {
this.testForInvalidKeys(key);
this.removeFromSessionStorage(key);
this.localStorageSetItem(key, data);
}
public moveDataToSessionStorage(key = LocalStoreManager.DBKEY_USER_DATA) {
this.testForInvalidKeys(key);
const data = this.getData(key);
if (data == null) {
return;
}
this.saveSessionData(data, key);
}
public moveDataToSyncedSessionStorage(key = LocalStoreManager.DBKEY_USER_DATA) {
this.testForInvalidKeys(key);
const data = this.getData(key);
if (data == null) {
return;
}
this.saveSyncedSessionData(data, key);
}
public moveDataToPermanentStorage(key = LocalStoreManager.DBKEY_USER_DATA) {
this.testForInvalidKeys(key);
const data = this.getData(key);
if (data == null) {
return;
}
this.savePermanentData(data, key);
}
public exists(key = LocalStoreManager.DBKEY_USER_DATA) {
let data = sessionStorage.getItem(key);
if (data == null) {
data = localStorage.getItem(key);
}
return data != null;
}
public getData(key = LocalStoreManager.DBKEY_USER_DATA) {
this.testForInvalidKeys(key);
let data = this.sessionStorageGetItem(key);
<|fim▁hole|> if (data == null) {
data = this.localStorageGetItem(key);
}
return data;
}
public getDataObject<T>(key = LocalStoreManager.DBKEY_USER_DATA, isDateType = false): T {
let data = this.getData(key);
if (data != null) {
if (isDateType) {
data = new Date(data);
}
return data as T;
} else {
return null;
}
}
public deleteData(key = LocalStoreManager.DBKEY_USER_DATA) {
this.testForInvalidKeys(key);
this.removeFromSessionStorage(key);
localStorage.removeItem(key);
}
public getInitEvent(): Observable<{}> {
return this.initEvent.asObservable();
}
private sessionStorageTransferHandler = (event: StorageEvent) => {
if (!event.newValue) {
return;
}
if (event.key === 'getSessionStorage') {
if (sessionStorage.length) {
this.localStorageSetItem('setSessionStorage', sessionStorage);
localStorage.removeItem('setSessionStorage');
}
} else if (event.key === 'setSessionStorage') {
if (!this.syncKeys.length) {
this.loadSyncKeys();
}
const data = JSON.parse(event.newValue);
// console.info("Set => Key: Transfer setSessionStorage" + ", data: " + JSON.stringify(data));
for (const key in data) {
if (this.syncKeysContains(key)) {
this.sessionStorageSetItem(key, JSON.parse(data[key]));
}
}
this.onInit();
} else if (event.key === 'addToSessionStorage') {
const data = JSON.parse(event.newValue);
// console.warn("Set => Key: Transfer addToSessionStorage" + ", data: " + JSON.stringify(data));
this.addToSessionStorageHelper(data.data, data.key);
} else if (event.key === 'removeFromSessionStorage') {
this.removeFromSessionStorageHelper(event.newValue);
} else if (event.key === 'clearAllSessionsStorage' && sessionStorage.length) {
this.clearInstanceSessionStorage();
} else if (event.key === 'addToSyncKeys') {
this.addToSyncKeysHelper(event.newValue);
} else if (event.key === 'removeFromSyncKeys') {
this.removeFromSyncKeysHelper(event.newValue);
}
}
private syncSessionStorage() {
localStorage.setItem('getSessionStorage', '_dummy');
localStorage.removeItem('getSessionStorage');
}
private addToSessionStorage(data: any, key: string) {
this.addToSessionStorageHelper(data, key);
this.addToSyncKeysBackup(key);
this.localStorageSetItem('addToSessionStorage', { key, data });
localStorage.removeItem('addToSessionStorage');
}
private addToSessionStorageHelper(data: any, key: string) {
this.addToSyncKeysHelper(key);
this.sessionStorageSetItem(key, data);
}
private removeFromSessionStorage(keyToRemove: string) {
this.removeFromSessionStorageHelper(keyToRemove);
this.removeFromSyncKeysBackup(keyToRemove);
localStorage.setItem('removeFromSessionStorage', keyToRemove);
localStorage.removeItem('removeFromSessionStorage');
}
private removeFromSessionStorageHelper(keyToRemove: string) {
sessionStorage.removeItem(keyToRemove);
this.removeFromSyncKeysHelper(keyToRemove);
}
private testForInvalidKeys(key: string) {
if (!key) {
throw new Error('key cannot be empty');
}
if (this.reservedKeys.some(x => x === key)) {
throw new Error(`The storage key "${key}" is reserved and cannot be used. Please use a different key`);
}
}
private syncKeysContains(key: string) {
return this.syncKeys.some(x => x === key);
}
private loadSyncKeys() {
if (this.syncKeys.length) {
return;
}
this.syncKeys = this.getSyncKeysFromStorage();
}
private getSyncKeysFromStorage(defaultValue: string[] = []): string[] {
const data = this.localStorageGetItem(LocalStoreManager.DBKEY_SYNC_KEYS);
if (data == null) {
return defaultValue;
} else {
return data as string[];
}
}
private addToSyncKeys(key: string) {
this.addToSyncKeysHelper(key);
this.addToSyncKeysBackup(key);
localStorage.setItem('addToSyncKeys', key);
localStorage.removeItem('addToSyncKeys');
}
private addToSyncKeysBackup(key: string) {
const storedSyncKeys = this.getSyncKeysFromStorage();
if (!storedSyncKeys.some(x => x === key)) {
storedSyncKeys.push(key);
this.localStorageSetItem(LocalStoreManager.DBKEY_SYNC_KEYS, storedSyncKeys);
}
}
private removeFromSyncKeysBackup(key: string) {
const storedSyncKeys = this.getSyncKeysFromStorage();
const index = storedSyncKeys.indexOf(key);
if (index > -1) {
storedSyncKeys.splice(index, 1);
this.localStorageSetItem(LocalStoreManager.DBKEY_SYNC_KEYS, storedSyncKeys);
}
}
private addToSyncKeysHelper(key: string) {
if (!this.syncKeysContains(key)) {
this.syncKeys.push(key);
}
}
private removeFromSyncKeys(key: string) {
this.removeFromSyncKeysHelper(key);
this.removeFromSyncKeysBackup(key);
localStorage.setItem('removeFromSyncKeys', key);
localStorage.removeItem('removeFromSyncKeys');
}
private removeFromSyncKeysHelper(key: string) {
const index = this.syncKeys.indexOf(key);
if (index > -1) {
this.syncKeys.splice(index, 1);
}
}
private localStorageSetItem(key: string, data: any) {
localStorage.setItem(key, JSON.stringify(data));
}
private sessionStorageSetItem(key: string, data: any) {
sessionStorage.setItem(key, JSON.stringify(data));
}
private localStorageGetItem(key: string) {
return Utilities.JsonTryParse(localStorage.getItem(key));
}
private sessionStorageGetItem(key: string) {
return Utilities.JsonTryParse(sessionStorage.getItem(key));
}
private onInit() {
setTimeout(() => {
this.initEvent.next();
this.initEvent.complete();
});
}
}<|fim▁end|> | |
<|file_name|>PokecheckmeCommand.java<|end_file_name|><|fim▁begin|>package com.pixelutilitys.commands;
import com.pixelmonmod.pixelmon.Pixelmon;
import com.pixelmonmod.pixelmon.enums.EnumGui;
import net.minecraft.command.CommandBase;
import net.minecraft.command.ICommandSender;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.ChatComponentTranslation;
public class PokecheckmeCommand extends CommandBase {
@Override
public String getCommandName() {
return "pokecheckme";
}
@Override
public int getRequiredPermissionLevel() {
return 2;
}
@Override
public String getCommandUsage(ICommandSender icommandsender) {<|fim▁hole|> @Override
public void processCommand(ICommandSender sender, String[] astring) {
// TODO Auto-generated method stub
EntityPlayer player = (EntityPlayer) sender;
player.openGui(Pixelmon.instance, EnumGui.PC.getIndex(), null, 0, 0, 0);
ChatComponentTranslation success = new ChatComponentTranslation("You have successfuly opened your pc files!");
sender.addChatMessage(success);
}
@Override
public int compareTo(Object arg0) {
// TODO Auto-generated method stub
return 0;
}
}<|fim▁end|> | return "/pokecheckme";
}
|
<|file_name|>deletionServerInput.ts<|end_file_name|><|fim▁begin|>/**
* vinimay
* Vinimay is a decentralised social network focused on giving back control of its data to the user
*
* OpenAPI spec version: 0.1.0
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
export interface DeletionServerInput {<|fim▁hole|> */
token: string;
/**
* Request signature, required if the frienship was previously accepted
*/
signature?: string;
}<|fim▁end|> | /**
* The token identifying the relationship |
<|file_name|>behavior_binary_table.py<|end_file_name|><|fim▁begin|>"""
BORIS
Behavioral Observation Research Interactive Software
Copyright 2012-2022 Olivier Friard
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import os
import pathlib
import re
import sys
from decimal import Decimal as dc
import tablib
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QFileDialog, QInputDialog, QMessageBox)
from boris import dialog
from boris import project_functions
from boris import select_observations
from boris import utilities
from boris.config import *
def create_behavior_binary_table(pj: dict, selected_observations: list, parameters_obs: dict,
time_interval: float) -> dict:
"""
create behavior binary table
Args:
pj (dict): project dictionary
selected_observations (list): list of selected observations
parameters_obs (dict): dcit of parameters
time_interval (float): time interval (in seconds)
Returns:
dict: dictionary of tablib dataset
"""
results_df = {}
state_behavior_codes = [
x for x in utilities.state_behavior_codes(pj[ETHOGRAM]) if x in parameters_obs[SELECTED_BEHAVIORS]
]
point_behavior_codes = [
x for x in utilities.point_behavior_codes(pj[ETHOGRAM]) if x in parameters_obs[SELECTED_BEHAVIORS]
]
if not state_behavior_codes and not point_behavior_codes:
return {"error": True, "msg": "No state events selected"}
for obs_id in selected_observations:
start_time = parameters_obs[START_TIME]
end_time = parameters_obs[END_TIME]
# check observation interval
if parameters_obs["time"] == TIME_FULL_OBS:
max_obs_length, _ = project_functions.observation_length(pj, [obs_id])
start_time = dc("0.000")
end_time = dc(max_obs_length)
if parameters_obs["time"] == TIME_EVENTS:
try:
start_time = dc(pj[OBSERVATIONS][obs_id][EVENTS][0][0])
except Exception:
start_time = dc("0.000")
try:
end_time = dc(pj[OBSERVATIONS][obs_id][EVENTS][-1][0])
except Exception:
max_obs_length, _ = project_functions.observation_length(pj, [obs_id])
end_time = dc(max_obs_length)
if obs_id not in results_df:
results_df[obs_id] = {}
for subject in parameters_obs[SELECTED_SUBJECTS]:
# extract tuple (behavior, modifier)
behav_modif_list = [(idx[2], idx[3]) for idx in pj[OBSERVATIONS][obs_id][EVENTS] if idx[1] == (
subject if subject != NO_FOCAL_SUBJECT else "") and idx[2] in parameters_obs[SELECTED_BEHAVIORS]]
# extract observed subjects NOT USED at the moment
observed_subjects = [event[EVENT_SUBJECT_FIELD_IDX] for event in pj[OBSERVATIONS][obs_id][EVENTS]]
# add selected behavior if not found in (behavior, modifier)
if not parameters_obs[EXCLUDE_BEHAVIORS]:
#for behav in state_behavior_codes:
for behav in parameters_obs[SELECTED_BEHAVIORS]:
if behav not in [x[0] for x in behav_modif_list]:
behav_modif_list.append((behav, ""))
behav_modif_set = set(behav_modif_list)
observed_behav = [(x[0], x[1]) for x in sorted(behav_modif_set)]
if parameters_obs[INCLUDE_MODIFIERS]:
results_df[obs_id][subject] = tablib.Dataset(
headers=["time"] + [f"{x[0]}" + f" ({x[1]})" * (x[1] != "") for x in sorted(behav_modif_set)])
else:
results_df[obs_id][subject] = tablib.Dataset(headers=["time"] + [x[0] for x in sorted(behav_modif_set)])
if subject == NO_FOCAL_SUBJECT:
sel_subject_dict = {"": {SUBJECT_NAME: ""}}
else:
sel_subject_dict = dict([
(idx, pj[SUBJECTS][idx]) for idx in pj[SUBJECTS] if pj[SUBJECTS][idx][SUBJECT_NAME] == subject
])
row_idx = 0
t = start_time
while t <= end_time:
# state events
current_states = utilities.get_current_states_modifiers_by_subject_2(
state_behavior_codes, pj[OBSERVATIONS][obs_id][EVENTS], sel_subject_dict, t)
# point events
current_point = utilities.get_current_points_by_subject(point_behavior_codes,
pj[OBSERVATIONS][obs_id][EVENTS],<|fim▁hole|> for behav in observed_behav:
if behav[0] in state_behavior_codes:
cols.append(int(behav in current_states[list(current_states.keys())[0]]))
if behav[0] in point_behavior_codes:
cols.append(current_point[list(current_point.keys())[0]].count(behav))
results_df[obs_id][subject].append(cols)
t += time_interval
row_idx += 1
return results_df
def behavior_binary_table(pj: dict):
"""
ask user for parameters for behavior binary table
call create_behavior_binary_table
"""
_, selected_observations = select_observations.select_observations(
pj, MULTIPLE, "Select observations for the behavior binary table")
if not selected_observations:
return
# check if state events are paired
out = ""
not_paired_obs_list = []
for obs_id in selected_observations:
r, msg = project_functions.check_state_events_obs(obs_id, pj[ETHOGRAM], pj[OBSERVATIONS][obs_id])
if not r:
out += f"Observation: <strong>{obs_id}</strong><br>{msg}<br>"
not_paired_obs_list.append(obs_id)
if out:
out = f"The observations with UNPAIRED state events will be removed from the analysis<br><br>{out}"
results = dialog.Results_dialog()
results.setWindowTitle(f"{programName} - Check selected observations")
results.ptText.setReadOnly(True)
results.ptText.appendHtml(out)
results.pbSave.setVisible(False)
results.pbCancel.setVisible(True)
if not results.exec_():
return
selected_observations = [x for x in selected_observations if x not in not_paired_obs_list]
if not selected_observations:
return
max_obs_length, _ = project_functions.observation_length(pj, selected_observations)
if max_obs_length == -1: # media length not available, user choose to not use events
return
parameters = dialog.choose_obs_subj_behav_category(pj,
selected_observations,
maxTime=max_obs_length,
flagShowIncludeModifiers=True,
flagShowExcludeBehaviorsWoEvents=True,
by_category=False)
if not parameters[SELECTED_SUBJECTS] or not parameters[SELECTED_BEHAVIORS]:
QMessageBox.warning(None, programName, "Select subject(s) and behavior(s) to analyze")
return
# ask for time interval
i, ok = QInputDialog.getDouble(None, "Behavior binary table", "Time interval (in seconds):", 1.0, 0.001, 86400, 3)
if not ok:
return
time_interval = utilities.float2decimal(i)
results_df = create_behavior_binary_table(pj, selected_observations, parameters, time_interval)
if "error" in results_df:
QMessageBox.warning(None, programName, results_df["msg"])
return
# save results
if len(selected_observations) == 1:
extended_file_formats = [
"Tab Separated Values (*.tsv)", "Comma Separated Values (*.csv)", "Open Document Spreadsheet ODS (*.ods)",
"Microsoft Excel Spreadsheet XLSX (*.xlsx)", "Legacy Microsoft Excel Spreadsheet XLS (*.xls)",
"HTML (*.html)"
]
file_formats = ["tsv", "csv", "ods", "xlsx", "xls", "html"]
file_name, filter_ = QFileDialog().getSaveFileName(None, "Save results", "", ";;".join(extended_file_formats))
if not file_name:
return
output_format = file_formats[extended_file_formats.index(filter_)]
if pathlib.Path(file_name).suffix != "." + output_format:
file_name = str(pathlib.Path(file_name)) + "." + output_format
# check if file with new extension already exists
if pathlib.Path(file_name).is_file():
if dialog.MessageDialog(programName, f"The file {file_name} already exists.",
[CANCEL, OVERWRITE]) == CANCEL:
return
else:
items = ("Tab Separated Values (*.tsv)", "Comma separated values (*.csv)", "Open Document Spreadsheet (*.ods)",
"Microsoft Excel Spreadsheet XLSX (*.xlsx)", "Legacy Microsoft Excel Spreadsheet XLS (*.xls)",
"HTML (*.html)")
item, ok = QInputDialog.getItem(None, "Save results", "Available formats", items, 0, False)
if not ok:
return
output_format = re.sub(".* \(\*\.", "", item)[:-1]
export_dir = QFileDialog().getExistingDirectory(None,
"Choose a directory to save results",
os.path.expanduser("~"),
options=QFileDialog.ShowDirsOnly)
if not export_dir:
return
mem_command = ""
for obs_id in results_df:
for subject in results_df[obs_id]:
if len(selected_observations) > 1:
file_name_with_subject = str(
pathlib.Path(export_dir) / utilities.safeFileName(obs_id + "_" + subject)) + "." + output_format
else:
file_name_with_subject = str(os.path.splitext(file_name)[0] +
utilities.safeFileName("_" + subject)) + "." + output_format
# check if file with new extension already exists
if mem_command != OVERWRITE_ALL and pathlib.Path(file_name_with_subject).is_file():
if mem_command == "Skip all":
continue
mem_command = dialog.MessageDialog(programName, f"The file {file_name_with_subject} already exists.",
[OVERWRITE, OVERWRITE_ALL, "Skip", "Skip all", CANCEL])
if mem_command == CANCEL:
return
if mem_command in ["Skip", "Skip all"]:
continue
try:
if output_format in ["csv", "tsv", "html"]:
with open(file_name_with_subject, "wb") as f:
f.write(str.encode(results_df[obs_id][subject].export(output_format)))
if output_format in ["ods", "xlsx", "xls"]:
with open(file_name_with_subject, "wb") as f:
f.write(results_df[obs_id][subject].export(output_format))
except Exception:
error_type, error_file_name, error_lineno = utilities.error_info(sys.exc_info())
logging.critical(
f"Error in behavior binary table function: {error_type} {error_file_name} {error_lineno}")
QMessageBox.critical(None, programName, f"Error saving file: {error_type}")
return<|fim▁end|> | sel_subject_dict, t, time_interval)
cols = [float(t)] # time
|
<|file_name|>moc_qtesteventloop.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
** Meta object code from reading C++ file 'qtesteventloop.h'
**
** Created: Mon 26. Nov 09:22:32 2012
** by: The Qt Meta Object Compiler version 63 (Qt 4.8.4)
**
** WARNING! All changes made in this file will be lost!
*****************************************************************************/
#include "../../../qtesteventloop.h"
#if !defined(Q_MOC_OUTPUT_REVISION)<|fim▁hole|>#error "The header file 'qtesteventloop.h' doesn't include <QObject>."
#elif Q_MOC_OUTPUT_REVISION != 63
#error "This file was generated using the moc from 4.8.4. It"
#error "cannot be used with the include files from this version of Qt."
#error "(The moc has changed too much.)"
#endif
QT_BEGIN_MOC_NAMESPACE
static const uint qt_meta_data_QTestEventLoop[] = {
// content:
6, // revision
0, // classname
0, 0, // classinfo
1, 14, // methods
0, 0, // properties
0, 0, // enums/sets
0, 0, // constructors
0, // flags
0, // signalCount
// slots: signature, parameters, type, tag, flags
16, 15, 15, 15, 0x0a,
0 // eod
};
static const char qt_meta_stringdata_QTestEventLoop[] = {
"QTestEventLoop\0\0exitLoop()\0"
};
void QTestEventLoop::qt_static_metacall(QObject *_o, QMetaObject::Call _c, int _id, void **_a)
{
if (_c == QMetaObject::InvokeMetaMethod) {
Q_ASSERT(staticMetaObject.cast(_o));
QTestEventLoop *_t = static_cast<QTestEventLoop *>(_o);
switch (_id) {
case 0: _t->exitLoop(); break;
default: ;
}
}
Q_UNUSED(_a);
}
const QMetaObjectExtraData QTestEventLoop::staticMetaObjectExtraData = {
0, qt_static_metacall
};
const QMetaObject QTestEventLoop::staticMetaObject = {
{ &QObject::staticMetaObject, qt_meta_stringdata_QTestEventLoop,
qt_meta_data_QTestEventLoop, &staticMetaObjectExtraData }
};
#ifdef Q_NO_DATA_RELOCATION
const QMetaObject &QTestEventLoop::getStaticMetaObject() { return staticMetaObject; }
#endif //Q_NO_DATA_RELOCATION
const QMetaObject *QTestEventLoop::metaObject() const
{
return QObject::d_ptr->metaObject ? QObject::d_ptr->metaObject : &staticMetaObject;
}
void *QTestEventLoop::qt_metacast(const char *_clname)
{
if (!_clname) return 0;
if (!strcmp(_clname, qt_meta_stringdata_QTestEventLoop))
return static_cast<void*>(const_cast< QTestEventLoop*>(this));
return QObject::qt_metacast(_clname);
}
int QTestEventLoop::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
_id = QObject::qt_metacall(_c, _id, _a);
if (_id < 0)
return _id;
if (_c == QMetaObject::InvokeMetaMethod) {
if (_id < 1)
qt_static_metacall(this, _c, _id, _a);
_id -= 1;
}
return _id;
}
QT_END_MOC_NAMESPACE<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Weather component that handles meteorological data for your location.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/weather/
"""
import asyncio
import logging
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.temperature import display_temp as show_temp
from homeassistant.const import PRECISION_WHOLE, PRECISION_TENTHS, TEMP_CELSIUS
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = []
DOMAIN = 'weather'
ENTITY_ID_FORMAT = DOMAIN + '.{}'
ATTR_CONDITION_CLASS = 'condition_class'
ATTR_FORECAST = 'forecast'
ATTR_FORECAST_CONDITION = 'condition'
ATTR_FORECAST_PRECIPITATION = 'precipitation'
ATTR_FORECAST_TEMP = 'temperature'
ATTR_FORECAST_TEMP_LOW = 'templow'
ATTR_FORECAST_TIME = 'datetime'
ATTR_WEATHER_ATTRIBUTION = 'attribution'
ATTR_WEATHER_HUMIDITY = 'humidity'
ATTR_WEATHER_OZONE = 'ozone'
ATTR_WEATHER_PRESSURE = 'pressure'
ATTR_WEATHER_TEMPERATURE = 'temperature'
ATTR_WEATHER_VISIBILITY = 'visibility'
ATTR_WEATHER_WIND_BEARING = 'wind_bearing'
ATTR_WEATHER_WIND_SPEED = 'wind_speed'
@asyncio.coroutine
def async_setup(hass, config):
"""Set up the weather component."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
yield from component.async_setup(config)
return True
class WeatherEntity(Entity):<|fim▁hole|> @property
def temperature(self):
"""Return the platform temperature."""
raise NotImplementedError()
@property
def temperature_unit(self):
"""Return the unit of measurement."""
raise NotImplementedError()
@property
def pressure(self):
"""Return the pressure."""
return None
@property
def humidity(self):
"""Return the humidity."""
raise NotImplementedError()
@property
def wind_speed(self):
"""Return the wind speed."""
return None
@property
def wind_bearing(self):
"""Return the wind bearing."""
return None
@property
def ozone(self):
"""Return the ozone level."""
return None
@property
def attribution(self):
"""Return the attribution."""
return None
@property
def visibility(self):
"""Return the visibility."""
return None
@property
def forecast(self):
"""Return the forecast."""
return None
@property
def precision(self):
"""Return the forecast."""
return PRECISION_TENTHS if self.temperature_unit == TEMP_CELSIUS \
else PRECISION_WHOLE
@property
def state_attributes(self):
"""Return the state attributes."""
data = {
ATTR_WEATHER_TEMPERATURE: show_temp(
self.hass, self.temperature, self.temperature_unit,
self.precision),
}
humidity = self.humidity
if humidity is not None:
data[ATTR_WEATHER_HUMIDITY] = round(humidity)
ozone = self.ozone
if ozone is not None:
data[ATTR_WEATHER_OZONE] = ozone
pressure = self.pressure
if pressure is not None:
data[ATTR_WEATHER_PRESSURE] = pressure
wind_bearing = self.wind_bearing
if wind_bearing is not None:
data[ATTR_WEATHER_WIND_BEARING] = wind_bearing
wind_speed = self.wind_speed
if wind_speed is not None:
data[ATTR_WEATHER_WIND_SPEED] = wind_speed
visibility = self.visibility
if visibility is not None:
data[ATTR_WEATHER_VISIBILITY] = visibility
attribution = self.attribution
if attribution is not None:
data[ATTR_WEATHER_ATTRIBUTION] = attribution
if self.forecast is not None:
forecast = []
for forecast_entry in self.forecast:
forecast_entry = dict(forecast_entry)
forecast_entry[ATTR_FORECAST_TEMP] = show_temp(
self.hass, forecast_entry[ATTR_FORECAST_TEMP],
self.temperature_unit, self.precision)
if ATTR_FORECAST_TEMP_LOW in forecast_entry:
forecast_entry[ATTR_FORECAST_TEMP_LOW] = show_temp(
self.hass, forecast_entry[ATTR_FORECAST_TEMP_LOW],
self.temperature_unit, self.precision)
forecast.append(forecast_entry)
data[ATTR_FORECAST] = forecast
return data
@property
def state(self):
"""Return the current state."""
return self.condition
    @property
    def condition(self):
        """Return the current condition.

        Abstract: concrete weather platforms must override this property.
        """
        raise NotImplementedError()
|
<|file_name|>sample-nowplaying.js<|end_file_name|><|fim▁begin|>// GET /api/v1/nowplaying/groovesalad
{
"stationId": "groovesalad",
"time": 1425871720000,
"artist": "Panorama",<|fim▁hole|> "artistCorrected": false,
"albumCorrected": false,
"corrected": false,
"duration": 335000,
"durationEstimated": false
}<|fim▁end|> | "title": "Selene",
"album": "Panorama",
"trackCorrected": false, |
<|file_name|>cmdi2html.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 Max Planck Institute for Psycholinguistics
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// Wire up the toggle links so a click flips the surrounding CMDI group
// between its collapsed and expanded presentation states.
function init_cmdi() {
    $("a.toggle").click(function () {
        var group = $(this).parent().parent();
        group.toggleClass('collapsed');
        group.toggleClass('expanded');
    });
}
function expand_highlighted_cmdi() {
$(".searchword").parents('.IMDI_group.cmdi').removeClass('collapsed');<|fim▁hole|>$(document).ready(init_cmdi);<|fim▁end|> | $(".searchword").parents('.IMDI_group.cmdi').addClass('expanded');
}
|
<|file_name|>test_curried.py<|end_file_name|><|fim▁begin|>import toolz
from toolz.curried import take, first, second, sorted, merge_with
def test_take():
assert list(take(2)([1, 2, 3])) == [1, 2]
<|fim▁hole|>
def test_merge_with():
assert merge_with(sum)({1: 1}, {1: 2}) == {1: 3}
def test_sorted():
assert sorted(key=second)([(1, 2), (2, 1)]) == [(2, 1), (1, 2)]<|fim▁end|> | def test_first():
assert first is toolz.itertoolz.core.first |
<|file_name|>context.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Data needed by the layout task.
#![deny(unsafe_code)]
use canvas_traits::CanvasMsg;
use css::matching::{ApplicableDeclarationsCache, StyleSharingCandidateCache};
use euclid::{Rect, Size2D};
use fnv::FnvHasher;
use gfx::display_list::OpaqueNode;
use gfx::font_cache_task::FontCacheTask;
use gfx::font_context::FontContext;
use ipc_channel::ipc::{self, IpcSender};
use msg::compositor_msg::LayerId;
use msg::constellation_msg::ConstellationChan;
use net_traits::image::base::Image;
use net_traits::image_cache_task::{ImageCacheChan, ImageCacheTask, ImageResponse, ImageState};
use net_traits::image_cache_task::{UsePlaceholder};<|fim▁hole|>use std::collections::hash_state::DefaultState;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::mpsc::{channel, Sender};
use style::selector_matching::Stylist;
use url::Url;
use util::geometry::Au;
use util::mem::HeapSizeOf;
use util::opts;
/// Per-thread layout state: a font context plus the style-matching caches.
/// Created lazily once per worker thread by `create_or_get_local_context`.
struct LocalLayoutContext {
    font_context: RefCell<FontContext>,
    applicable_declarations_cache: RefCell<ApplicableDeclarationsCache>,
    style_sharing_candidate_cache: RefCell<StyleSharingCandidateCache>,
}
impl HeapSizeOf for LocalLayoutContext {
    // FIXME(njn): measure other fields eventually.
    fn heap_size_of_children(&self) -> usize {
        // Only the font context is measured for now; the two style caches
        // are not yet accounted for (see the FIXME above).
        self.font_context.heap_size_of_children()
    }
}
thread_local!(static LOCAL_CONTEXT_KEY: RefCell<Option<Rc<LocalLayoutContext>>> = RefCell::new(None));
/// Returns the heap size of this thread's cached `LocalLayoutContext`,
/// or 0 if the thread has not created one yet.
pub fn heap_size_of_local_context() -> usize {
    LOCAL_CONTEXT_KEY.with(|r| {
        // Measure through a shared borrow; no need to clone the `Rc`
        // (and bump its refcount) just to inspect it.
        r.borrow().as_ref().map_or(0, |context| context.heap_size_of_children())
    })
}
/// Returns this thread's `LocalLayoutContext`, creating and caching it in
/// the `LOCAL_CONTEXT_KEY` thread-local on first use.
fn create_or_get_local_context(shared_layout_context: &SharedLayoutContext)
                               -> Rc<LocalLayoutContext> {
    LOCAL_CONTEXT_KEY.with(|r| {
        let mut r = r.borrow_mut();
        if let Some(context) = r.clone() {
            // A screen-size change invalidates cached style declarations,
            // so flush them before handing the context back out.
            if shared_layout_context.screen_size_changed {
                context.applicable_declarations_cache.borrow_mut().evict_all();
            }
            context
        } else {
            // First call on this thread: build the context and cache it.
            let context = Rc::new(LocalLayoutContext {
                font_context: RefCell::new(FontContext::new(shared_layout_context.font_cache_task.clone())),
                applicable_declarations_cache: RefCell::new(ApplicableDeclarationsCache::new()),
                style_sharing_candidate_cache: RefCell::new(StyleSharingCandidateCache::new()),
            });
            *r = Some(context.clone());
            context
        }
    })
}
/// Layout information shared among all workers. This must be thread-safe.
pub struct SharedLayoutContext {
/// The shared image cache task.
pub image_cache_task: ImageCacheTask,
/// A channel for the image cache to send responses to.
pub image_cache_sender: ImageCacheChan,
/// The current screen size.
pub screen_size: Size2D<Au>,
/// Screen sized changed?
pub screen_size_changed: bool,
/// A channel up to the constellation.
pub constellation_chan: ConstellationChan,
/// A channel up to the layout task.
pub layout_chan: LayoutChan,
/// Interface to the font cache task.
pub font_cache_task: FontCacheTask,
/// The CSS selector stylist.
///
/// FIXME(#2604): Make this no longer an unsafe pointer once we have fast `RWArc`s.
pub stylist: *const Stylist,
/// The root node at which we're starting the layout.
pub reflow_root: Option<OpaqueNode>,
/// The URL.
pub url: Url,
/// Starts at zero, and increased by one every time a layout completes.
/// This can be used to easily check for invalid stale data.
pub generation: u32,
/// A channel on which new animations that have been triggered by style recalculation can be
/// sent.
pub new_animations_sender: Sender<Animation>,
/// A channel to send canvas renderers to paint task, in order to correctly paint the layers
pub canvas_layers_sender: Sender<(LayerId, IpcSender<CanvasMsg>)>,
/// The visible rects for each layer, as reported to us by the compositor.
pub visible_rects: Arc<HashMap<LayerId, Rect<Au>, DefaultState<FnvHasher>>>,
/// The animations that are currently running.
pub running_animations: Arc<HashMap<OpaqueNode, Vec<Animation>>>,
/// Why is this reflow occurring
pub goal: ReflowGoal,
}
// FIXME(#6569) This implementation is unsound:
// XXX UNSOUND!!! for image_cache_task
// XXX UNSOUND!!! for image_cache_sender
// XXX UNSOUND!!! for constellation_chan
// XXX UNSOUND!!! for layout_chan
// XXX UNSOUND!!! for font_cache_task
// XXX UNSOUND!!! for stylist
// XXX UNSOUND!!! for new_animations_sender
// XXX UNSOUND!!! for canvas_layers_sender
#[allow(unsafe_code)]
unsafe impl Sync for SharedLayoutContext {}
pub struct LayoutContext<'a> {
pub shared: &'a SharedLayoutContext,
cached_local_layout_context: Rc<LocalLayoutContext>,
}
impl<'a> LayoutContext<'a> {
pub fn new(shared_layout_context: &'a SharedLayoutContext) -> LayoutContext<'a> {
let local_context = create_or_get_local_context(shared_layout_context);
LayoutContext {
shared: shared_layout_context,
cached_local_layout_context: local_context,
}
}
#[inline(always)]
pub fn font_context(&self) -> RefMut<FontContext> {
self.cached_local_layout_context.font_context.borrow_mut()
}
#[inline(always)]
pub fn applicable_declarations_cache(&self) -> RefMut<ApplicableDeclarationsCache> {
self.cached_local_layout_context.applicable_declarations_cache.borrow_mut()
}
#[inline(always)]
pub fn style_sharing_candidate_cache(&self) -> RefMut<StyleSharingCandidateCache> {
self.cached_local_layout_context.style_sharing_candidate_cache.borrow_mut()
}
pub fn get_or_request_image(&self, url: Url, use_placeholder: UsePlaceholder)
-> Option<Arc<Image>> {
// See if the image is already available
let result = self.shared.image_cache_task.find_image(url.clone(),
use_placeholder);
match result {
Ok(image) => Some(image),
Err(state) => {
// If we are emitting an output file, then we need to block on
// image load or we risk emitting an output file missing the image.
let is_sync = opts::get().output_file.is_some() ||
opts::get().exit_after_load;
match (state, is_sync) {
// Image failed to load, so just return nothing
(ImageState::LoadError, _) => None,
// Not loaded, test mode - load the image synchronously
(_, true) => {
let (sync_tx, sync_rx) = ipc::channel().unwrap();
self.shared.image_cache_task.request_image(url,
ImageCacheChan(sync_tx),
None);
match sync_rx.recv().unwrap().image_response {
ImageResponse::Loaded(image) |
ImageResponse::PlaceholderLoaded(image) => Some(image),
ImageResponse::None => None,
}
}
// Not yet requested, async mode - request image from the cache
(ImageState::NotRequested, false) => {
self.shared.image_cache_task
.request_image(url, self.shared.image_cache_sender.clone(), None);
None
}
// Image has been requested, is still pending. Return no image
// for this paint loop. When the image loads it will trigger
// a reflow and/or repaint.
(ImageState::Pending, false) => None,
}
}
}
}
}<|fim▁end|> | use script::layout_interface::{Animation, LayoutChan, ReflowGoal};
use std::cell::{RefCell, RefMut};
use std::collections::HashMap; |
<|file_name|>metrics_test.py<|end_file_name|><|fim▁begin|># Copyright 2021 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import time
import unittest
from http import HTTPStatus
from testing.common import BaseTestCase, TestAppProcess
from fedlearner_webconsole.proto import workflow_definition_pb2
from fedlearner_webconsole.db import db
from fedlearner_webconsole.workflow.models import Workflow
from fedlearner_webconsole.job.models import Job, JobType
from fedlearner_webconsole.job.metrics import JobMetricsBuilder
class JobMetricsBuilderTest(BaseTestCase):
class Config(BaseTestCase.Config):
ES_HOST = ''
ES_PORT = 80
class FollowerConfig(Config):
GRPC_LISTEN_PORT = 4990
def test_data_join_metrics(self):
job = Job(
name='multi-indices-test27',
job_type=JobType.DATA_JOIN)
import json
print(json.dumps(JobMetricsBuilder(job).plot_metrics()))
def test_nn_metrics(self):
job = Job(
name='automl-2782410011',
job_type=JobType.NN_MODEL_TRANINING)
print(JobMetricsBuilder(job).plot_metrics())
def test_peer_metrics(self):
proc = TestAppProcess(
JobMetricsBuilderTest,
'follower_test_peer_metrics',
JobMetricsBuilderTest.FollowerConfig)
proc.start()
self.leader_test_peer_metrics()<|fim▁hole|> proc.terminate()
def leader_test_peer_metrics(self):
self.setup_project(
'leader',
JobMetricsBuilderTest.FollowerConfig.GRPC_LISTEN_PORT)
workflow = Workflow(
name='test-workflow',
project_id=1)
db.session.add(workflow)
db.session.commit()
while True:
resp = self.get_helper(
'/api/v2/workflows/1/peer_workflows'
'/0/jobs/test-job/metrics')
if resp.status_code == HTTPStatus.OK:
break
time.sleep(1)
def follower_test_peer_metrics(self):
self.setup_project(
'follower',
JobMetricsBuilderTest.Config.GRPC_LISTEN_PORT)
workflow = Workflow(
name='test-workflow',
project_id=1,
metric_is_public=True)
workflow.set_job_ids([1])
db.session.add(workflow)
job = Job(
name='automl-2782410011',
job_type=JobType.NN_MODEL_TRANINING,
workflow_id=1,
project_id=1,
config=workflow_definition_pb2.JobDefinition(
name='test-job'
).SerializeToString())
db.session.add(job)
db.session.commit()
while True:
time.sleep(1)
if __name__ == '__main__':
# no es in test env skip this test
# unittest.main()
pass<|fim▁end|> | |
<|file_name|>attachment.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.db import models
<|fim▁hole|>
class Attachment(models.Model):
    """A file uploaded by a user and attached to a ticket."""
    # Owning ticket; reachable from a ticket as ``ticket.attachments``.
    ticket = models.ForeignKey(
        Ticket, blank=False, related_name='attachments', db_index=True,
        on_delete=models.DO_NOTHING)
    # The uploading user.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL, blank=False, db_index=True,
        on_delete=models.DO_NOTHING)
    # Files are stored under a date-based directory, e.g. attachments/2021/05/31/.
    upload = models.FileField(upload_to='attachments/%Y/%m/%d', max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)
    @classmethod
    def filter_by_user(cls, user, queryset=None):
        """Return the attachments accessible to *user*.

        An attachment is accessible when its ticket is accessible to the
        user; the ticket check is delegated to ``Ticket.filter_by_user``.
        """
        if queryset is None:
            queryset = cls.objects
        return queryset.filter(ticket__in=Ticket.filter_by_user(user))
|
<|file_name|>xkcdplot.py<|end_file_name|><|fim▁begin|>"""
XKCD plot generator
-------------------
Author: Jake Vanderplas
This is a script that will take any matplotlib line diagram, and convert it
to an XKCD-style plot. It will work for plots with line & text elements,
including axes labels and titles (but not axes tick labels).
The idea for this comes from work by Damon McDougall
http://www.mail-archive.com/[email protected]/msg25499.html
from:
http://nbviewer.ipython.org/url/jakevdp.github.com/downloads/notebooks/XKCD_plots.ipynb
"""
import numpy as np
import pylab as pl
from scipy import interpolate, signal
import matplotlib.font_manager as fm
# We need a special font for the code below. It can be downloaded this way:
import os
import urllib2
#import urllib.request as urllib2
if not os.path.exists('Humor-Sans.ttf'):
fhandle = urllib2.urlopen('http://antiyawn.com/uploads/Humor-Sans-1.0.ttf')
open('Humor-Sans.ttf', 'wb').write(fhandle.read())
def xkcd_line(x, y, xlim=None, ylim=None,
              mag=1.0, f1=30, f2=0.05, f3=15):
    """Distort an (x, y) polyline so it looks hand-drawn.

    The curve is rescaled to the plot range, resampled uniformly along its
    arc length with a spline, and then nudged perpendicular to itself by
    low-pass-filtered random noise.

    Parameters
    ----------
    x, y : array_like
        Coordinates of the line to distort.
    xlim, ylim : tuple, optional
        Assumed plot range; guessed from the data when omitted.
    mag : float
        Magnitude of the random distortion.
    f1, f2, f3 : int, float, int
        Noise-filter parameters: FIR window size, high-frequency cutoff
        scale, and Kaiser-window beta.

    Returns
    -------
    x_int, y_int : ndarray
        The distorted coordinates.
    """
    x = np.asarray(x)
    y = np.asarray(y)

    # Fall back to data-derived limits, borrowing the other axis when a
    # range is degenerate (zero span).
    if xlim is None:
        xlim = (x.min(), x.max())
    if ylim is None:
        ylim = (y.min(), y.max())
    if xlim[1] == xlim[0]:
        xlim = ylim
    if ylim[1] == ylim[0]:
        ylim = xlim

    # Normalise both coordinates onto the unit square.
    sx = (x - xlim[0]) * 1. / (xlim[1] - xlim[0])
    sy = (y - ylim[0]) * 1. / (ylim[1] - ylim[0])

    # Total arc length of the scaled path decides how densely to resample.
    dx = sx[1:] - sx[:-1]
    dy = sy[1:] - sy[:-1]
    dist_tot = np.sum(np.sqrt(dx * dx + dy * dy))

    Nu = int(200 * dist_tot)
    u = np.arange(-1, Nu + 1) * 1. / (Nu - 1)

    # Fit a parametric spline and evaluate it at the sample points.
    k = min(3, len(x) - 1)
    res = interpolate.splprep([sx, sy], s=0, k=k)
    x_int, y_int = interpolate.splev(u, res[0])

    # Local direction of travel at each interior sample; the perturbation
    # is applied perpendicular to it.
    dx = x_int[2:] - x_int[:-2]
    dy = y_int[2:] - y_int[:-2]
    dist = np.sqrt(dx * dx + dy * dy)

    # Low-pass-filtered white noise gives a smooth hand-drawn wobble.
    coeffs = mag * np.random.normal(0, 0.01, len(x_int) - 2)
    b = signal.firwin(f1, f2 * dist_tot, window=('kaiser', f3))
    response = signal.lfilter(b, 1, coeffs)
    x_int[1:-1] += response * dy / dist
    y_int[1:-1] += response * dx / dist

    # Map back from the unit square to data coordinates.
    x_int = x_int[1:-1] * (xlim[1] - xlim[0]) + xlim[0]
    y_int = y_int[1:-1] * (ylim[1] - ylim[0]) + ylim[0]

    return x_int, y_int
def XKCDify(ax, mag=1.0,
f1=50, f2=0.01, f3=15,
bgcolor='w',
xaxis_loc=None,
yaxis_loc=None,
xaxis_arrow='+',
yaxis_arrow='+',
ax_extend=0.1,
expand_axes=False):
"""Make axis look hand-drawn
This adjusts all lines, text, legends, and axes in the figure to look
like xkcd plots. Other plot elements are not modified.
Parameters
----------
ax : Axes instance
the axes to be modified.
mag : float
the magnitude of the distortion
f1, f2, f3 : int, float, int
filtering parameters. f1 gives the size of the window, f2 gives
the high-frequency cutoff, f3 gives the size of the filter
xaxis_loc, yaxis_log : float
The locations to draw the x and y axes. If not specified, they
will be drawn from the bottom left of the plot
xaxis_arrow, yaxis_arrow : str
where to draw arrows on the x/y axes. Options are '+', '-', '+-', or ''
ax_extend : float
How far (fractionally) to extend the drawn axes beyond the original
axes limits
expand_axes : bool
if True, then expand axes to fill the figure (useful if there is only
a single axes in the figure)
"""
# Get axes aspect
ext = ax.get_window_extent().extents
aspect = (ext[3] - ext[1]) / (ext[2] - ext[0])
xlim = ax.get_xlim()
ylim = ax.get_ylim()
xspan = xlim[1] - xlim[0]
yspan = ylim[1] - xlim[0]
<|fim▁hole|> xax_lim = (xlim[0] - ax_extend * xspan,
xlim[1] + ax_extend * xspan)
yax_lim = (ylim[0] - ax_extend * yspan,
ylim[1] + ax_extend * yspan)
if xaxis_loc is None:
xaxis_loc = ylim[0]
if yaxis_loc is None:
yaxis_loc = xlim[0]
# Draw axes
xaxis = pl.Line2D([xax_lim[0], xax_lim[1]], [xaxis_loc, xaxis_loc],
linestyle='-', color='k')
yaxis = pl.Line2D([yaxis_loc, yaxis_loc], [yax_lim[0], yax_lim[1]],
linestyle='-', color='k')
# Label axes3, 0.5, 'hello', fontsize=14)
ax.text(xax_lim[1], xaxis_loc - 0.02 * yspan, ax.get_xlabel(),
fontsize=14, ha='right', va='top', rotation=12)
ax.text(yaxis_loc - 0.02 * xspan, yax_lim[1], ax.get_ylabel(),
fontsize=14, ha='right', va='top', rotation=78)
ax.set_xlabel('')
ax.set_ylabel('')
# Add title
ax.text(0.5 * (xax_lim[1] + xax_lim[0]), yax_lim[1],
ax.get_title(),
ha='center', va='bottom', fontsize=16)
ax.set_title('')
Nlines = len(ax.lines)
lines = [xaxis, yaxis] + [ax.lines.pop(0) for i in range(Nlines)]
for line in lines:
x, y = line.get_data()
x_int, y_int = xkcd_line(x, y, xlim, ylim,
mag, f1, f2, f3)
# create foreground and background line
lw = line.get_linewidth()
line.set_linewidth(2 * lw)
line.set_data(x_int, y_int)
# don't add background line for axes
if (line is not xaxis) and (line is not yaxis):
line_bg = pl.Line2D(x_int, y_int, color=bgcolor,
linewidth=8 * lw)
ax.add_line(line_bg)
ax.add_line(line)
# Draw arrow-heads at the end of axes lines
arr1 = 0.03 * np.array([-1, 0, -1])
arr2 = 0.02 * np.array([-1, 0, 1])
arr1[::2] += np.random.normal(0, 0.005, 2)
arr2[::2] += np.random.normal(0, 0.005, 2)
x, y = xaxis.get_data()
if '+' in str(xaxis_arrow):
ax.plot(x[-1] + arr1 * xspan * aspect,
y[-1] + arr2 * yspan,
color='k', lw=2)
if '-' in str(xaxis_arrow):
ax.plot(x[0] - arr1 * xspan * aspect,
y[0] - arr2 * yspan,
color='k', lw=2)
x, y = yaxis.get_data()
if '+' in str(yaxis_arrow):
ax.plot(x[-1] + arr2 * xspan * aspect,
y[-1] + arr1 * yspan,
color='k', lw=2)
if '-' in str(yaxis_arrow):
ax.plot(x[0] - arr2 * xspan * aspect,
y[0] - arr1 * yspan,
color='k', lw=2)
# Change all the fonts to humor-sans.
prop = fm.FontProperties(fname='Humor-Sans.ttf', size=16)
for text in ax.texts:
text.set_fontproperties(prop)
# modify legend
leg = ax.get_legend()
if leg is not None:
leg.set_frame_on(False)
for child in leg.get_children():
if isinstance(child, pl.Line2D):
x, y = child.get_data()
child.set_data(xkcd_line(x, y, mag=10, f1=100, f2=0.001))
child.set_linewidth(2 * child.get_linewidth())
if isinstance(child, pl.Text):
child.set_fontproperties(prop)
# Set the axis limits
ax.set_xlim(xax_lim[0] - 0.1 * xspan,
xax_lim[1] + 0.1 * xspan)
ax.set_ylim(yax_lim[0] - 0.1 * yspan,
yax_lim[1] + 0.1 * yspan)
# adjust the axes
ax.set_xticks([])
ax.set_yticks([])
if expand_axes:
ax.figure.set_facecolor(bgcolor)
ax.set_axis_off()
ax.set_position([0, 0, 1, 1])
return ax<|fim▁end|> | |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|><|fim▁hole|>"""<|fim▁end|> | """
Contains exception classes specific to this project. |
<|file_name|>ObservationSequences.cpp<|end_file_name|><|fim▁begin|>#include "ObservationSequences.h"
#include <iostream>
#include <fstream>
#include <algorithm>
#include "boost\filesystem.hpp"
using namespace std;
namespace bst = boost::filesystem;
// Nothing to release explicitly; all members clean up themselves.
ObservationSequences::~ObservationSequences()
{
}
// Builds a sequence container for one malware family; the folder name is
// used as the family name. File counters stay zero until files are scanned.
ObservationSequences::ObservationSequences(std::string folderName)
{
    noOfFiles = 0;
    noOfTrainingFiles = 0;
    noOfScoringFiles = 0;
    this->malwareFamilyName = folderName;
}
// Collects the full path of every regular file in this family's dataset
// directory into fileNameList; subdirectories are skipped.
// NOTE(review): the dataset root is hard-coded to a local Windows path —
// consider making it configurable.
void ObservationSequences::getFileList() {
    string folderName = "D:/Aditya/CS_266/Project/Dataset/" + this->malwareFamilyName + "/";
    cout << folderName << endl;  // log the directory being scanned
    bst::path p(folderName);
    for (auto i = bst::directory_iterator(p); i != bst::directory_iterator(); i++)
    {
        if (!bst::is_directory(i->path()))
        {
            string fullFileName = folderName + i->path().filename().string();
            this->fileNameList.push_back(fullFileName);
        }
    }
}
void ObservationSequences::getFileStream()
{
this->noOfFiles = this->fileNameList.size();
for (int index = 0; index < this->noOfFiles; index++)
{
vector<int> tempFileStream;
vector<string> opCodeStream;
string tempFileName = this->fileNameList.at(index);
ifstream tempReadFile;
tempReadFile.open(tempFileName);
string line;
while (getline(tempReadFile, line))
{
int opCodeIndex = find(this->distinctOpCodesList.begin(), this->distinctOpCodesList.end(), line) - this->distinctOpCodesList.begin();
int endIndex = this->distinctOpCodesList.size();
if (opCodeIndex != endIndex)
{
tempFileStream.push_back(opCodeIndex);
}
else
{
this->distinctOpCodesList.push_back(line);
int newOpCodeIndex = this->distinctOpCodesList.size()-1;
tempFileStream.push_back(newOpCodeIndex);
}
opCodeStream.push_back(line);
}
this->obsSequenceList.push_back(tempFileStream);
}
cout << this->distinctOpCodesList.size();<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use mysql;
use std::{error, fmt, result};
<|fim▁hole|>pub enum Error {
Mysql(mysql::Error),
RecordNotFound(u64),
ColumnNotFound,
AddressChecksumToTrits,
}
pub type Result<T> = result::Result<T, Error>;
impl fmt::Display for Error {
    /// Formats the error as a short human-readable message.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::Mysql(ref err) => write!(f, "MySQL error: {}", err),
            Error::RecordNotFound(id) => write!(f, "Record not found ({})", id),
            Error::ColumnNotFound => write!(f, "Column not found"),
            Error::AddressChecksumToTrits => {
                write!(f, "can't convert address checksum to trits")
            }
        }
    }
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Mysql(ref err) => err.description(),
Error::RecordNotFound(_) => "Record not found",
Error::ColumnNotFound => "Column not found",
Error::AddressChecksumToTrits => "Can't convert to trits",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
Error::Mysql(ref err) => Some(err),
Error::RecordNotFound(_) |
Error::ColumnNotFound |
Error::AddressChecksumToTrits => None,
}
}
}
impl From<mysql::Error> for Error {
fn from(err: mysql::Error) -> Error {
Error::Mysql(err)
}
}<|fim▁end|> | #[derive(Debug)] |
<|file_name|>test_vpc_network_pfrules.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Component tests for VPC network functionality - Port Forwarding Rules.
"""
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.lib.base import (stopRouter,
startRouter,
Account,
VpcOffering,
VPC,
ServiceOffering,
NATRule,
NetworkACL,
PublicIPAddress,
NetworkOffering,
Network,
VirtualMachine,
LoadBalancerRule)
from marvin.lib.common import (get_domain,
get_zone,
get_template,
list_routers)
from marvin.lib.utils import cleanup_resources
import socket
import time
import sys
class Services:
"""Test VPC network services - Port Forwarding Rules Test Data Class.
"""<|fim▁hole|> "account": {
"email": "[email protected]",
"firstname": "Test",
"lastname": "User",
"username": "test",
# Random characters are appended for unique
# username
"password": "password",
},
"host1": None,
"host2": None,
"service_offering": {
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 100,
"memory": 128,
},
"network_offering": {
"name": 'VPC Network offering',
"displaytext": 'VPC Network off',
"guestiptype": 'Isolated',
"supportedservices": 'Vpn,Dhcp,Dns,SourceNat,PortForwarding,Lb,UserData,StaticNat,NetworkACL',
"traffictype": 'GUEST',
"availability": 'Optional',
"useVpc": 'on',
"serviceProviderList": {
"Vpn": 'VpcVirtualRouter',
"Dhcp": 'VpcVirtualRouter',
"Dns": 'VpcVirtualRouter',
"SourceNat": 'VpcVirtualRouter',
"PortForwarding": 'VpcVirtualRouter',
"Lb": 'VpcVirtualRouter',
"UserData": 'VpcVirtualRouter',
"StaticNat": 'VpcVirtualRouter',
"NetworkACL": 'VpcVirtualRouter'
},
},
"network_offering_no_lb": {
"name": 'VPC Network offering',
"displaytext": 'VPC Network off',
"guestiptype": 'Isolated',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,UserData,StaticNat,NetworkACL',
"traffictype": 'GUEST',
"availability": 'Optional',
"useVpc": 'on',
"serviceProviderList": {
"Dhcp": 'VpcVirtualRouter',
"Dns": 'VpcVirtualRouter',
"SourceNat": 'VpcVirtualRouter',
"PortForwarding": 'VpcVirtualRouter',
"UserData": 'VpcVirtualRouter',
"StaticNat": 'VpcVirtualRouter',
"NetworkACL": 'VpcVirtualRouter'
},
},
"vpc_offering": {
"name": 'VPC off',
"displaytext": 'VPC off',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Lb,UserData,StaticNat',
},
"vpc": {
"name": "TestVPC",
"displaytext": "TestVPC",
"cidr": '10.0.0.1/24'
},
"network": {
"name": "Test Network",
"displaytext": "Test Network",
"netmask": '255.255.255.0'
},
"lbrule": {
"name": "SSH",
"alg": "leastconn",
# Algorithm used for load balancing
"privateport": 22,
"publicport": 2222,
"openfirewall": False,
"startport": 22,
"endport": 2222,
"protocol": "TCP",
"cidrlist": '0.0.0.0/0',
},
"lbrule_http": {
"name": "HTTP",
"alg": "leastconn",
# Algorithm used for load balancing
"privateport": 80,
"publicport": 8888,
"openfirewall": False,
"startport": 80,
"endport": 8888,
"protocol": "TCP",
"cidrlist": '0.0.0.0/0',
},
"natrule": {
"privateport": 22,
"publicport": 22,
"startport": 22,
"endport": 22,
"protocol": "TCP",
"cidrlist": '0.0.0.0/0',
},
"http_rule": {
"privateport": 80,
"publicport": 80,
"startport": 80,
"endport": 80,
"cidrlist": '0.0.0.0/0',
"protocol": "TCP"
},
"virtual_machine": {
"displayname": "Test VM",
"username": "root",
"password": "password",
"ssh_port": 22,
# "hypervisor": 'XenServer',
# Hypervisor type should be same as
# hypervisor type of cluster
"privateport": 22,
"publicport": 22,
"protocol": 'TCP',
},
"ostype": 'CentOS 5.3 (64-bit)',
"timeout": 10,
}
class TestVPCNetworkPFRules(cloudstackTestCase):
@classmethod
def setUpClass(cls):
# We want to fail quicker if it's failure
socket.setdefaulttimeout(60)
cls.testClient = super(TestVPCNetworkPFRules, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
cls.template = get_template(
cls.api_client,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.api_client,
cls.services["service_offering"]
)
cls._cleanup = [cls.service_offering]
return
@classmethod
def tearDownClass(cls):
try:
#Cleanup resources used
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.account = Account.create(
self.apiclient,
self.services["account"],
admin=True,
domainid=self.domain.id
)
self.cleanup = [self.account]
self.debug("Creating a VPC offering..")
self.vpc_off = VpcOffering.create(
self.apiclient,
self.services["vpc_offering"]
)
self.debug("Enabling the VPC offering created")
self.vpc_off.update(self.apiclient, state='Enabled')
self.debug("Creating a VPC network in the account: %s" % self.account.name)
self.services["vpc"]["cidr"] = '10.1.1.1/16'
self.vpc = VPC.create(
self.apiclient,
self.services["vpc"],
vpcofferingid=self.vpc_off.id,
zoneid=self.zone.id,
account=self.account.name,
domainid=self.account.domainid
)
return
def tearDown(self):
try:
#Clean up, terminate the created network offerings
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
self.debug("Warning: Exception during cleanup : %s" % e)
return
def get_vpcrouter(self):
routers = list_routers(self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
)
self.assertEqual(isinstance(routers, list),
True,
"Check for list routers response return valid data"
)
self.assertNotEqual(len(routers),
0,
"Check list router response"
)
router = routers[0]
return router
def stop_vpcrouter(self):
router = self.get_vpcrouter()
self.debug("Stopping router ID: %s" % router.id)
cmd = stopRouter.stopRouterCmd()
cmd.id = router.id
self.apiclient.stopRouter(cmd)
routers = list_routers(self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
)
self.assertEqual(isinstance(routers, list),
True,
"Check for list routers response return valid data"
)
router = routers[0]
self.assertEqual(router.state,
'Stopped',
"Check list router response for router state"
)
return router
def start_vpcrouter(self, router):
# Start the VPC Router
self.debug("Starting router ID: %s" % router.id)
cmd = startRouter.startRouterCmd()
cmd.id = router.id
self.apiclient.startRouter(cmd)
routers = list_routers(self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
zoneid=self.zone.id
)
self.assertEqual(isinstance(routers, list),
True,
"Check for list routers response return valid data"
)
router = routers[0]
self.assertEqual(router.state,
'Running',
"Check list router response for router state"
)
def check_ssh_into_vm(self, vm, public_ip, testnegative=False):
self.debug("Checking if we can SSH into VM=%s on public_ip=%s" % (vm.name, public_ip.ipaddress.ipaddress))
try:
vm.get_ssh_client(ipaddress=public_ip.ipaddress.ipaddress)
if not testnegative:
self.debug("SSH into VM=%s on public_ip=%s is successfully" % (vm.name, public_ip.ipaddress.ipaddress))
else:
self.fail("SSH into VM=%s on public_ip=%s is successfully" % (vm.name, public_ip.ipaddress.ipaddress))
except:
if not testnegative:
self.fail("Failed to SSH into VM - %s" % (public_ip.ipaddress.ipaddress))
else:
self.debug("Failed to SSH into VM - %s" % (public_ip.ipaddress.ipaddress))
def check_wget_from_vm(self, vm, public_ip, network=None, testnegative=False, isVmAccessible=True):
import urllib
self.debug("Checking if we can wget from a VM=%s http server on public_ip=%s" % (vm.name, public_ip.ipaddress.ipaddress))
try:
if not isVmAccessible:
self.create_natrule(vm, public_ip, network)
self.setup_webserver(vm)
urllib.urlretrieve("http://%s/test.html" % public_ip.ipaddress.ipaddress, filename="test.html")
if not testnegative:
self.debug("Successesfull to wget from VM=%s http server on public_ip=%s" % (vm.name, public_ip.ipaddress.ipaddress))
else:
self.fail("Successesfull to wget from VM=%s http server on public_ip=%s" % (vm.name, public_ip.ipaddress.ipaddress))
except Exception as e:
if not testnegative:
self.fail("Failed to wget from VM=%s http server on public_ip=%s: %s" % (vm.name, public_ip.ipaddress.ipaddress, e))
else:
self.debug("Failed to wget from VM=%s http server on public_ip=%s: %s" % (vm.name, public_ip.ipaddress.ipaddress, e))
def setup_webserver(self, vm):
# Start httpd service on VM first
sshClient = vm.get_ssh_client()
# Test to see if we are on a tiny linux box (using busybox)
res = str(sshClient.execute("busybox")).lower()
if "hexdump" in res:
self.setup_busybox(sshClient)
else:
self.setup_apache(sshClient)
def setup_busybox(self, sshClient):
""" Create a dummy test.html file and fire up the busybox web server """
sshClient.execute('echo test > test.html')
sshClient.execute("/usr/sbin/httpd")
self.debug("Setup webserver using busybox")
def setup_apache(self, sshClient):
sshClient.execute("service httpd start")
time.sleep(5)
ssh_response = str(sshClient.execute("service httpd status")).lower()
self.debug("httpd service status is: %s" % ssh_response)
if "httpd: unrecognized service" in ssh_response or "inactive" in ssh_response:
ssh_res = sshClient.execute("yum install httpd -y")
if "Complete!" not in ssh_res:
raise Exception("Failed to install http server")
sshClient.execute("service httpd start")
time.sleep(5)
ssh_response = str(sshClient.execute("service httpd status")).lower()
if not "running" in ssh_response:
raise Exception("Failed to start httpd service")
self.debug("Setup webserver using apache")
def create_natrule(self, vm, public_ip, network, services=None):
self.debug("Creating NAT rule in network for vm with public IP")
if not services:
services = self.services["natrule"]
nat_rule = NATRule.create(self.apiclient,
vm,
services,
ipaddressid=public_ip.ipaddress.id,
openfirewall=False,
networkid=network.id,
vpcid=self.vpc.id
)
self.debug("Adding NetworkACL rules to make NAT rule accessible")
nwacl_nat = NetworkACL.create(self.apiclient,
networkid=network.id,
services=services,
traffictype='Ingress'
)
self.debug('nwacl_nat=%s' % nwacl_nat.__dict__)
return nat_rule
def acquire_publicip(self, network):
self.debug("Associating public IP for network: %s" % network.name)
public_ip = PublicIPAddress.create(self.apiclient,
accountid=self.account.name,
zoneid=self.zone.id,
domainid=self.account.domainid,
networkid=network.id,
vpcid=self.vpc.id
)
self.debug("Associated %s with network %s" % (public_ip.ipaddress.ipaddress,
network.id
))
return public_ip
    def create_vpc(self, cidr='10.1.2.1/16'):
        """Create and enable a VPC offering, then create a VPC with ``cidr``.

        :param cidr: CIDR block for the new VPC.
        :return: the created VPC object.
        """
        self.debug("Creating a VPC offering..")
        # NOTE(review): this mutates the shared services dict, so calling
        # create_vpc twice keeps appending the cidr to the offering name --
        # confirm this helper is only invoked once per test run.
        self.services["vpc_offering"]["name"] = self.services["vpc_offering"]["name"] + str(cidr)
        vpc_off = VpcOffering.create(
                                     self.apiclient,
                                     self.services["vpc_offering"]
                                     )
        # Register for teardown so the offering is removed after the test.
        self._cleanup.append(vpc_off)
        self.debug("Enabling the VPC offering created")
        vpc_off.update(self.apiclient, state='Enabled')
        self.debug("Creating a VPC network in the account: %s" % self.account.name)
        self.services["vpc"]["cidr"] = cidr
        vpc = VPC.create(
                         self.apiclient,
                         self.services["vpc"],
                         vpcofferingid=vpc_off.id,
                         zoneid=self.zone.id,
                         account=self.account.name,
                         domainid=self.account.domainid
                         )
        return vpc
def create_network(self, net_offerring, gateway='10.1.1.1',vpc=None):
try:
self.debug('Create NetworkOffering')
net_offerring["name"] = "NET_OFF-" + str(gateway)
nw_off = NetworkOffering.create(self.apiclient,
net_offerring,
conservemode=False
)
# Enable Network offering
nw_off.update(self.apiclient, state='Enabled')
self._cleanup.append(nw_off)
self.debug('Created and Enabled NetworkOffering')
self.services["network"]["name"] = "NETWORK-" + str(gateway)
self.debug('Adding Network=%s' % self.services["network"])
obj_network = Network.create(self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=nw_off.id,
zoneid=self.zone.id,
gateway=gateway,
vpcid=vpc.id if vpc else self.vpc.id
)
self.debug("Created network with ID: %s" % obj_network.id)
return obj_network
except Exception, e:
self.fail('Unable to create a Network with offering=%s because of %s ' % (net_offerring, e))
def deployvm_in_network(self, network, host_id=None):
try:
self.debug('Creating VM in network=%s' % network.name)
vm = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[str(network.id)],
hostid=host_id
)
self.debug('Created VM=%s in network=%s' % (vm.id, network.name))
return vm
except:
self.fail('Unable to create VM in a Network=%s' % network.name)
def create_lbrule(self, public_ip, network, vmarray, services=None):
self.debug("Creating LB rule for IP address: %s" %
public_ip.ipaddress.ipaddress)
objservices = None
if services:
objservices = services
else:
objservices = self.services["lbrule"]
lb_rule = LoadBalancerRule.create(
self.apiclient,
objservices,
ipaddressid=public_ip.ipaddress.id,
accountid=self.account.name,
networkid=network.id,
vpcid=self.vpc.id,
domainid=self.account.domainid
)
self.debug("Adding virtual machines %s and %s to LB rule" % (vmarray))
lb_rule.assign(self.apiclient, vmarray)
return lb_rule
    def open_egress_to_world(self, network):
        """Add a network ACL meant to let the tier's VMs reach the internet.

        NOTE(review): despite the method name and the debug message talking
        about "Egress rules", the ACL is created with traffictype='Ingress' --
        confirm whether 'Egress' was intended here.
        """
        self.debug("Adding Egress rules to network %s and %s to allow access to internet" % (network.name,self.services["http_rule"]))
        nwacl_internet_1 = NetworkACL.create(
                                self.apiclient,
                                networkid=network.id,
                                services=self.services["http_rule"],
                                traffictype='Ingress'
                                )
        return nwacl_internet_1
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_01_network_services_VPC_StopCreatePF(self):
""" Test : Create VPC PF rules on acquired public ip when VpcVirtualRouter is stopped
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Create a Network offering - NO1 with all supported services
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Deploy vm1 in network1.
# 5. Stop the VPC Virtual Router.
# 6. Use the Create PF rule for vm in network1.
# 7. Start VPC Virtual Router.
# 8. Successfully ssh into the Guest VM using the PF rule
network_1 = self.create_network(self.services["network_offering"])
vm_1 = self.deployvm_in_network(network_1)
public_ip_1 = self.acquire_publicip(network_1)
#ensure vm is accessible over public ip
nat_rule = self.create_natrule(vm_1, public_ip_1, network_1)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
#remove the nat rule
nat_rule.delete(self.apiclient)
router = self.stop_vpcrouter()
#recreate nat rule
self.create_natrule(vm_1, public_ip_1, network_1)
self.start_vpcrouter(router)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_02_network_services_VPC_CreatePF(self):
""" Test Create VPC PF rules on acquired public ip when VpcVirtualRouter is Running
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Create a Network offering - NO1 with all supported services
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Deploy vm1 in network1.
# 5. Use the Create PF rule for vm in network1.
# 6. Successfully ssh into the Guest VM using the PF rule
network_1 = self.create_network(self.services["network_offering"])
vm_1 = self.deployvm_in_network(network_1)
public_ip_1 = self.acquire_publicip(network_1)
self.create_natrule( vm_1, public_ip_1, network_1)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_03_network_services_VPC_StopCreateMultiplePF(self):
""" Test Create multiple VPC PF rules on acquired public ip in diff't networks when VpcVirtualRouter is stopped
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Create a Network offering - NO1 with all supported services
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Add network2(10.1.2.1/24) using N01 to this VPC.
# 5. Deploy vm1 in network1.
# 6. Deploy vm2 in network2.
# 7. Stop the VPC Virtual Router.
# 8. Use the Create PF rule for vm1 in network1.
# 9. Use the Create PF rule for vm2 in network2.
# 10. Start VPC Virtual Router.
# 11. Successfully ssh into the Guest VM1 and VM2 using the PF rule
network_1 = self.create_network(self.services["network_offering_no_lb"])
network_2 = self.create_network(self.services["network_offering_no_lb"], '10.1.2.1')
vm_1 = self.deployvm_in_network(network_1)
vm_2 = self.deployvm_in_network(network_2)
# wait until VM is up before stop the VR
time.sleep(120)
public_ip_1 = self.acquire_publicip(network_1)
public_ip_2 = self.acquire_publicip(network_2)
router = self.stop_vpcrouter()
self.create_natrule(vm_1, public_ip_1, network_1)
self.create_natrule(vm_2, public_ip_2, network_2)
self.start_vpcrouter(router)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
self.check_ssh_into_vm(vm_2, public_ip_2, testnegative=False)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_04_network_services_VPC_CreateMultiplePF(self):
""" Test Create multiple VPC PF rules on acquired public ip in diff't networks when VpcVirtualRouter is running
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Create a Network offering - NO1 with all supported services
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Add network2(10.1.2.1/24) using N01 to this VPC.
# 5. Deploy vm1 in network1.
# 6. Deploy vm2 in network2.
# 7. Use the Create PF rule for vm1 in network1.
# 8. Use the Create PF rule for vm2 in network2.
# 9. Successfully ssh into the Guest VM1 and VM2 using the PF rule
network_1 = self.create_network(self.services["network_offering"])
network_2 = self.create_network(self.services["network_offering_no_lb"], '10.1.2.1')
vm_1 = self.deployvm_in_network(network_1)
vm_2 = self.deployvm_in_network(network_2)
public_ip_1 = self.acquire_publicip(network_1)
public_ip_2 = self.acquire_publicip(network_2)
self.create_natrule(vm_1, public_ip_1, network_1)
self.create_natrule(vm_2, public_ip_2, network_2)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
self.check_ssh_into_vm(vm_2, public_ip_2, testnegative=False)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_05_network_services_VPC_StopDeletePF(self):
""" Test delete a PF rule in VPC when VpcVirtualRouter is Stopped
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Create a Network offering - NO1 with all supported services
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Deploy vm1 in network1.
# 5. Use the Create PF rule for vm in network1.
# 6. Successfully ssh into the Guest VM using the PF rule.
# 7. Successfully wget a file on http server of VM1.
# 8. Stop the VPC Virtual Router.
# 9. Delete internet PF rule
# 10. Start VPC Virtual Router.
# 11. wget a file present on http server of VM1 should fail
network_1 = self.create_network(self.services["network_offering"])
vm_1 = self.deployvm_in_network(network_1)
public_ip_1 = self.acquire_publicip(network_1)
self.create_natrule(vm_1, public_ip_1, network_1)
http_rule = self.create_natrule(vm_1, public_ip_1, network_1, self.services["http_rule"])
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=False)
router = self.stop_vpcrouter()
http_rule.delete(self.apiclient)
self.start_vpcrouter(router)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=True)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_06_network_services_VPC_DeletePF(self):
""" Test delete a PF rule in VPC when VpcVirtualRouter is Running
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Create a Network offering - NO1 with all supported services
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Deploy vm1 in network1.
# 5. Use the Create PF rule for vm in network1.
# 6. Successfully ssh into the Guest VM using the PF rule.
# 7. Successfully wget a file on http server of VM1.
# 9. Delete internet PF rule
# 10. wget a file present on http server of VM1 should fail
network_1 = self.create_network(self.services["network_offering"])
vm_1 = self.deployvm_in_network(network_1)
public_ip_1 = self.acquire_publicip(network_1)
self.create_natrule(vm_1, public_ip_1, network_1)
http_rule=self.create_natrule(vm_1, public_ip_1, network_1, self.services["http_rule"])
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=False)
http_rule.delete(self.apiclient)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=True)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_07_network_services_VPC_StopDeleteAllPF(self):
""" Test delete all PF rules in VPC when VpcVirtualRouter is Stopped
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Create a Network offering - NO1 with all supported services
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Deploy vm1 in network1.
# 5. Use the Create PF rule for vm in network1.
# 6. Successfully ssh into the Guest VM using the PF rule.
# 7. Successfully wget a file on http server of VM1.
# 8. Stop the VPC Virtual Router.
# 9. Delete all PF rule
# 10. Start VPC Virtual Router.
# 11. wget a file present on http server of VM1 should fail
# 12. ssh into Guest VM using the PF rule should fail
network_1 = self.create_network(self.services["network_offering"])
vm_1 = self.deployvm_in_network(network_1)
public_ip_1 = self.acquire_publicip(network_1)
nat_rule = self.create_natrule(vm_1, public_ip_1, network_1)
http_rule = self.create_natrule(vm_1, public_ip_1, network_1, self.services["http_rule"])
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=False)
router = self.stop_vpcrouter()
http_rule.delete(self.apiclient)
nat_rule.delete(self.apiclient)
self.start_vpcrouter(router)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=True)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=True,
isVmAccessible=False, network=network_1)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_08_network_services_VPC_DeleteAllPF(self):
""" Test delete all PF rules in VPC when VpcVirtualRouter is Running
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Create a Network offering - NO1 with all supported services
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Deploy vm1 in network1.
# 5. Use the Create PF rule for vm in network1.
# 6. Successfully ssh into the Guest VM using the PF rule.
# 7. Successfully wget a file on http server of VM1.
# 8. Delete all PF rule
# 9. wget a file present on http server of VM1 should fail
# 10. ssh into Guest VM using the PF rule should fail
network_1 = self.create_network(self.services["network_offering"])
vm_1 = self.deployvm_in_network(network_1)
public_ip_1 = self.acquire_publicip(network_1)
nat_rule = self.create_natrule(vm_1, public_ip_1, network_1)
http_rule = self.create_natrule(vm_1, public_ip_1, network_1, self.services["http_rule"])
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=False)
http_rule.delete(self.apiclient)
nat_rule.delete(self.apiclient)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=True)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=True,
isVmAccessible=False, network=network_1)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_09_network_services_VPC_StopDeleteAllMultiplePF(self):
""" Test delete all PF rules in VPC across multiple networks when VpcVirtualRouter is Stopped
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16.
# 2. Create a Network offering - NO1 with all supported services.
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Add network2(10.1.2.1/24) using N01 to this VPC.
# 5. Deploy vm1 and vm2 in network1.
# 6. Deploy vm3 and vm4 in network2.
# 7. Use the Create PF rule ssh and http for vm1 and vm2 in network1.
# 8. Use the Create PF rule ssh and http for vm3 and vm4 in network2.
# 9. Successfully ssh into the Guest vm1, vm2, vm3 and vm4 using the PF rule.
# 10. Succesfully wget a file from http server present on vm1, vm2, vm3 and vm4.
# 11. Stop VPC Virtual Router.
# 12. Delete all PF rultes for vm1, vm2, vm3 and vm4.
# 12. Start VPC Virtual Router.
# 13. Fail to ssh and http to vm1, vm2, vm3 and vm4.
network_1 = self.create_network(self.services["network_offering"])
network_2 = self.create_network(self.services["network_offering_no_lb"], '10.1.2.1')
vm_1 = self.deployvm_in_network(network_1)
vm_2 = self.deployvm_in_network(network_1)
vm_3 = self.deployvm_in_network(network_2)
vm_4 = self.deployvm_in_network(network_2)
public_ip_1 = self.acquire_publicip(network_1)
public_ip_2 = self.acquire_publicip(network_1)
nat_rule1 = self.create_natrule(vm_1, public_ip_1, network_1)
nat_rule2 = self.create_natrule(vm_2, public_ip_2, network_1)
http_rule1 = self.create_natrule(vm_1, public_ip_1, network_1, self.services["http_rule"])
http_rule2 = self.create_natrule(vm_2, public_ip_2, network_1, self.services["http_rule"])
public_ip_3 = self.acquire_publicip(network_2)
public_ip_4 = self.acquire_publicip(network_2)
nat_rule3 = self.create_natrule(vm_3, public_ip_3, network_2)
nat_rule4 = self.create_natrule(vm_4, public_ip_4, network_2)
http_rule3 = self.create_natrule(vm_3, public_ip_3, network_2, self.services["http_rule"])
http_rule4 = self.create_natrule(vm_4, public_ip_4, network_2, self.services["http_rule"])
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
self.check_ssh_into_vm(vm_2, public_ip_2, testnegative=False)
self.check_ssh_into_vm(vm_3, public_ip_3, testnegative=False)
self.check_ssh_into_vm(vm_4, public_ip_4, testnegative=False)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=False)
self.check_wget_from_vm(vm_2, public_ip_2, testnegative=False)
self.check_wget_from_vm(vm_3, public_ip_3, testnegative=False)
self.check_wget_from_vm(vm_4, public_ip_4, testnegative=False)
router = self.stop_vpcrouter()
nat_rule1.delete(self.apiclient)
nat_rule2.delete(self.apiclient)
nat_rule3.delete(self.apiclient)
nat_rule4.delete(self.apiclient)
http_rule1.delete(self.apiclient)
http_rule2.delete(self.apiclient)
http_rule3.delete(self.apiclient)
http_rule4.delete(self.apiclient)
self.start_vpcrouter(router)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=True)
self.check_ssh_into_vm(vm_2, public_ip_2, testnegative=True)
self.check_ssh_into_vm(vm_3, public_ip_3, testnegative=True)
self.check_ssh_into_vm(vm_4, public_ip_4, testnegative=True)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=True,
isVmAccessible=False, network=network_1)
self.check_wget_from_vm(vm_2, public_ip_2, testnegative=True,
isVmAccessible=False, network=network_1)
self.check_wget_from_vm(vm_3, public_ip_3, testnegative=True,
isVmAccessible=False, network=network_2)
self.check_wget_from_vm(vm_4, public_ip_4, testnegative=True,
isVmAccessible=False, network=network_2)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_10_network_services_VPC_DeleteAllMultiplePF(self):
""" Test delete all PF rules in VPC across multiple networks when VpcVirtualRouter is Running
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16.
# 2. Create a Network offering - NO1 with all supported services.
# 3. Add network1(10.1.1.1/24) using N01 to this VPC.
# 4. Add network2(10.1.2.1/24) using N01 to this VPC.
# 5. Deploy vm1 and vm2 in network1.
# 6. Deploy vm3 and vm4 in network2.
# 7. Use the Create PF rule ssh and http for vm1 and vm2 in network1.
# 8. Use the Create PF rule ssh and http for vm3 and vm4 in network2.
# 9. Successfully ssh into the Guest vm1, vm2, vm3 and vm4 using the PF rule.
# 10. Succesfully wget a file from http server present on vm1, vm2, vm3 and vm4.
# 12. Delete all PF rultes for vm1, vm2, vm3 and vm4.
# 13. Fail to ssh and http to vm1, vm2, vm3 and vm4.
network_1 = self.create_network(self.services["network_offering"])
network_2 = self.create_network(self.services["network_offering_no_lb"], '10.1.2.1')
vm_1 = self.deployvm_in_network(network_1)
vm_2 = self.deployvm_in_network(network_1)
vm_3 = self.deployvm_in_network(network_2)
vm_4 = self.deployvm_in_network(network_2)
public_ip_1 = self.acquire_publicip(network_1)
public_ip_2 = self.acquire_publicip(network_1)
nat_rule1 = self.create_natrule(vm_1, public_ip_1, network_1)
nat_rule2 = self.create_natrule(vm_2, public_ip_2, network_1)
http_rule1 = self.create_natrule(vm_1, public_ip_1, network_1, self.services["http_rule"])
http_rule2 = self.create_natrule(vm_2, public_ip_2, network_1, self.services["http_rule"])
public_ip_3 = self.acquire_publicip(network_2)
public_ip_4 = self.acquire_publicip(network_2)
nat_rule3 = self.create_natrule(vm_3, public_ip_3, network_2)
nat_rule4 = self.create_natrule(vm_4, public_ip_4, network_2)
http_rule3 = self.create_natrule(vm_3, public_ip_3, network_2, self.services["http_rule"])
http_rule4 = self.create_natrule(vm_4, public_ip_4, network_2, self.services["http_rule"])
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=False)
self.check_ssh_into_vm(vm_2, public_ip_2, testnegative=False)
self.check_ssh_into_vm(vm_3, public_ip_3, testnegative=False)
self.check_ssh_into_vm(vm_4, public_ip_4, testnegative=False)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=False)
self.check_wget_from_vm(vm_2, public_ip_2, testnegative=False)
self.check_wget_from_vm(vm_3, public_ip_3, testnegative=False)
self.check_wget_from_vm(vm_4, public_ip_4, testnegative=False)
nat_rule1.delete(self.apiclient)
nat_rule2.delete(self.apiclient)
nat_rule3.delete(self.apiclient)
nat_rule4.delete(self.apiclient)
http_rule1.delete(self.apiclient)
http_rule2.delete(self.apiclient)
http_rule3.delete(self.apiclient)
http_rule4.delete(self.apiclient)
self.check_ssh_into_vm(vm_1, public_ip_1, testnegative=True)
self.check_ssh_into_vm(vm_2, public_ip_2, testnegative=True)
self.check_ssh_into_vm(vm_3, public_ip_3, testnegative=True)
self.check_ssh_into_vm(vm_4, public_ip_4, testnegative=True)
self.check_wget_from_vm(vm_1, public_ip_1, testnegative=True,
isVmAccessible=False, network=network_1)
self.check_wget_from_vm(vm_2, public_ip_2, testnegative=True,
isVmAccessible=False, network=network_1)
self.check_wget_from_vm(vm_3, public_ip_3, testnegative=True,
isVmAccessible=False, network=network_2)
self.check_wget_from_vm(vm_4, public_ip_4, testnegative=True,
isVmAccessible=False, network=network_2)
return<|fim▁end|> |
def __init__(self):
self.services = { |
<|file_name|>try_catch.rs<|end_file_name|><|fim▁begin|>/// Wraps a computation with an RAII-allocated Nan::TryCatch.<|fim▁hole|><|fim▁end|> | pub use neon_sys::Neon_TryCatch_With as with;
pub use neon_sys::TryCatchControl; |
<|file_name|>books.server.controller.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
errorHandler = require('./errors'),
Book = mongoose.model('Book'),
_ = require('lodash');
// ,
// googleapi = require('node-google-api')('AIzaSyAffzxPYpgZ14gieEE04_u4U-5Y26UQ8_0');
// exports.gbooks = function(req, res) {
// googleapi.build(function(api) {
// for(var k in api){
// console.log(k);
// }
// });
// var favoriteslist = req.favoriteslist;
// favoriteslist.googleapi.build(function(err, api){
// })
// googleapi.build(function(api) {
// api.books.mylibrary.bookshelves.list({
// userId: '114705319517394488779',
// source: 'gbs_lp_bookshelf_list'
// }, function(result){
// if(result.error) {
// console.log(result.error);
// } else {
// for(var i in result.items) {
// console.log(result.items[i].summary);
// }
// }
// });
// });
// };
/**
* Create a Book
*/
exports.create = function(req, res) {
var book = new Book(req.body);
book.user = req.user;
book.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(book);
}
});
};
/**
 * Show the current Book.
 *
 * Relies on the bookByID middleware having attached the book to req.book.
 */
exports.read = function(req, res) {
	res.jsonp(req.book);
};
/**
* Update a Book
*/
exports.update = function(req, res) {
var book = req.book ;
book = _.extend(book , req.body);
book.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(book);
}
});
};
/**
* Delete an Book
*/
exports.delete = function(req, res) {
var book = req.book ;
book.remove(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(book);
}
});
};
/**
* List of Books
*/
exports.list = function(req, res) { Book.find().sort('-created').populate('user', 'displayName').exec(function(err, books) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(books);
}
});
};
/**
* Book middleware
*/
exports.bookByID = function(req, res, next, id) { Book.findById(id).populate('user', 'displayName').exec(function(err, book) {
if (err) return next(err);
if (! book) return next(new Error('Failed to load Book ' + id));<|fim▁hole|>
/**
* Book authorization middleware
*/
exports.hasAuthorization = function(req, res, next) {
if (req.book.user.id !== req.user.id) {
return res.status(403).send('User is not authorized');
}
next();
};<|fim▁end|> | req.book = book ;
next();
});
}; |
<|file_name|>tree.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! A general b-tree structure suitable for ropes and the like.
use std::cmp::{min, Ordering};
use std::marker::PhantomData;
use std::sync::Arc;
use crate::interval::{Interval, IntervalBounds};
// Branching-factor bounds for internal nodes: `from_nodes` requires more than
// one and at most MAX_CHILDREN children, all satisfying `is_ok_child`.
const MIN_CHILDREN: usize = 4;
const MAX_CHILDREN: usize = 8;
/// Summary information cached for every subtree of a [`Node`] tree.
/// Implementations define how info is derived from a leaf and how the
/// info of two subtrees is combined (a monoid).
pub trait NodeInfo: Clone {
    /// The type of the leaf.
    ///
    /// A given `NodeInfo` is for exactly one type of leaf. That is why
    /// the leaf type is an associated type rather than a type parameter.
    type L: Leaf;

    /// An operator that combines info from two subtrees. It is intended
    /// (but not strictly enforced) that this operator be associative and
    /// obey an identity property. In mathematical terms, the accumulate
    /// method is the operation of a monoid.
    fn accumulate(&mut self, other: &Self);

    /// A mapping from a leaf into the info type. It is intended (but
    /// not strictly enforced) that applying the accumulate method to
    /// the info derived from two leaves gives the same result as
    /// deriving the info from the concatenation of the two leaves. In
    /// mathematical terms, the compute_info method is a monoid
    /// homomorphism.
    fn compute_info(_: &Self::L) -> Self;

    /// The identity of the monoid. Need not be implemented because it
    /// can be computed from the leaf default.
    ///
    /// This is here to demonstrate that this is a monoid.
    fn identity() -> Self {
        Self::compute_info(&Self::L::default())
    }

    /// The interval covered by the first `len` base units of this node. The
    /// default impl is sufficient for most types, but interval trees may need
    /// to override it.
    fn interval(&self, len: usize) -> Interval {
        Interval::new(0, len)
    }
}
/// A trait indicating the default metric of a NodeInfo.
///
/// Adds quality of life functions to
/// Node\<N\>, where N is a DefaultMetric.
/// For example, [Node\<DefaultMetric\>.count](struct.Node.html#method.count).
pub trait DefaultMetric: NodeInfo {
    /// The metric assumed by the convenience methods when none is given.
    type DefaultMetric: Metric<Self>;
}
/// A trait for the leaves of trees of type [Node](struct.Node.html).
///
/// Two leafs can be concatenated using `push_maybe_split`.
pub trait Leaf: Sized + Clone + Default {
    /// Measurement of leaf in base units.
    /// A 'base unit' refers to the smallest discrete unit
    /// by which a given concrete type can be indexed.
    /// Concretely, for Rust's String type the base unit is the byte.
    fn len(&self) -> usize;

    /// Generally a minimum size requirement for leaves.
    fn is_ok_child(&self) -> bool;

    /// Combine the part `other` denoted by the `Interval` `iv` into `self`,
    /// optionly splitting off a new `Leaf` if `self` would have become too big.
    /// Returns either `None` if no splitting was needed, or `Some(rest)` if
    /// `rest` was split off.
    ///
    /// Interval is in "base units". Generally implements a maximum size.
    ///
    /// # Invariants:
    /// - If one or the other input is empty, then no split.
    /// - If either input satisfies `is_ok_child`, then, on return, `self`
    ///   satisfies this, as does the optional split.
    fn push_maybe_split(&mut self, other: &Self, iv: Interval) -> Option<Self>;

    /// Same meaning as push_maybe_split starting from an empty
    /// leaf, but maybe can be implemented more efficiently?
    ///
    /// # Panics
    ///
    /// The default implementation panics if copying `iv` from `other` into an
    /// empty leaf would require a split.
    ///
    // TODO: remove if it doesn't pull its weight
    fn subseq(&self, iv: Interval) -> Self {
        let mut result = Self::default();
        if result.push_maybe_split(self, iv).is_some() {
            panic!("unexpected split");
        }
        result
    }
}
/// A b-tree node storing leaves at the bottom, and with info
/// retained at each node. It is implemented with atomic reference counting
/// and copy-on-write semantics, so an immutable clone is a very cheap
/// operation, and nodes can be shared across threads. Even so, it is
/// designed to be updated in place, with efficiency similar to a mutable
/// data structure, using uniqueness of reference count to detect when
/// this operation is safe.
///
/// When the leaf is a string, this is a rope data structure (a persistent
/// rope in functional programming jargon). However, it is not restricted
/// to strings, and it is expected to be the basis for a number of data
/// structures useful for text processing.
///
/// Sharing is via `Arc`; see [`Node::ptr_eq`] for a cheap identity check.
#[derive(Clone)]
pub struct Node<N: NodeInfo>(Arc<NodeBody<N>>);
// The shared, reference-counted payload of a `Node`.
#[derive(Clone)]
struct NodeBody<N: NodeInfo> {
    // Distance to the leaves; 0 for a leaf node.
    height: usize,
    // Total length of the subtree, in base units.
    len: usize,
    // Accumulated info for the whole subtree.
    info: N,
    val: NodeVal<N>,
}
// The contents of a node: either actual leaf data, or child subtrees
// (all of equal height -- see `Node::from_nodes`).
#[derive(Clone)]
enum NodeVal<N: NodeInfo> {
    Leaf(N::L),
    Internal(Vec<Node<N>>),
}
// also consider making Metric a newtype for usize, so type system can
// help separate metrics

/// A trait for quickly processing attributes of a
/// [NodeInfo](struct.NodeInfo.html).
///
/// Unless a method says otherwise, offsets passed to and returned from
/// these methods are in base units.
///
/// For the conceptual background see the
/// [blog post, Rope science, part 2: metrics](https://github.com/google/xi-editor/blob/master/docs/docs/rope_science_02.md).
pub trait Metric<N: NodeInfo> {
    /// Return the size of the
    /// [NodeInfo::L](trait.NodeInfo.html#associatedtype.L), as measured by this
    /// metric.
    ///
    /// The usize argument is the total size/length of the node, in base units.
    ///
    /// # Examples
    /// For the [LinesMetric](../rope/struct.LinesMetric.html), this gives the number of
    /// lines in string contained in the leaf. For the
    /// [BaseMetric](../rope/struct.BaseMetric.html), this gives the size of the string
    /// in uft8 code units, that is, bytes.
    ///
    fn measure(info: &N, len: usize) -> usize;

    /// Returns the smallest offset, in base units, for an offset in measured units.
    ///
    /// # Invariants:
    ///
    /// - `from_base_units(to_base_units(x)) == x` is True for valid `x`
    fn to_base_units(l: &N::L, in_measured_units: usize) -> usize;

    /// Returns the smallest offset in measured units corresponding to an offset in base units.
    ///
    /// # Invariants:
    ///
    /// - `from_base_units(to_base_units(x)) == x` is True for valid `x`
    fn from_base_units(l: &N::L, in_base_units: usize) -> usize;

    /// Return whether the offset in base units is a boundary of this metric.
    /// If a boundary is at end of a leaf then this method must return true.
    /// However, a boundary at the beginning of a leaf is optional
    /// (the previous leaf will be queried).
    fn is_boundary(l: &N::L, offset: usize) -> bool;

    /// Returns the index of the boundary directly preceding offset,
    /// or None if no such boundary exists. Input and result are in base units.
    fn prev(l: &N::L, offset: usize) -> Option<usize>;

    /// Returns the index of the first boundary for which index > offset,
    /// or None if no such boundary exists. Input and result are in base units.
    fn next(l: &N::L, offset: usize) -> Option<usize>;

    /// Returns true if the measured units in this metric can span multiple
    /// leaves. As an example, in a metric that measures lines in a rope, a
    /// line may start in one leaf and end in another; however in a metric
    /// measuring bytes, storage of a single byte cannot extend across leaves.
    fn can_fragment() -> bool;
}
impl<N: NodeInfo> Node<N> {
pub fn from_leaf(l: N::L) -> Node<N> {
let len = l.len();
let info = N::compute_info(&l);
Node(Arc::new(NodeBody { height: 0, len, info, val: NodeVal::Leaf(l) }))
}
/// Create a node from a vec of nodes.
///
/// The input must satisfy the following balancing requirements:
/// * The length of `nodes` must be <= MAX_CHILDREN and > 1.
/// * All the nodes are the same height.
/// * All the nodes must satisfy is_ok_child.
fn from_nodes(nodes: Vec<Node<N>>) -> Node<N> {
debug_assert!(nodes.len() > 1);
debug_assert!(nodes.len() <= MAX_CHILDREN);
let height = nodes[0].0.height + 1;
let mut len = nodes[0].0.len;
let mut info = nodes[0].0.info.clone();
debug_assert!(nodes[0].is_ok_child());
for child in &nodes[1..] {
debug_assert_eq!(child.height() + 1, height);
debug_assert!(child.is_ok_child());
len += child.0.len;
info.accumulate(&child.0.info);
}
Node(Arc::new(NodeBody { height, len, info, val: NodeVal::Internal(nodes) }))
}
pub fn len(&self) -> usize {
self.0.len
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns `true` if these two `Node`s share the same underlying data.
///
/// This is principally intended to be used by the druid crate, without needing
/// to actually add a feature and implement druid's `Data` trait.
pub fn ptr_eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
fn height(&self) -> usize {
self.0.height
}
fn is_leaf(&self) -> bool {
self.0.height == 0
}
fn interval(&self) -> Interval {
self.0.info.interval(self.0.len)
}
fn get_children(&self) -> &[Node<N>] {
if let NodeVal::Internal(ref v) = self.0.val {
v
} else {
panic!("get_children called on leaf node");
}
}
fn get_leaf(&self) -> &N::L {
if let NodeVal::Leaf(ref l) = self.0.val {
l
} else {
panic!("get_leaf called on internal node");
}
}
/// Call a callback with a mutable reference to a leaf.
///
/// This clones the leaf if the reference is shared. It also recomputes
/// length and info after the leaf is mutated.
fn with_leaf_mut<T>(&mut self, f: impl FnOnce(&mut N::L) -> T) -> T {
let inner = Arc::make_mut(&mut self.0);
if let NodeVal::Leaf(ref mut l) = inner.val {
let result = f(l);
inner.len = l.len();
inner.info = N::compute_info(l);
result
} else {
panic!("with_leaf_mut called on internal node");
}
}
fn is_ok_child(&self) -> bool {
match self.0.val {
NodeVal::Leaf(ref l) => l.is_ok_child(),
NodeVal::Internal(ref nodes) => (nodes.len() >= MIN_CHILDREN),
}
}
fn merge_nodes(children1: &[Node<N>], children2: &[Node<N>]) -> Node<N> {
let n_children = children1.len() + children2.len();
if n_children <= MAX_CHILDREN {
Node::from_nodes([children1, children2].concat())
} else {
// Note: this leans left. Splitting at midpoint is also an option
let splitpoint = min(MAX_CHILDREN, n_children - MIN_CHILDREN);
let mut iter = children1.iter().chain(children2.iter()).cloned();
let left = iter.by_ref().take(splitpoint).collect();
let right = iter.collect();
let parent_nodes = vec![Node::from_nodes(left), Node::from_nodes(right)];
Node::from_nodes(parent_nodes)
}
}
fn merge_leaves(mut rope1: Node<N>, rope2: Node<N>) -> Node<N> {
debug_assert!(rope1.is_leaf() && rope2.is_leaf());
let both_ok = rope1.get_leaf().is_ok_child() && rope2.get_leaf().is_ok_child();
if both_ok {
return Node::from_nodes(vec![rope1, rope2]);
}
match {
let node1 = Arc::make_mut(&mut rope1.0);
let leaf2 = rope2.get_leaf();
if let NodeVal::Leaf(ref mut leaf1) = node1.val {
let leaf2_iv = Interval::new(0, leaf2.len());
let new = leaf1.push_maybe_split(leaf2, leaf2_iv);
node1.len = leaf1.len();
node1.info = N::compute_info(leaf1);
new
} else {
panic!("merge_leaves called on non-leaf");
}
} {
Some(new) => Node::from_nodes(vec![rope1, Node::from_leaf(new)]),
None => rope1,
}
}
pub fn concat(rope1: Node<N>, rope2: Node<N>) -> Node<N> {
let h1 = rope1.height();
let h2 = rope2.height();
match h1.cmp(&h2) {
Ordering::Less => {
let children2 = rope2.get_children();
if h1 == h2 - 1 && rope1.is_ok_child() {
return Node::merge_nodes(&[rope1], children2);
}
let newrope = Node::concat(rope1, children2[0].clone());
if newrope.height() == h2 - 1 {
Node::merge_nodes(&[newrope], &children2[1..])
} else {
Node::merge_nodes(newrope.get_children(), &children2[1..])
}
}
Ordering::Equal => {<|fim▁hole|> return Node::from_nodes(vec![rope1, rope2]);
}
if h1 == 0 {
return Node::merge_leaves(rope1, rope2);
}
Node::merge_nodes(rope1.get_children(), rope2.get_children())
}
Ordering::Greater => {
let children1 = rope1.get_children();
if h2 == h1 - 1 && rope2.is_ok_child() {
return Node::merge_nodes(children1, &[rope2]);
}
let lastix = children1.len() - 1;
let newrope = Node::concat(children1[lastix].clone(), rope2);
if newrope.height() == h1 - 1 {
Node::merge_nodes(&children1[..lastix], &[newrope])
} else {
Node::merge_nodes(&children1[..lastix], newrope.get_children())
}
}
}
}
pub fn measure<M: Metric<N>>(&self) -> usize {
M::measure(&self.0.info, self.0.len)
}
pub(crate) fn push_subseq(&self, b: &mut TreeBuilder<N>, iv: Interval) {
if iv.is_empty() {
return;
}
if iv == self.interval() {
b.push(self.clone());
return;
}
match self.0.val {
NodeVal::Leaf(ref l) => {
b.push_leaf_slice(l, iv);
}
NodeVal::Internal(ref v) => {
let mut offset = 0;
for child in v {
if iv.is_before(offset) {
break;
}
let child_iv = child.interval();
// easier just to use signed ints?
let rec_iv = iv.intersect(child_iv.translate(offset)).translate_neg(offset);
child.push_subseq(b, rec_iv);
offset += child.len();
}
}
}
}
pub fn subseq<T: IntervalBounds>(&self, iv: T) -> Node<N> {
let iv = iv.into_interval(self.len());
let mut b = TreeBuilder::new();
self.push_subseq(&mut b, iv);
b.build()
}
pub fn edit<T, IV>(&mut self, iv: IV, new: T)
where
T: Into<Node<N>>,
IV: IntervalBounds,
{
let mut b = TreeBuilder::new();
let iv = iv.into_interval(self.len());
let self_iv = self.interval();
self.push_subseq(&mut b, self_iv.prefix(iv));
b.push(new.into());
self.push_subseq(&mut b, self_iv.suffix(iv));
*self = b.build();
}
// doesn't deal with endpoint, handle that specially if you need it
pub fn convert_metrics<M1: Metric<N>, M2: Metric<N>>(&self, mut m1: usize) -> usize {
if m1 == 0 {
return 0;
}
// If M1 can fragment, then we must land on the leaf containing
// the m1 boundary. Otherwise, we can land on the beginning of
// the leaf immediately following the M1 boundary, which may be
// more efficient.
let m1_fudge = if M1::can_fragment() { 1 } else { 0 };
let mut m2 = 0;
let mut node = self;
while node.height() > 0 {
for child in node.get_children() {
let child_m1 = child.measure::<M1>();
if m1 < child_m1 + m1_fudge {
node = child;
break;
}
m2 += child.measure::<M2>();
m1 -= child_m1;
}
}
let l = node.get_leaf();
let base = M1::to_base_units(l, m1);
m2 + M2::from_base_units(l, base)
}
}
/// Conversions between a tree's default metric and another metric.
impl<N: DefaultMetric> Node<N> {
    /// Measures the length of the text bounded by ``DefaultMetric::measure(offset)`` with another metric.
    ///
    /// # Examples
    /// ```
    /// use crate::xi_rope::{Rope, LinesMetric};
    ///
    /// // the default metric of Rope is BaseMetric (aka number of bytes)
    /// let my_rope = Rope::from("first line \n second line \n");
    ///
    /// // count the number of lines in my_rope
    /// let num_lines = my_rope.count::<LinesMetric>(my_rope.len());
    /// assert_eq!(2, num_lines);
    /// ```
    pub fn count<M: Metric<N>>(&self, offset: usize) -> usize {
        self.convert_metrics::<N::DefaultMetric, M>(offset)
    }

    /// Measures the length of the text bounded by ``M::measure(offset)`` with the default metric.
    ///
    /// # Examples
    /// ```
    /// use crate::xi_rope::{Rope, LinesMetric};
    ///
    /// // the default metric of Rope is BaseMetric (aka number of bytes)
    /// let my_rope = Rope::from("first line \n second line \n");
    ///
    /// // get the byte offset of the line at index 1
    /// let byte_offset = my_rope.count_base_units::<LinesMetric>(1);
    /// assert_eq!(12, byte_offset);
    /// ```
    pub fn count_base_units<M: Metric<N>>(&self, offset: usize) -> usize {
        self.convert_metrics::<M, N::DefaultMetric>(offset)
    }
}
impl<N: NodeInfo> Default for Node<N> {
    /// The default tree is a single node holding the default (empty) leaf.
    fn default() -> Node<N> {
        Node::from_leaf(N::L::default())
    }
}
/// A builder for creating new trees.
pub struct TreeBuilder<N: NodeInfo> {
    // A stack of partially built trees. These are kept in order of
    // strictly descending height, and all vectors have a length less
    // than MAX_CHILDREN and greater than zero.
    //
    // In addition, there is a balancing invariant: for each vector
    // of length greater than one, all elements satisfy `is_ok_child`.
    //
    // An empty stack represents the empty tree (see `build`).
    stack: Vec<Vec<Node<N>>>,
}
impl<N: NodeInfo> TreeBuilder<N> {
    /// A new, empty builder.
    pub fn new() -> TreeBuilder<N> {
        TreeBuilder { stack: Vec::new() }
    }

    /// Append a node to the tree being built.
    pub fn push(&mut self, mut n: Node<N>) {
        loop {
            // Compare the height of the top-of-stack group with `n`.
            let ord = if let Some(last) = self.stack.last() {
                last[0].height().cmp(&n.height())
            } else {
                Ordering::Greater
            };
            match ord {
                Ordering::Less => {
                    // Top group is shorter than `n`: collapse it into a node,
                    // merge with `n`, and retry with the combined tree.
                    n = Node::concat(self.pop(), n);
                }
                Ordering::Equal => {
                    let tos = self.stack.last_mut().unwrap();
                    if tos.last().unwrap().is_ok_child() && n.is_ok_child() {
                        tos.push(n);
                    } else if n.height() == 0 {
                        // Undersized leaf: push its contents into the previous
                        // leaf, splitting if the combined leaf grows too large.
                        let iv = Interval::new(0, n.len());
                        let new_leaf = tos
                            .last_mut()
                            .unwrap()
                            .with_leaf_mut(|l| l.push_maybe_split(n.get_leaf(), iv));
                        if let Some(new_leaf) = new_leaf {
                            tos.push(Node::from_leaf(new_leaf));
                        }
                    } else {
                        // Undersized internal node: redistribute children of
                        // the previous node and `n` (same left-leaning split
                        // strategy as `Node::merge_nodes`).
                        let last = tos.pop().unwrap();
                        let children1 = last.get_children();
                        let children2 = n.get_children();
                        let n_children = children1.len() + children2.len();
                        if n_children <= MAX_CHILDREN {
                            tos.push(Node::from_nodes([children1, children2].concat()));
                        } else {
                            // Note: this leans left. Splitting at midpoint is also an option
                            let splitpoint = min(MAX_CHILDREN, n_children - MIN_CHILDREN);
                            let mut iter = children1.iter().chain(children2.iter()).cloned();
                            let left = iter.by_ref().take(splitpoint).collect();
                            let right = iter.collect();
                            tos.push(Node::from_nodes(left));
                            tos.push(Node::from_nodes(right));
                        }
                    }
                    if tos.len() < MAX_CHILDREN {
                        break;
                    }
                    // Group is full: collapse it and keep pushing upward.
                    n = self.pop()
                }
                Ordering::Greater => {
                    // `n` is shorter than everything on the stack: start a
                    // new group for its height.
                    self.stack.push(vec![n]);
                    break;
                }
            }
        }
    }

    /// Append a sequence of leaves.
    pub fn push_leaves(&mut self, leaves: impl IntoIterator<Item = N::L>) {
        for leaf in leaves.into_iter() {
            self.push(Node::from_leaf(leaf));
        }
    }

    /// Append a single leaf.
    pub fn push_leaf(&mut self, l: N::L) {
        self.push(Node::from_leaf(l))
    }

    /// Append a slice of a single leaf.
    pub fn push_leaf_slice(&mut self, l: &N::L, iv: Interval) {
        self.push(Node::from_leaf(l.subseq(iv)))
    }

    /// Build the final tree.
    ///
    /// The tree is the concatenation of all the nodes and leaves that have been pushed
    /// on the builder, in order.
    pub fn build(mut self) -> Node<N> {
        if self.stack.is_empty() {
            Node::from_leaf(N::L::default())
        } else {
            let mut n = self.pop();
            while !self.stack.is_empty() {
                n = Node::concat(self.pop(), n);
            }
            n
        }
    }

    /// Pop the last vec-of-nodes off the stack, resulting in a node.
    fn pop(&mut self) -> Node<N> {
        let nodes = self.stack.pop().unwrap();
        if nodes.len() == 1 {
            nodes.into_iter().next().unwrap()
        } else {
            Node::from_nodes(nodes)
        }
    }
}
/// Number of tree levels (counting up from the leaf's parent) cached by a
/// `Cursor`; deeper levels are re-derived with `descend`.
const CURSOR_CACHE_SIZE: usize = 4;

/// A data structure for traversing boundaries in a tree.
///
/// It is designed to be efficient both for random access and for iteration. The
/// cursor itself is agnostic to which [`Metric`] is used to determine boundaries, but
/// the methods to find boundaries are parametrized on the [`Metric`].
///
/// A cursor can be valid or invalid. It is always valid when created or after
/// [`set`](#method.set) is called, and becomes invalid after [`prev`](#method.prev)
/// or [`next`](#method.next) fails to find a boundary.
///
/// [`Metric`]: struct.Metric.html
pub struct Cursor<'a, N: 'a + NodeInfo> {
    /// The tree being traversed by this cursor.
    root: &'a Node<N>,
    /// The current position of the cursor.
    ///
    /// It is always less than or equal to the tree length.
    position: usize,
    /// The cache holds the tail of the path from the root to the current leaf.
    ///
    /// Each entry is a reference to the parent node and the index of the child. It
    /// is stored bottom-up; `cache[0]` is the parent of the leaf and the index of
    /// the leaf within that parent.
    ///
    /// The main motivation for this being a fixed-size array is to keep the cursor
    /// an allocation-free data structure.
    cache: [Option<(&'a Node<N>, usize)>; CURSOR_CACHE_SIZE],
    /// The leaf containing the current position, when the cursor is valid.
    ///
    /// The position is only at the end of the leaf when it is at the end of the tree.
    leaf: Option<&'a N::L>,
    /// The offset of `leaf` within the tree.
    offset_of_leaf: usize,
}
impl<'a, N: NodeInfo> Cursor<'a, N> {
    /// Create a new cursor at the given position.
    pub fn new(n: &'a Node<N>, position: usize) -> Cursor<'a, N> {
        let mut result = Cursor {
            root: n,
            position,
            cache: [None; CURSOR_CACHE_SIZE],
            leaf: None,
            offset_of_leaf: 0,
        };
        result.descend();
        result
    }

    /// The length of the tree.
    pub fn total_len(&self) -> usize {
        self.root.len()
    }

    /// Return a reference to the root node of the tree.
    pub fn root(&self) -> &'a Node<N> {
        self.root
    }

    /// Get the current leaf of the cursor.
    ///
    /// If the cursor is valid, returns the leaf containing the current position,
    /// and the offset of the current position within the leaf. That offset is equal
    /// to the leaf length only at the end, otherwise it is less than the leaf length.
    pub fn get_leaf(&self) -> Option<(&'a N::L, usize)> {
        self.leaf.map(|l| (l, self.position - self.offset_of_leaf))
    }

    /// Set the position of the cursor.
    ///
    /// The cursor is valid after this call.
    ///
    /// Precondition: `position` is less than or equal to the length of the tree.
    pub fn set(&mut self, position: usize) {
        self.position = position;
        // Fast path: the new position is still inside the current leaf.
        if let Some(l) = self.leaf {
            if self.position >= self.offset_of_leaf && self.position < self.offset_of_leaf + l.len()
            {
                return;
            }
        }
        // TODO: walk up tree to find leaf if nearby
        self.descend();
    }

    /// Get the position of the cursor.
    pub fn pos(&self) -> usize {
        self.position
    }

    /// Determine whether the current position is a boundary.
    ///
    /// Note: the beginning and end of the tree may or may not be boundaries, depending on the
    /// metric. If the metric is not `can_fragment`, then they always are.
    pub fn is_boundary<M: Metric<N>>(&mut self) -> bool {
        if self.leaf.is_none() {
            // not at a valid position
            return false;
        }
        if self.position == self.offset_of_leaf && !M::can_fragment() {
            return true;
        }
        if self.position == 0 || self.position > self.offset_of_leaf {
            return M::is_boundary(self.leaf.unwrap(), self.position - self.offset_of_leaf);
        }
        // tricky case, at beginning of leaf, need to query end of previous
        // leaf; TODO: would be nice if we could do it another way that didn't
        // make the method &mut self.
        let l = self.prev_leaf().unwrap().0;
        let result = M::is_boundary(l, l.len());
        let _ = self.next_leaf();
        result
    }

    /// Moves the cursor to the previous boundary.
    ///
    /// When there is no previous boundary, returns `None` and the cursor becomes invalid.
    ///
    /// Return value: the position of the boundary, if it exists.
    pub fn prev<M: Metric<N>>(&mut self) -> Option<usize> {
        if self.position == 0 || self.leaf.is_none() {
            self.leaf = None;
            return None;
        }
        let orig_pos = self.position;
        let offset_in_leaf = orig_pos - self.offset_of_leaf;
        if offset_in_leaf > 0 {
            // Try the cheap case first: a boundary within the current leaf.
            let l = self.leaf.unwrap();
            if let Some(offset_in_leaf) = M::prev(l, offset_in_leaf) {
                self.position = self.offset_of_leaf + offset_in_leaf;
                return Some(self.position);
            }
        }
        // not in same leaf, need to scan backwards
        self.prev_leaf()?;
        if let Some(offset) = self.last_inside_leaf::<M>(orig_pos) {
            return Some(offset);
        }
        // Not found in previous leaf, find using measurement.
        let measure = self.measure_leaf::<M>(self.position);
        if measure == 0 {
            // No boundary anywhere before the position: invalidate.
            self.leaf = None;
            self.position = 0;
            return None;
        }
        self.descend_metric::<M>(measure);
        self.last_inside_leaf::<M>(orig_pos)
    }

    /// Moves the cursor to the next boundary.
    ///
    /// When there is no next boundary, returns `None` and the cursor becomes invalid.
    ///
    /// Return value: the position of the boundary, if it exists.
    pub fn next<M: Metric<N>>(&mut self) -> Option<usize> {
        if self.position >= self.root.len() || self.leaf.is_none() {
            self.leaf = None;
            return None;
        }
        if let Some(offset) = self.next_inside_leaf::<M>() {
            return Some(offset);
        }
        self.next_leaf()?;
        if let Some(offset) = self.next_inside_leaf::<M>() {
            return Some(offset);
        }
        // Leaf is 0-measure (otherwise would have already succeeded).
        let measure = self.measure_leaf::<M>(self.position);
        self.descend_metric::<M>(measure + 1);
        if let Some(offset) = self.next_inside_leaf::<M>() {
            return Some(offset);
        }
        // Not found, properly invalidate cursor.
        self.position = self.root.len();
        self.leaf = None;
        None
    }

    /// Returns the current position if it is a boundary in this [`Metric`],
    /// else behaves like [`next`](#method.next).
    ///
    /// [`Metric`]: struct.Metric.html
    pub fn at_or_next<M: Metric<N>>(&mut self) -> Option<usize> {
        if self.is_boundary::<M>() {
            Some(self.pos())
        } else {
            self.next::<M>()
        }
    }

    /// Returns the current position if it is a boundary in this [`Metric`],
    /// else behaves like [`prev`](#method.prev).
    ///
    /// [`Metric`]: struct.Metric.html
    pub fn at_or_prev<M: Metric<N>>(&mut self) -> Option<usize> {
        if self.is_boundary::<M>() {
            Some(self.pos())
        } else {
            self.prev::<M>()
        }
    }

    /// Returns an iterator with this cursor over the given [`Metric`].
    ///
    /// # Examples:
    ///
    /// ```
    /// # use xi_rope::{Cursor, LinesMetric, Rope};
    /// #
    /// let text: Rope = "one line\ntwo line\nred line\nblue".into();
    /// let mut cursor = Cursor::new(&text, 0);
    /// let line_offsets = cursor.iter::<LinesMetric>().collect::<Vec<_>>();
    /// assert_eq!(line_offsets, vec![9, 18, 27]);
    ///
    /// ```
    /// [`Metric`]: struct.Metric.html
    pub fn iter<'c, M: Metric<N>>(&'c mut self) -> CursorIter<'c, 'a, N, M> {
        CursorIter { cursor: self, _metric: PhantomData }
    }

    /// Tries to find the last boundary in the leaf the cursor is currently in.
    ///
    /// If the last boundary is at the end of the leaf, it is only counted if
    /// it is less than `orig_pos`.
    #[inline]
    fn last_inside_leaf<M: Metric<N>>(&mut self, orig_pos: usize) -> Option<usize> {
        let l = self.leaf.expect("inconsistent, shouldn't get here");
        let len = l.len();
        if self.offset_of_leaf + len < orig_pos && M::is_boundary(l, len) {
            // Boundary at the leaf's end: normalize to the next leaf's start.
            let _ = self.next_leaf();
            return Some(self.position);
        }
        let offset_in_leaf = M::prev(l, len)?;
        self.position = self.offset_of_leaf + offset_in_leaf;
        Some(self.position)
    }

    /// Tries to find the next boundary in the leaf the cursor is currently in.
    #[inline]
    fn next_inside_leaf<M: Metric<N>>(&mut self) -> Option<usize> {
        let l = self.leaf.expect("inconsistent, shouldn't get here");
        let offset_in_leaf = self.position - self.offset_of_leaf;
        let offset_in_leaf = M::next(l, offset_in_leaf)?;
        if offset_in_leaf == l.len() && self.offset_of_leaf + offset_in_leaf != self.root.len() {
            // Boundary at the leaf's end (not end of tree): normalize to the
            // next leaf's start so the invariant on `leaf` holds.
            let _ = self.next_leaf();
        } else {
            self.position = self.offset_of_leaf + offset_in_leaf;
        }
        Some(self.position)
    }

    /// Move to beginning of next leaf.
    ///
    /// Return value: same as [`get_leaf`](#method.get_leaf).
    pub fn next_leaf(&mut self) -> Option<(&'a N::L, usize)> {
        let leaf = self.leaf?;
        self.position = self.offset_of_leaf + leaf.len();
        for i in 0..CURSOR_CACHE_SIZE {
            if self.cache[i].is_none() {
                // this probably can't happen
                self.leaf = None;
                return None;
            }
            let (node, j) = self.cache[i].unwrap();
            if j + 1 < node.get_children().len() {
                // Found an ancestor with a next sibling: walk back down its
                // leftmost spine, refreshing the cache levels below.
                self.cache[i] = Some((node, j + 1));
                let mut node_down = &node.get_children()[j + 1];
                for k in (0..i).rev() {
                    self.cache[k] = Some((node_down, 0));
                    node_down = &node_down.get_children()[0];
                }
                self.leaf = Some(node_down.get_leaf());
                self.offset_of_leaf = self.position;
                return self.get_leaf();
            }
        }
        if self.offset_of_leaf + self.leaf.unwrap().len() == self.root.len() {
            self.leaf = None;
            return None;
        }
        // The cached levels are exhausted; re-derive the path from the root.
        self.descend();
        self.get_leaf()
    }

    /// Move to beginning of previous leaf.
    ///
    /// Return value: same as [`get_leaf`](#method.get_leaf).
    pub fn prev_leaf(&mut self) -> Option<(&'a N::L, usize)> {
        if self.offset_of_leaf == 0 {
            self.leaf = None;
            self.position = 0;
            return None;
        }
        for i in 0..CURSOR_CACHE_SIZE {
            if self.cache[i].is_none() {
                // this probably can't happen
                self.leaf = None;
                return None;
            }
            let (node, j) = self.cache[i].unwrap();
            if j > 0 {
                // Found an ancestor with a previous sibling: walk back down
                // its rightmost spine, refreshing the cache levels below.
                self.cache[i] = Some((node, j - 1));
                let mut node_down = &node.get_children()[j - 1];
                for k in (0..i).rev() {
                    let last_ix = node_down.get_children().len() - 1;
                    self.cache[k] = Some((node_down, last_ix));
                    node_down = &node_down.get_children()[last_ix];
                }
                let leaf = node_down.get_leaf();
                self.leaf = Some(leaf);
                self.offset_of_leaf -= leaf.len();
                self.position = self.offset_of_leaf;
                return self.get_leaf();
            }
        }
        // Cached levels exhausted; target any position in the previous leaf
        // and re-derive the path from the root.
        self.position = self.offset_of_leaf - 1;
        self.descend();
        self.position = self.offset_of_leaf;
        self.get_leaf()
    }

    /// Go to the leaf containing the current position.
    ///
    /// Sets `leaf` to the leaf containing `position`, and updates `cache` and
    /// `offset_of_leaf` to be consistent.
    fn descend(&mut self) {
        let mut node = self.root;
        let mut offset = 0;
        while node.height() > 0 {
            let children = node.get_children();
            let mut i = 0;
            // Pick the child containing `position` (or the last child when
            // the position is at the very end).
            loop {
                if i + 1 == children.len() {
                    break;
                }
                let nextoff = offset + children[i].len();
                if nextoff > self.position {
                    break;
                }
                offset = nextoff;
                i += 1;
            }
            let cache_ix = node.height() - 1;
            if cache_ix < CURSOR_CACHE_SIZE {
                self.cache[cache_ix] = Some((node, i));
            }
            node = &children[i];
        }
        self.leaf = Some(node.get_leaf());
        self.offset_of_leaf = offset;
    }

    /// Returns the measure at the beginning of the leaf containing `pos`.
    ///
    /// This method is O(log n) no matter the current cursor state.
    fn measure_leaf<M: Metric<N>>(&self, mut pos: usize) -> usize {
        let mut node = self.root;
        let mut metric = 0;
        while node.height() > 0 {
            for child in node.get_children() {
                let len = child.len();
                if pos < len {
                    node = child;
                    break;
                }
                pos -= len;
                metric += child.measure::<M>();
            }
        }
        metric
    }

    /// Find the leaf having the given measure.
    ///
    /// This function sets `self.position` to the beginning of the leaf
    /// containing the smallest offset with the given metric, and also updates
    /// state as if [`descend`](#method.descend) was called.
    ///
    /// If `measure` is greater than the measure of the whole tree, then moves
    /// to the last node.
    fn descend_metric<M: Metric<N>>(&mut self, mut measure: usize) {
        let mut node = self.root;
        let mut offset = 0;
        while node.height() > 0 {
            let children = node.get_children();
            let mut i = 0;
            loop {
                if i + 1 == children.len() {
                    break;
                }
                let child = &children[i];
                let child_m = child.measure::<M>();
                if child_m >= measure {
                    break;
                }
                offset += child.len();
                measure -= child_m;
                i += 1;
            }
            let cache_ix = node.height() - 1;
            if cache_ix < CURSOR_CACHE_SIZE {
                self.cache[cache_ix] = Some((node, i));
            }
            node = &children[i];
        }
        self.leaf = Some(node.get_leaf());
        self.position = offset;
        self.offset_of_leaf = offset;
    }
}
/// An iterator generated by a [`Cursor`], for some [`Metric`].
///
/// [`Cursor`]: struct.Cursor.html
/// [`Metric`]: struct.Metric.html
pub struct CursorIter<'c, 'a: 'c, N: 'a + NodeInfo, M: 'a + Metric<N>> {
    /// The cursor advanced by this iterator.
    cursor: &'c mut Cursor<'a, N>,
    /// Ties the iterator to the metric `M`; carries no runtime data.
    _metric: PhantomData<&'a M>,
}
impl<'c, 'a, N: NodeInfo, M: Metric<N>> Iterator for CursorIter<'c, 'a, N, M> {
    type Item = usize;

    /// Yields the position of each successive boundary in metric `M`.
    fn next(&mut self) -> Option<usize> {
        self.cursor.next::<M>()
    }
}
impl<'c, 'a, N: NodeInfo, M: Metric<N>> CursorIter<'c, 'a, N, M> {
    /// Returns the current position of the underlying [`Cursor`].
    ///
    /// Once iteration has begun, this is the position of the most recently
    /// yielded boundary.
    ///
    /// [`Cursor`]: struct.Cursor.html
    pub fn pos(&self) -> usize {
        self.cursor.pos()
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::rope::*;

    /// Builds a string of `n` lines whose lengths grow from 0 to n-1
    /// ("a"-runs), so line boundaries land at triangular-number offsets.
    fn build_triangle(n: u32) -> String {
        let mut s = String::new();
        let mut line = String::new();
        for _ in 0..n {
            s += &line;
            s += "\n";
            line += "a";
        }
        s
    }

    #[test]
    fn eq_rope_with_pieces() {
        let n = 2_000;
        let s = build_triangle(n);
        let mut builder_default = TreeBuilder::new();
        let mut concat_rope = Rope::default();
        builder_default.push_str(&s);
        let mut i = 0;
        while i < s.len() {
            let j = (i + 1000).min(s.len());
            concat_rope = concat_rope + s[i..j].into();
            i = j;
        }
        let built_rope = builder_default.build();
        assert_eq!(built_rope, concat_rope);
    }

    #[test]
    fn cursor_next_triangle() {
        let n = 2_000;
        let text = Rope::from(build_triangle(n));
        let mut cursor = Cursor::new(&text, 0);
        let mut prev_offset = cursor.pos();
        // Line i has length i-1 plus its newline, so successive boundaries
        // are spaced by i.
        for i in 1..(n + 1) as usize {
            let offset = cursor.next::<LinesMetric>().expect("arrived at the end too soon");
            assert_eq!(offset - prev_offset, i);
            prev_offset = offset;
        }
        assert_eq!(cursor.next::<LinesMetric>(), None);
    }

    #[test]
    fn node_is_empty() {
        let text = Rope::from(String::new());
        assert_eq!(text.is_empty(), true);
    }

    #[test]
    fn cursor_next_empty() {
        let text = Rope::from(String::new());
        let mut cursor = Cursor::new(&text, 0);
        assert_eq!(cursor.next::<LinesMetric>(), None);
        assert_eq!(cursor.pos(), 0);
    }

    #[test]
    fn cursor_iter() {
        let text: Rope = build_triangle(50).into();
        let mut cursor = Cursor::new(&text, 0);
        let mut manual = Vec::new();
        while let Some(nxt) = cursor.next::<LinesMetric>() {
            manual.push(nxt);
        }
        cursor.set(0);
        let auto = cursor.iter::<LinesMetric>().collect::<Vec<_>>();
        assert_eq!(manual, auto);
    }

    #[test]
    fn cursor_next_misc() {
        cursor_next_for("toto");
        cursor_next_for("toto\n");
        cursor_next_for("toto\ntata");
        cursor_next_for("歴史\n科学的");
        cursor_next_for("\n歴史\n科学的\n");
        cursor_next_for(&build_triangle(100));
    }

    /// Checks `next::<LinesMetric>` from every starting offset of `s`.
    fn cursor_next_for(s: &str) {
        let r = Rope::from(s.to_owned());
        for i in 0..r.len() {
            let mut c = Cursor::new(&r, i);
            let it = c.next::<LinesMetric>();
            let pos = c.pos();
            assert!(s.as_bytes()[i..pos - 1].iter().all(|c| *c != b'\n'), "missed linebreak");
            if pos < s.len() {
                assert!(it.is_some(), "must be Some(_)");
                assert!(s.as_bytes()[pos - 1] == b'\n', "not a linebreak");
            } else {
                if s.as_bytes()[s.len() - 1] == b'\n' {
                    assert!(it.is_some(), "must be Some(_)");
                } else {
                    assert!(it.is_none());
                    assert!(c.get_leaf().is_none());
                }
            }
        }
    }

    #[test]
    fn cursor_prev_misc() {
        cursor_prev_for("toto");
        cursor_prev_for("a\na\n");
        cursor_prev_for("toto\n");
        cursor_prev_for("toto\ntata");
        cursor_prev_for("歴史\n科学的");
        cursor_prev_for("\n歴史\n科学的\n");
        cursor_prev_for(&build_triangle(100));
    }

    /// Checks `prev::<LinesMetric>` from every starting offset of `s`.
    fn cursor_prev_for(s: &str) {
        let r = Rope::from(s.to_owned());
        for i in 0..r.len() {
            let mut c = Cursor::new(&r, i);
            let it = c.prev::<LinesMetric>();
            let pos = c.pos();
            // Should contain at most one linebreak
            assert!(
                s.as_bytes()[pos..i].iter().filter(|c| **c == b'\n').count() <= 1,
                "missed linebreak"
            );
            if i == 0 && s.as_bytes()[i] == b'\n' {
                assert_eq!(pos, 0);
            }
            if pos > 0 {
                assert!(it.is_some(), "must be Some(_)");
                assert!(s.as_bytes()[pos - 1] == b'\n', "not a linebreak");
            }
        }
    }

    #[test]
    fn at_or_next() {
        let text: Rope = "this\nis\nalil\nstring".into();
        let mut cursor = Cursor::new(&text, 0);
        assert_eq!(cursor.at_or_next::<LinesMetric>(), Some(5));
        assert_eq!(cursor.at_or_next::<LinesMetric>(), Some(5));
        cursor.set(1);
        assert_eq!(cursor.at_or_next::<LinesMetric>(), Some(5));
        assert_eq!(cursor.at_or_prev::<LinesMetric>(), Some(5));
        cursor.set(6);
        assert_eq!(cursor.at_or_prev::<LinesMetric>(), Some(5));
        cursor.set(6);
        assert_eq!(cursor.at_or_next::<LinesMetric>(), Some(8));
        assert_eq!(cursor.at_or_next::<LinesMetric>(), Some(8));
    }

    #[test]
    fn next_zero_measure_large() {
        // Doubling "a" 24 times yields a deep tree with no line boundaries.
        let mut text = Rope::from("a");
        for _ in 0..24 {
            text = Node::concat(text.clone(), text);
            let mut cursor = Cursor::new(&text, 0);
            assert_eq!(cursor.next::<LinesMetric>(), None);
            // Test that cursor is properly invalidated and at end of text.
            assert_eq!(cursor.get_leaf(), None);
            assert_eq!(cursor.pos(), text.len());

            cursor.set(text.len());
            assert_eq!(cursor.prev::<LinesMetric>(), None);
            // Test that cursor is properly invalidated and at beginning of text.
            assert_eq!(cursor.get_leaf(), None);
            assert_eq!(cursor.pos(), 0);
        }
    }

    #[test]
    fn prev_line_large() {
        let s: String = format!("{}{}", "\n", build_triangle(1000));
        let rope = Rope::from(s);
        let mut expected_pos = rope.len();
        let mut cursor = Cursor::new(&rope, rope.len());
        for i in (1..1001).rev() {
            expected_pos = expected_pos - i;
            assert_eq!(expected_pos, cursor.prev::<LinesMetric>().unwrap());
        }
        assert_eq!(None, cursor.prev::<LinesMetric>());
    }

    #[test]
    fn prev_line_small() {
        let empty_rope = Rope::from("\n");
        let mut cursor = Cursor::new(&empty_rope, empty_rope.len());
        assert_eq!(None, cursor.prev::<LinesMetric>());

        let rope = Rope::from("\n\n\n\n\n\n\n\n\n\n");
        cursor = Cursor::new(&rope, rope.len());
        let mut expected_pos = rope.len();
        for _ in (1..10).rev() {
            expected_pos -= 1;
            assert_eq!(expected_pos, cursor.prev::<LinesMetric>().unwrap());
        }
        assert_eq!(None, cursor.prev::<LinesMetric>());
    }

    #[test]
    fn balance_invariant() {
        let mut tb = TreeBuilder::<RopeInfo>::new();
        let leaves: Vec<String> = (0..1000).map(|i| i.to_string().into()).collect();
        tb.push_leaves(leaves);
        let tree = tb.build();
        println!("height {}", tree.height());
    }
}
<|file_name|>session.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class Session(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'user_id': 'int',
'token': 'str',
'created': 'datetime'
}
attribute_map = {
'user_id': 'userId',
'token': 'token',
'created': 'created'
}
def __init__(self, user_id=None, token=None, created=None, local_vars_configuration=None): # noqa: E501
"""Session - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._user_id = None
self._token = None
self._created = None
self.discriminator = None
self.user_id = user_id
self.token = token
self.created = created
@property
def user_id(self):
"""Gets the user_id of this Session. # noqa: E501
The ID of the user of this session # noqa: E501
:return: The user_id of this Session. # noqa: E501
:rtype: int
"""
return self._user_id
@user_id.setter
def user_id(self, user_id):
"""Sets the user_id of this Session.
The ID of the user of this session # noqa: E501
:param user_id: The user_id of this Session. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and user_id is None: # noqa: E501
raise ValueError("Invalid value for `user_id`, must not be `None`") # noqa: E501
self._user_id = user_id
@property
def token(self):
"""Gets the token of this Session. # noqa: E501
An opaque session identifier # noqa: E501<|fim▁hole|> :rtype: str
"""
return self._token
@token.setter
def token(self, token):
"""Sets the token of this Session.
An opaque session identifier # noqa: E501
:param token: The token of this Session. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and token is None: # noqa: E501
raise ValueError("Invalid value for `token`, must not be `None`") # noqa: E501
self._token = token
@property
def created(self):
    """Unix timestamp indicating when the session was first created.

    :return: The created of this Session.
    :rtype: datetime
    """
    return self._created


@created.setter
def created(self, created):
    """Validate and store the created timestamp of this Session.

    :param created: The created of this Session.
    :type: datetime
    :raises ValueError: when client-side validation is enabled and the
        value is ``None`` (the field is required).
    """
    validating = self.local_vars_configuration.client_side_validation
    if validating and created is None:  # noqa: E501
        raise ValueError("Invalid value for `created`, must not be `None`")  # noqa: E501
    self._created = created
def to_dict(self):
    """Returns the model properties as a dict.

    Nested models (anything exposing ``to_dict``) are serialized
    recursively, both inside lists and inside dict values.
    """
    result = {}
    # Iterating the dict directly yields its keys, same as six.iteritems'
    # key component; only the attribute names are needed here.
    for attr in self.openapi_types:
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                key: val.to_dict() if hasattr(val, "to_dict") else val
                for key, val in value.items()
            }
        else:
            result[attr] = value
    return result
def to_str(self):
    """Return the pretty-printed string form of the model's dict."""
    as_dict = self.to_dict()
    return pprint.pformat(as_dict)
def __repr__(self):
    """Debug representation used by `print` and `pprint`; delegates to to_str()."""
    return self.to_str()
def __eq__(self, other):
    """Two Session objects are equal when all model properties match."""
    return isinstance(other, Session) and self.to_dict() == other.to_dict()
def __ne__(self, other):
    """Inverse of equality; any non-Session operand compares unequal."""
    if isinstance(other, Session):
        return self.to_dict() != other.to_dict()
    return True
:return: The token of this Session. # noqa: E501 |
<|file_name|>brocade_aaa.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_aaa(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
    """Capture the NETCONF dispatch callable supplied as ``callback``."""
    dispatch = kwargs.pop('callback')
    self._callback = dispatch
def aaa_config_aaa_authentication_login_first(self, **kwargs):
    """Build config XML setting aaa/authentication/login/first (primary login method) and dispatch it via the callback."""
    config = ET.Element("config")
    aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    aaa = ET.SubElement(aaa_config, "aaa")
    authentication = ET.SubElement(aaa, "authentication")
    login = ET.SubElement(authentication, "login")
    first = ET.SubElement(login, "first")
    first.text = kwargs.pop('first')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def aaa_config_aaa_authentication_login_second(self, **kwargs):
    """Build config XML setting aaa/authentication/login/second (fallback login method) and dispatch it via the callback."""
    config = ET.Element("config")
    aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    aaa = ET.SubElement(aaa_config, "aaa")
    authentication = ET.SubElement(aaa, "authentication")
    login = ET.SubElement(authentication, "login")
    second = ET.SubElement(login, "second")
    second.text = kwargs.pop('second')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def aaa_config_aaa_accounting_exec_defaultacc_start_stop_server_type(self, **kwargs):
    """Build config XML setting aaa/accounting/exec/defaultacc/start-stop/server-type and dispatch it via the callback."""
    config = ET.Element("config")
    aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    aaa = ET.SubElement(aaa_config, "aaa")
    accounting = ET.SubElement(aaa, "accounting")
    # Variable named exec_el because "exec" is a reserved word in Python 2.
    exec_el = ET.SubElement(accounting, "exec")
    defaultacc = ET.SubElement(exec_el, "defaultacc")
    start_stop = ET.SubElement(defaultacc, "start-stop")
    server_type = ET.SubElement(start_stop, "server-type")
    server_type.text = kwargs.pop('server_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def aaa_config_aaa_accounting_commands_defaultacc_start_stop_server_type(self, **kwargs):
    """Build config XML setting aaa/accounting/commands/defaultacc/start-stop/server-type and dispatch it via the callback."""
    config = ET.Element("config")
    aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    aaa = ET.SubElement(aaa_config, "aaa")
    accounting = ET.SubElement(aaa, "accounting")
    commands = ET.SubElement(accounting, "commands")
    defaultacc = ET.SubElement(commands, "defaultacc")
    start_stop = ET.SubElement(defaultacc, "start-stop")
    server_type = ET.SubElement(start_stop, "server-type")
    server_type.text = kwargs.pop('server_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def username_name(self, **kwargs):
    """Build a <config> payload assigning username/name and dispatch it.

    :param name: login name to configure (consumed from kwargs)
    :param callback: optional override for the instance dispatch callable
    """
    config = ET.Element("config")
    user_node = ET.SubElement(
        config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_node = ET.SubElement(user_node, "name")
    name_node.text = kwargs.pop('name')
    dispatch = kwargs.pop('callback', self._callback)
    return dispatch(config)
def username_user_password(self, **kwargs):
    """Build config XML setting username{name}/user-password and dispatch it via the callback."""
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    # List key identifying which username entry is being modified.
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    user_password = ET.SubElement(username, "user-password")
    user_password.text = kwargs.pop('user_password')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def username_encryption_level(self, **kwargs):
    """Build config XML setting username{name}/encryption-level and dispatch it via the callback."""
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    encryption_level = ET.SubElement(username, "encryption-level")
    encryption_level.text = kwargs.pop('encryption_level')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def username_role(self, **kwargs):
    """Build config XML setting username{name}/role and dispatch it via the callback."""
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    role = ET.SubElement(username, "role")
    role.text = kwargs.pop('role')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def username_desc(self, **kwargs):
    """Build config XML setting username{name}/desc and dispatch it via the callback."""
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    desc = ET.SubElement(username, "desc")
    desc.text = kwargs.pop('desc')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def username_enable(self, **kwargs):
    """Build config XML setting username{name}/enable and dispatch it via the callback."""
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    enable = ET.SubElement(username, "enable")
    enable.text = kwargs.pop('enable')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def username_expire(self, **kwargs):
    """Build config XML setting username{name}/expire and dispatch it via the callback."""
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    expire = ET.SubElement(username, "expire")
    expire.text = kwargs.pop('expire')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def service_password_encryption(self, **kwargs):
    """Build config XML enabling service/password-encryption (presence node, carries no value) and dispatch it via the callback."""
    config = ET.Element("config")
    service = ET.SubElement(config, "service", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    password_encryption = ET.SubElement(service, "password-encryption")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def role_name_name(self, **kwargs):
    """Build config XML setting role/name/name (the role list key) and dispatch it via the callback."""
    config = ET.Element("config")
    role = ET.SubElement(config, "role", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    # The variable is deliberately rebound: the schema nests name/name.
    name = ET.SubElement(role, "name")
    name = ET.SubElement(name, "name")
    name.text = kwargs.pop('name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def role_name_desc(self, **kwargs):
    """Build config XML setting role/name{name}/desc and dispatch it via the callback."""
    config = ET.Element("config")
    role = ET.SubElement(config, "role", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name = ET.SubElement(role, "name")
    # List key identifying which role entry is being modified.
    name_key = ET.SubElement(name, "name")
    name_key.text = kwargs.pop('name')
    desc = ET.SubElement(name, "desc")
    desc.text = kwargs.pop('desc')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_hostname(self, **kwargs):
    """Build config XML setting radius-server/host/hostname (keyed by use-vrf) and dispatch it via the callback."""
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    hostname = ET.SubElement(host, "hostname")
    hostname.text = kwargs.pop('hostname')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def radius_server_host_use_vrf(self, **kwargs):
    """Build config XML setting radius-server/host{hostname}/use-vrf and dispatch it via the callback."""
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf = ET.SubElement(host, "use-vrf")
    use_vrf.text = kwargs.pop('use_vrf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def radius_server_host_auth_port(self, **kwargs):
    """Build config XML setting radius-server/host{hostname,use-vrf}/auth-port and dispatch it via the callback."""
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    auth_port = ET.SubElement(host, "auth-port")
    auth_port.text = kwargs.pop('auth_port')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def radius_server_host_protocol(self, **kwargs):
    """Build config XML setting radius-server/host{hostname,use-vrf}/protocol and dispatch it via the callback."""
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    protocol = ET.SubElement(host, "protocol")
    protocol.text = kwargs.pop('protocol')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def radius_server_host_key(self, **kwargs):
    """Build config XML setting radius-server/host{hostname,use-vrf}/key (shared secret) and dispatch it via the callback."""
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    key = ET.SubElement(host, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def radius_server_host_encryption_level(self, **kwargs):
    """Build config XML setting radius-server/host{hostname,use-vrf}/encryption-level and dispatch it via the callback."""
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    encryption_level = ET.SubElement(host, "encryption-level")
    encryption_level.text = kwargs.pop('encryption_level')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def radius_server_host_retries(self, **kwargs):
    """Build config XML setting radius-server/host{hostname,use-vrf}/retries and dispatch it via the callback."""
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    retries = ET.SubElement(host, "retries")
    retries.text = kwargs.pop('retries')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def radius_server_host_timeout(self, **kwargs):
    """Build config XML setting radius-server/host{hostname,use-vrf}/timeout and dispatch it via the callback."""
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    timeout = ET.SubElement(host, "timeout")
    timeout.text = kwargs.pop('timeout')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_hostname(self, **kwargs):
    """Build config XML setting tacacs-server/host/hostname (keyed by use-vrf) and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    hostname = ET.SubElement(host, "hostname")
    hostname.text = kwargs.pop('hostname')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def tacacs_server_host_use_vrf(self, **kwargs):
    """Build config XML setting tacacs-server/host{hostname}/use-vrf and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf = ET.SubElement(host, "use-vrf")
    use_vrf.text = kwargs.pop('use_vrf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def tacacs_server_host_port(self, **kwargs):
    """Build config XML setting tacacs-server/host{hostname,use-vrf}/port and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    port = ET.SubElement(host, "port")
    port.text = kwargs.pop('port')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def tacacs_server_host_protocol(self, **kwargs):
    """Build config XML setting tacacs-server/host{hostname,use-vrf}/protocol and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    protocol = ET.SubElement(host, "protocol")
    protocol.text = kwargs.pop('protocol')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def tacacs_server_host_key(self, **kwargs):
    """Build config XML setting tacacs-server/host{hostname,use-vrf}/key (shared secret) and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    key = ET.SubElement(host, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def tacacs_server_host_encryption_level(self, **kwargs):
    """Build config XML setting tacacs-server/host{hostname,use-vrf}/encryption-level and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    encryption_level = ET.SubElement(host, "encryption-level")
    encryption_level.text = kwargs.pop('encryption_level')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def tacacs_server_host_retries(self, **kwargs):
    """Build config XML setting tacacs-server/host{hostname,use-vrf}/retries and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    retries = ET.SubElement(host, "retries")
    retries.text = kwargs.pop('retries')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def tacacs_server_host_timeout(self, **kwargs):
    """Build config XML setting tacacs-server/host{hostname,use-vrf}/timeout and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    timeout = ET.SubElement(host, "timeout")
    timeout.text = kwargs.pop('timeout')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def tacacs_server_tacacs_source_ip(self, **kwargs):
    """Build config XML setting tacacs-server/tacacs-source-ip and dispatch it via the callback."""
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    tacacs_source_ip = ET.SubElement(tacacs_server, "tacacs-source-ip")
    tacacs_source_ip.text = kwargs.pop('tacacs_source_ip')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def ldap_server_host_hostname(self, **kwargs):
    """Build config XML setting ldap-server/host/hostname (keyed by use-vrf) and dispatch it via the callback."""
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    hostname = ET.SubElement(host, "hostname")
    hostname.text = kwargs.pop('hostname')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def ldap_server_host_use_vrf(self, **kwargs):
    """Build config XML setting ldap-server/host{hostname}/use-vrf and dispatch it via the callback."""
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf = ET.SubElement(host, "use-vrf")
    use_vrf.text = kwargs.pop('use_vrf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def ldap_server_host_port(self, **kwargs):
    """Build config XML setting ldap-server/host{hostname,use-vrf}/port and dispatch it via the callback."""
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    port = ET.SubElement(host, "port")
    port.text = kwargs.pop('port')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def ldap_server_host_retries(self, **kwargs):
    """Build config XML setting ldap-server/host{hostname,use-vrf}/retries and dispatch it via the callback."""
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    retries = ET.SubElement(host, "retries")
    retries.text = kwargs.pop('retries')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def ldap_server_host_timeout(self, **kwargs):
    """Build config XML setting ldap-server/host{hostname,use-vrf}/timeout and dispatch it via the callback."""
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    timeout = ET.SubElement(host, "timeout")
    timeout.text = kwargs.pop('timeout')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def ldap_server_host_basedn(self, **kwargs):
    """Build config XML setting ldap-server/host{hostname,use-vrf}/basedn and dispatch it via the callback."""
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    basedn = ET.SubElement(host, "basedn")
    basedn.text = kwargs.pop('basedn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def ldap_server_maprole_group_ad_group(self, **kwargs):
    """Build config XML setting ldap-server/maprole/group/ad-group (AD group list key) and dispatch it via the callback."""
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    maprole = ET.SubElement(ldap_server, "maprole")
    group = ET.SubElement(maprole, "group")
    ad_group = ET.SubElement(group, "ad-group")
    ad_group.text = kwargs.pop('ad_group')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def ldap_server_maprole_group_switch_role(self, **kwargs):
    """Build config XML mapping ldap-server/maprole/group{ad-group} to a switch-role and dispatch it via the callback."""
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    maprole = ET.SubElement(ldap_server, "maprole")
    group = ET.SubElement(maprole, "group")
    ad_group_key = ET.SubElement(group, "ad-group")
    ad_group_key.text = kwargs.pop('ad_group')
    switch_role = ET.SubElement(group, "switch-role")
    switch_role.text = kwargs.pop('switch_role')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_min_length(self, **kwargs):
    """Build config XML setting password-attributes/min-length and dispatch it via the callback."""
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    min_length = ET.SubElement(password_attributes, "min-length")
    min_length.text = kwargs.pop('min_length')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def password_attributes_max_retry(self, **kwargs):
    """Build config XML setting password-attributes/max-retry and dispatch it via the callback."""
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    max_retry = ET.SubElement(password_attributes, "max-retry")
    max_retry.text = kwargs.pop('max_retry')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def password_attributes_max_lockout_duration(self, **kwargs):
    """Build config XML setting password-attributes/max-lockout-duration and dispatch it via the callback."""
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    max_lockout_duration = ET.SubElement(password_attributes, "max-lockout-duration")
    max_lockout_duration.text = kwargs.pop('max_lockout_duration')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def password_attributes_character_restriction_upper(self, **kwargs):
    """Build config XML setting password-attributes/character-restriction/upper and dispatch it via the callback."""
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    character_restriction = ET.SubElement(password_attributes, "character-restriction")
    upper = ET.SubElement(character_restriction, "upper")
    upper.text = kwargs.pop('upper')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def password_attributes_character_restriction_lower(self, **kwargs):
    """Build config XML setting password-attributes/character-restriction/lower and dispatch it via the callback."""
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    character_restriction = ET.SubElement(password_attributes, "character-restriction")
    lower = ET.SubElement(character_restriction, "lower")
    lower.text = kwargs.pop('lower')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def password_attributes_character_restriction_numeric(self, **kwargs):
    """Build config XML setting password-attributes/character-restriction/numeric and dispatch it via the callback."""
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    character_restriction = ET.SubElement(password_attributes, "character-restriction")
    numeric = ET.SubElement(character_restriction, "numeric")
    numeric.text = kwargs.pop('numeric')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def password_attributes_character_restriction_special_char(self, **kwargs):
    """Build config XML setting password-attributes/character-restriction/special-char and dispatch it via the callback."""
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    character_restriction = ET.SubElement(password_attributes, "character-restriction")
    special_char = ET.SubElement(character_restriction, "special-char")
    special_char.text = kwargs.pop('special_char')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def password_attributes_admin_lockout_enable(self, **kwargs):
    """Build config XML enabling password-attributes/admin-lockout-enable (presence node, no value) and dispatch it via the callback."""
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    admin_lockout_enable = ET.SubElement(password_attributes, "admin-lockout-enable")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def banner_login(self, **kwargs):
    """Build a <config> payload assigning banner/login text and dispatch it.

    :param login: banner text shown at login (consumed from kwargs)
    :param callback: optional override for the instance dispatch callable
    """
    config = ET.Element("config")
    banner_node = ET.SubElement(
        config, "banner", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    login_node = ET.SubElement(banner_node, "login")
    login_node.text = kwargs.pop('login')
    dispatch = kwargs.pop('callback', self._callback)
    return dispatch(config)
def banner_motd(self, **kwargs):
    """Build config XML setting banner/motd (message of the day) and dispatch it via the callback."""
    config = ET.Element("config")
    banner = ET.SubElement(config, "banner", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    motd = ET.SubElement(banner, "motd")
    motd.text = kwargs.pop('motd')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def banner_incoming(self, **kwargs):
    """Build config XML setting banner/incoming and dispatch it via the callback."""
    config = ET.Element("config")
    banner = ET.SubElement(config, "banner", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    incoming = ET.SubElement(banner, "incoming")
    incoming.text = kwargs.pop('incoming')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def rule_index(self, **kwargs):
    """Build a <config> payload assigning rule/index (rule list key) and dispatch it.

    :param index: rule index value (consumed from kwargs)
    :param callback: optional override for the instance dispatch callable
    """
    config = ET.Element("config")
    rule_node = ET.SubElement(
        config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_node = ET.SubElement(rule_node, "index")
    index_node.text = kwargs.pop('index')
    dispatch = kwargs.pop('callback', self._callback)
    return dispatch(config)
def rule_action(self, **kwargs):
    """Build config XML setting rule{index}/action and dispatch it via the callback."""
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    # List key identifying which rule entry is being modified.
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    action = ET.SubElement(rule, "action")
    action.text = kwargs.pop('action')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def rule_operation(self, **kwargs):
    """Build config XML setting rule{index}/operation and dispatch it via the callback."""
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    operation = ET.SubElement(rule, "operation")
    operation.text = kwargs.pop('operation')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def rule_role(self, **kwargs):
    """Build config XML setting rule{index}/role and dispatch it via the callback."""
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    role = ET.SubElement(rule, "role")
    role.text = kwargs.pop('role')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)

def rule_command_cmdlist_container_cmds_enumList(self, **kwargs):
    """Build config XML setting rule{index}/command/cmdlist/container-cmds/enumList and dispatch it via the callback."""
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    command = ET.SubElement(rule, "command")
    cmdlist = ET.SubElement(command, "cmdlist")
    container_cmds = ET.SubElement(cmdlist, "container-cmds")
    enumList = ET.SubElement(container_cmds, "enumList")
    enumList.text = kwargs.pop('enumList')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def rule_command_cmdlist_interface_d_interface_fcoe_leaf_interface_fcoe_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_d = ET.SubElement(cmdlist, "interface-d")
interface_fcoe_leaf = ET.SubElement(interface_d, "interface-fcoe-leaf")
interface = ET.SubElement(interface_fcoe_leaf, "interface")
fcoe_leaf = ET.SubElement(interface, "fcoe-leaf")
fcoe_leaf.text = kwargs.pop('fcoe_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_e_interface_te_leaf_interface_tengigabitethernet_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_e = ET.SubElement(cmdlist, "interface-e")
interface_te_leaf = ET.SubElement(interface_e, "interface-te-leaf")
interface = ET.SubElement(interface_te_leaf, "interface")
tengigabitethernet_leaf = ET.SubElement(interface, "tengigabitethernet-leaf")
tengigabitethernet_leaf.text = kwargs.pop('tengigabitethernet_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_h_interface_ge_leaf_interface_gigabitethernet_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_h = ET.SubElement(cmdlist, "interface-h")
interface_ge_leaf = ET.SubElement(interface_h, "interface-ge-leaf")
interface = ET.SubElement(interface_ge_leaf, "interface")
gigabitethernet_leaf = ET.SubElement(interface, "gigabitethernet-leaf")
gigabitethernet_leaf.text = kwargs.pop('gigabitethernet_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_j_interface_pc_leaf_interface_port_channel_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_j = ET.SubElement(cmdlist, "interface-j")
interface_pc_leaf = ET.SubElement(interface_j, "interface-pc-leaf")
interface = ET.SubElement(interface_pc_leaf, "interface")
port_channel_leaf = ET.SubElement(interface, "port-channel-leaf")
port_channel_leaf.text = kwargs.pop('port_channel_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_l_interface_vlan_leaf_interface_vlan_leaf(self, **kwargs):
    """Build NETCONF config for rule/command/cmdlist/interface-l/interface-vlan-leaf/interface/vlan-leaf
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: index (rule list key), vlan_leaf;
    optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    command = ET.SubElement(rule, "command")
    cmdlist = ET.SubElement(command, "cmdlist")
    interface_l = ET.SubElement(cmdlist, "interface-l")
    interface_vlan_leaf = ET.SubElement(interface_l, "interface-vlan-leaf")
    interface = ET.SubElement(interface_vlan_leaf, "interface")
    vlan_leaf = ET.SubElement(interface, "vlan-leaf")
    vlan_leaf.text = kwargs.pop('vlan_leaf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def rule_command_cmdlist_interface_m_interface_management_leaf_interface_management_leaf(self, **kwargs):
    """Build NETCONF config for rule/command/cmdlist/interface-m/interface-management-leaf/interface/management-leaf
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: index (rule list key), management_leaf;
    optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    command = ET.SubElement(rule, "command")
    cmdlist = ET.SubElement(command, "cmdlist")
    interface_m = ET.SubElement(cmdlist, "interface-m")
    interface_management_leaf = ET.SubElement(interface_m, "interface-management-leaf")
    interface = ET.SubElement(interface_management_leaf, "interface")
    management_leaf = ET.SubElement(interface, "management-leaf")
    management_leaf.text = kwargs.pop('management_leaf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def rule_command_cmdlist_interface_o_interface_loopback_leaf_interface_loopback_leaf(self, **kwargs):
    """Build NETCONF config for rule/command/cmdlist/interface-o/interface-loopback-leaf/interface/loopback-leaf
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: index (rule list key), loopback_leaf;
    optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    command = ET.SubElement(rule, "command")
    cmdlist = ET.SubElement(command, "cmdlist")
    interface_o = ET.SubElement(cmdlist, "interface-o")
    interface_loopback_leaf = ET.SubElement(interface_o, "interface-loopback-leaf")
    interface = ET.SubElement(interface_loopback_leaf, "interface")
    loopback_leaf = ET.SubElement(interface, "loopback-leaf")
    loopback_leaf.text = kwargs.pop('loopback_leaf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def rule_command_cmdlist_interface_q_interface_ve_leaf_interface_ve_leaf(self, **kwargs):
    """Build NETCONF config for rule/command/cmdlist/interface-q/interface-ve-leaf/interface/ve-leaf
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: index (rule list key), ve_leaf;
    optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    command = ET.SubElement(rule, "command")
    cmdlist = ET.SubElement(command, "cmdlist")
    interface_q = ET.SubElement(cmdlist, "interface-q")
    interface_ve_leaf = ET.SubElement(interface_q, "interface-ve-leaf")
    interface = ET.SubElement(interface_ve_leaf, "interface")
    ve_leaf = ET.SubElement(interface, "ve-leaf")
    ve_leaf.text = kwargs.pop('ve_leaf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def rule_command_cmdlist_interface_s_interface_fc_leaf_interface_fibrechannel_leaf(self, **kwargs):
    """Build NETCONF config for rule/command/cmdlist/interface-s/interface-fc-leaf/interface/fibrechannel-leaf
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: index (rule list key), fibrechannel_leaf;
    optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    command = ET.SubElement(rule, "command")
    cmdlist = ET.SubElement(command, "cmdlist")
    interface_s = ET.SubElement(cmdlist, "interface-s")
    interface_fc_leaf = ET.SubElement(interface_s, "interface-fc-leaf")
    interface = ET.SubElement(interface_fc_leaf, "interface")
    fibrechannel_leaf = ET.SubElement(interface, "fibrechannel-leaf")
    fibrechannel_leaf.text = kwargs.pop('fibrechannel_leaf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def rule_command_cmdlist_interface_u_interface_fe_leaf_interface_fortygigabitethernet_leaf(self, **kwargs):
    """Build NETCONF config for rule/command/cmdlist/interface-u/interface-fe-leaf/interface/fortygigabitethernet-leaf
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: index (rule list key), fortygigabitethernet_leaf;
    optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    command = ET.SubElement(rule, "command")
    cmdlist = ET.SubElement(command, "cmdlist")
    interface_u = ET.SubElement(cmdlist, "interface-u")
    interface_fe_leaf = ET.SubElement(interface_u, "interface-fe-leaf")
    interface = ET.SubElement(interface_fe_leaf, "interface")
    fortygigabitethernet_leaf = ET.SubElement(interface, "fortygigabitethernet-leaf")
    fortygigabitethernet_leaf.text = kwargs.pop('fortygigabitethernet_leaf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def rule_command_cmdlist_interface_w_interface_he_leaf_interface_hundredgigabitethernet_leaf(self, **kwargs):
    """Build NETCONF config for rule/command/cmdlist/interface-w/interface-he-leaf/interface/hundredgigabitethernet-leaf
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: index (rule list key), hundredgigabitethernet_leaf;
    optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    index_key = ET.SubElement(rule, "index")
    index_key.text = kwargs.pop('index')
    command = ET.SubElement(rule, "command")
    cmdlist = ET.SubElement(command, "cmdlist")
    interface_w = ET.SubElement(cmdlist, "interface-w")
    interface_he_leaf = ET.SubElement(interface_w, "interface-he-leaf")
    interface = ET.SubElement(interface_he_leaf, "interface")
    hundredgigabitethernet_leaf = ET.SubElement(interface, "hundredgigabitethernet-leaf")
    hundredgigabitethernet_leaf.text = kwargs.pop('hundredgigabitethernet_leaf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def root_sa_root_enable(self, **kwargs):
    """Emit a NETCONF config enabling root-sa/root/enable (ns urn:brocade.com:mgmt:brocade-aaa)."""
    doc = ET.Element("config")
    root_sa_el = ET.SubElement(doc, "root-sa", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    root_el = ET.SubElement(root_sa_el, "root")
    ET.SubElement(root_el, "enable")
    dispatch = kwargs.pop('callback', self._callback)
    return dispatch(doc)
def root_sa_root_access(self, **kwargs):
    """Build NETCONF config for root-sa/root/access (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: access; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    root_sa = ET.SubElement(config, "root-sa", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    root = ET.SubElement(root_sa, "root")
    access = ET.SubElement(root, "access")
    access.text = kwargs.pop('access')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def alias_config_alias_name(self, **kwargs):
    """Build NETCONF config for alias-config/alias/name (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (alias list key); optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    alias = ET.SubElement(alias_config, "alias")
    name = ET.SubElement(alias, "name")
    name.text = kwargs.pop('name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def alias_config_alias_expansion(self, **kwargs):
    """Build NETCONF config for alias-config/alias/expansion (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (alias list key), expansion; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    alias = ET.SubElement(alias_config, "alias")
    name_key = ET.SubElement(alias, "name")
    name_key.text = kwargs.pop('name')
    expansion = ET.SubElement(alias, "expansion")
    expansion.text = kwargs.pop('expansion')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def alias_config_user_name(self, **kwargs):
    """Build NETCONF config for alias-config/user/name (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (user list key); optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    user = ET.SubElement(alias_config, "user")
    name = ET.SubElement(user, "name")
    name.text = kwargs.pop('name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def alias_config_user_alias_name(self, **kwargs):
    """Build NETCONF config for alias-config/user/alias/name (ns urn:brocade.com:mgmt:brocade-aaa).

    Kwargs: name (user list key); alias_name (name of the alias; defaults to
    the same value as ``name`` for backward compatibility); optional callback
    (defaults to self._callback).

    Bug fix: the auto-generated body popped 'name' twice, so the second
    ``kwargs.pop('name')`` always raised KeyError. The alias's name is now
    taken from 'alias_name' when supplied, falling back to the user name.
    """
    config = ET.Element("config")
    alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    user = ET.SubElement(alias_config, "user")
    user_name = kwargs.pop('name')
    name_key = ET.SubElement(user, "name")
    name_key.text = user_name
    alias = ET.SubElement(user, "alias")
    name = ET.SubElement(alias, "name")
    name.text = kwargs.pop('alias_name', user_name)
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def alias_config_user_alias_expansion(self, **kwargs):
    """Build NETCONF config for alias-config/user/alias/expansion (ns urn:brocade.com:mgmt:brocade-aaa).

    Kwargs: name (user list key); alias_name (alias list key; defaults to the
    same value as ``name`` for backward compatibility); expansion; optional
    callback (defaults to self._callback).

    Bug fix: the auto-generated body popped 'name' twice, so the second
    ``kwargs.pop('name')`` always raised KeyError. The alias's key is now
    taken from 'alias_name' when supplied, falling back to the user name.
    """
    config = ET.Element("config")
    alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    user = ET.SubElement(alias_config, "user")
    user_name = kwargs.pop('name')
    name_key = ET.SubElement(user, "name")
    name_key.text = user_name
    alias = ET.SubElement(user, "alias")
    alias_name_key = ET.SubElement(alias, "name")
    alias_name_key.text = kwargs.pop('alias_name', user_name)
    expansion = ET.SubElement(alias, "expansion")
    expansion.text = kwargs.pop('expansion')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def aaa_config_aaa_authentication_login_first(self, **kwargs):
    """Build NETCONF config for aaa-config/aaa/authentication/login/first (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: first; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    aaa = ET.SubElement(aaa_config, "aaa")
    authentication = ET.SubElement(aaa, "authentication")
    login = ET.SubElement(authentication, "login")
    first = ET.SubElement(login, "first")
    first.text = kwargs.pop('first')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def aaa_config_aaa_authentication_login_second(self, **kwargs):
    """Build NETCONF config for aaa-config/aaa/authentication/login/second (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: second; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    aaa = ET.SubElement(aaa_config, "aaa")
    authentication = ET.SubElement(aaa, "authentication")
    login = ET.SubElement(authentication, "login")
    second = ET.SubElement(login, "second")
    second.text = kwargs.pop('second')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def aaa_config_aaa_accounting_exec_defaultacc_start_stop_server_type(self, **kwargs):
    """Build NETCONF config for aaa-config/aaa/accounting/exec/defaultacc/start-stop/server-type
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: server_type; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    aaa = ET.SubElement(aaa_config, "aaa")
    accounting = ET.SubElement(aaa, "accounting")
    # named exec_el because "exec" is a reserved word in Python 2
    exec_el = ET.SubElement(accounting, "exec")
    defaultacc = ET.SubElement(exec_el, "defaultacc")
    start_stop = ET.SubElement(defaultacc, "start-stop")
    server_type = ET.SubElement(start_stop, "server-type")
    server_type.text = kwargs.pop('server_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def aaa_config_aaa_accounting_commands_defaultacc_start_stop_server_type(self, **kwargs):
    """Build NETCONF config for aaa-config/aaa/accounting/commands/defaultacc/start-stop/server-type
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: server_type; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    aaa_config = ET.SubElement(config, "aaa-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    aaa = ET.SubElement(aaa_config, "aaa")
    accounting = ET.SubElement(aaa, "accounting")
    commands = ET.SubElement(accounting, "commands")
    defaultacc = ET.SubElement(commands, "defaultacc")
    start_stop = ET.SubElement(defaultacc, "start-stop")
    server_type = ET.SubElement(start_stop, "server-type")
    server_type.text = kwargs.pop('server_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def username_name(self, **kwargs):
    """Emit a NETCONF config creating username/name (ns urn:brocade.com:mgmt:brocade-aaa)."""
    doc = ET.Element("config")
    user_el = ET.SubElement(doc, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    ET.SubElement(user_el, "name").text = kwargs.pop('name')
    send = kwargs.pop('callback', self._callback)
    return send(doc)
def username_user_password(self, **kwargs):
    """Build NETCONF config for username/user-password (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (username list key), user_password; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    user_password = ET.SubElement(username, "user-password")
    user_password.text = kwargs.pop('user_password')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def username_encryption_level(self, **kwargs):
    """Build NETCONF config for username/encryption-level (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (username list key), encryption_level; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    encryption_level = ET.SubElement(username, "encryption-level")
    encryption_level.text = kwargs.pop('encryption_level')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def username_role(self, **kwargs):
    """Build NETCONF config for username/role (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (username list key), role; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    role = ET.SubElement(username, "role")
    role.text = kwargs.pop('role')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def username_desc(self, **kwargs):
    """Build NETCONF config for username/desc (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (username list key), desc; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    desc = ET.SubElement(username, "desc")
    desc.text = kwargs.pop('desc')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def username_enable(self, **kwargs):
    """Build NETCONF config for username/enable (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (username list key), enable; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    enable = ET.SubElement(username, "enable")
    enable.text = kwargs.pop('enable')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def username_expire(self, **kwargs):
    """Build NETCONF config for username/expire (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (username list key), expire; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    username = ET.SubElement(config, "username", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name_key = ET.SubElement(username, "name")
    name_key.text = kwargs.pop('name')
    expire = ET.SubElement(username, "expire")
    expire.text = kwargs.pop('expire')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def service_password_encryption(self, **kwargs):
    """Emit a NETCONF config enabling service/password-encryption (ns urn:brocade.com:mgmt:brocade-aaa)."""
    doc = ET.Element("config")
    svc = ET.SubElement(doc, "service", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    ET.SubElement(svc, "password-encryption")
    emit = kwargs.pop('callback', self._callback)
    return emit(doc)
def role_name_name(self, **kwargs):
    """Build NETCONF config for role/name/name (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (role list key); optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    role = ET.SubElement(config, "role", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name = ET.SubElement(role, "name")
    # intentionally rebinds `name`: outer <name> is the list node, inner <name> its key leaf
    name = ET.SubElement(name, "name")
    name.text = kwargs.pop('name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def role_name_desc(self, **kwargs):
    """Build NETCONF config for role/name/desc (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: name (role list key), desc; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    role = ET.SubElement(config, "role", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    name = ET.SubElement(role, "name")
    name_key = ET.SubElement(name, "name")
    name_key.text = kwargs.pop('name')
    desc = ET.SubElement(name, "desc")
    desc.text = kwargs.pop('desc')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_hostname(self, **kwargs):
    """Build NETCONF config for radius-server/host/hostname (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: use_vrf (host list key), hostname; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    hostname = ET.SubElement(host, "hostname")
    hostname.text = kwargs.pop('hostname')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_use_vrf(self, **kwargs):
    """Build NETCONF config for radius-server/host/use-vrf (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname (host list key), use_vrf; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf = ET.SubElement(host, "use-vrf")
    use_vrf.text = kwargs.pop('use_vrf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_auth_port(self, **kwargs):
    """Build NETCONF config for radius-server/host/auth-port (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), auth_port; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    auth_port = ET.SubElement(host, "auth-port")
    auth_port.text = kwargs.pop('auth_port')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_protocol(self, **kwargs):
    """Build NETCONF config for radius-server/host/protocol (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), protocol; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    protocol = ET.SubElement(host, "protocol")
    protocol.text = kwargs.pop('protocol')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_key(self, **kwargs):
    """Build NETCONF config for radius-server/host/key (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), key (shared secret); optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    key = ET.SubElement(host, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_encryption_level(self, **kwargs):
    """Build NETCONF config for radius-server/host/encryption-level (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), encryption_level; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    encryption_level = ET.SubElement(host, "encryption-level")
    encryption_level.text = kwargs.pop('encryption_level')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_retries(self, **kwargs):
    """Build NETCONF config for radius-server/host/retries (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), retries; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    retries = ET.SubElement(host, "retries")
    retries.text = kwargs.pop('retries')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def radius_server_host_timeout(self, **kwargs):
    """Build NETCONF config for radius-server/host/timeout (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), timeout; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(radius_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    timeout = ET.SubElement(host, "timeout")
    timeout.text = kwargs.pop('timeout')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_hostname(self, **kwargs):
    """Build NETCONF config for tacacs-server/host/hostname (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: use_vrf (host list key), hostname; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    hostname = ET.SubElement(host, "hostname")
    hostname.text = kwargs.pop('hostname')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_use_vrf(self, **kwargs):
    """Build NETCONF config for tacacs-server/host/use-vrf (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname (host list key), use_vrf; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf = ET.SubElement(host, "use-vrf")
    use_vrf.text = kwargs.pop('use_vrf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_port(self, **kwargs):
    """Build NETCONF config for tacacs-server/host/port (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), port; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    port = ET.SubElement(host, "port")
    port.text = kwargs.pop('port')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_protocol(self, **kwargs):
    """Build NETCONF config for tacacs-server/host/protocol (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), protocol; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    protocol = ET.SubElement(host, "protocol")
    protocol.text = kwargs.pop('protocol')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_key(self, **kwargs):
    """Build NETCONF config for tacacs-server/host/key (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), key (shared secret); optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    key = ET.SubElement(host, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_encryption_level(self, **kwargs):
    """Build NETCONF config for tacacs-server/host/encryption-level (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), encryption_level; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    encryption_level = ET.SubElement(host, "encryption-level")
    encryption_level.text = kwargs.pop('encryption_level')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_retries(self, **kwargs):
    """Build NETCONF config for tacacs-server/host/retries (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), retries; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    retries = ET.SubElement(host, "retries")
    retries.text = kwargs.pop('retries')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_host_timeout(self, **kwargs):
    """Build NETCONF config for tacacs-server/host/timeout (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), timeout; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    tacacs_server = ET.SubElement(config, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(tacacs_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    timeout = ET.SubElement(host, "timeout")
    timeout.text = kwargs.pop('timeout')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def tacacs_server_tacacs_source_ip(self, **kwargs):
    """Emit a NETCONF config setting tacacs-server/tacacs-source-ip (ns urn:brocade.com:mgmt:brocade-aaa)."""
    doc = ET.Element("config")
    server = ET.SubElement(doc, "tacacs-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    ET.SubElement(server, "tacacs-source-ip").text = kwargs.pop('tacacs_source_ip')
    deliver = kwargs.pop('callback', self._callback)
    return deliver(doc)
def ldap_server_host_hostname(self, **kwargs):
    """Build NETCONF config for ldap-server/host/hostname (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: use_vrf (host list key), hostname; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    hostname = ET.SubElement(host, "hostname")
    hostname.text = kwargs.pop('hostname')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def ldap_server_host_use_vrf(self, **kwargs):
    """Build NETCONF config for ldap-server/host/use-vrf (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname (host list key), use_vrf; optional callback (defaults to self._callback).
    Auto generated code.
    """
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf = ET.SubElement(host, "use-vrf")
    use_vrf.text = kwargs.pop('use_vrf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def ldap_server_host_port(self, **kwargs):
    """Build NETCONF config for ldap-server/host/port (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), port; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    port = ET.SubElement(host, "port")
    port.text = kwargs.pop('port')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def ldap_server_host_retries(self, **kwargs):
    """Build NETCONF config for ldap-server/host/retries (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), retries; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    retries = ET.SubElement(host, "retries")
    retries.text = kwargs.pop('retries')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def ldap_server_host_timeout(self, **kwargs):
    """Build NETCONF config for ldap-server/host/timeout (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), timeout; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    timeout = ET.SubElement(host, "timeout")
    timeout.text = kwargs.pop('timeout')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def ldap_server_host_basedn(self, **kwargs):
    """Build NETCONF config for ldap-server/host/basedn (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: hostname and use_vrf (host list keys), basedn; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    host = ET.SubElement(ldap_server, "host")
    hostname_key = ET.SubElement(host, "hostname")
    hostname_key.text = kwargs.pop('hostname')
    use_vrf_key = ET.SubElement(host, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    basedn = ET.SubElement(host, "basedn")
    basedn.text = kwargs.pop('basedn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def ldap_server_maprole_group_ad_group(self, **kwargs):
    """Build NETCONF config for ldap-server/maprole/group/ad-group (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: ad_group; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    maprole = ET.SubElement(ldap_server, "maprole")
    group = ET.SubElement(maprole, "group")
    ad_group = ET.SubElement(group, "ad-group")
    ad_group.text = kwargs.pop('ad_group')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def ldap_server_maprole_group_switch_role(self, **kwargs):
    """Build NETCONF config for ldap-server/maprole/group/switch-role (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: ad_group (group list key), switch_role; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    ldap_server = ET.SubElement(config, "ldap-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    maprole = ET.SubElement(ldap_server, "maprole")
    group = ET.SubElement(maprole, "group")
    ad_group_key = ET.SubElement(group, "ad-group")
    ad_group_key.text = kwargs.pop('ad_group')
    switch_role = ET.SubElement(group, "switch-role")
    switch_role.text = kwargs.pop('switch_role')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_min_length(self, **kwargs):
    """Build NETCONF config for password-attributes/min-length (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: min_length; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    min_length = ET.SubElement(password_attributes, "min-length")
    min_length.text = kwargs.pop('min_length')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_max_retry(self, **kwargs):
    """Build NETCONF config for password-attributes/max-retry (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: max_retry; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    max_retry = ET.SubElement(password_attributes, "max-retry")
    max_retry.text = kwargs.pop('max_retry')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_max_lockout_duration(self, **kwargs):
    """Build NETCONF config for password-attributes/max-lockout-duration (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: max_lockout_duration; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    max_lockout_duration = ET.SubElement(password_attributes, "max-lockout-duration")
    max_lockout_duration.text = kwargs.pop('max_lockout_duration')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_character_restriction_upper(self, **kwargs):
    """Build NETCONF config for password-attributes/character-restriction/upper
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: upper; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    character_restriction = ET.SubElement(password_attributes, "character-restriction")
    upper = ET.SubElement(character_restriction, "upper")
    upper.text = kwargs.pop('upper')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_character_restriction_lower(self, **kwargs):
    """Build NETCONF config for password-attributes/character-restriction/lower
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: lower; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    character_restriction = ET.SubElement(password_attributes, "character-restriction")
    lower = ET.SubElement(character_restriction, "lower")
    lower.text = kwargs.pop('lower')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_character_restriction_numeric(self, **kwargs):
    """Build NETCONF config for password-attributes/character-restriction/numeric
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: numeric; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    character_restriction = ET.SubElement(password_attributes, "character-restriction")
    numeric = ET.SubElement(character_restriction, "numeric")
    numeric.text = kwargs.pop('numeric')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_character_restriction_special_char(self, **kwargs):
    """Build NETCONF config for password-attributes/character-restriction/special-char
    (ns urn:brocade.com:mgmt:brocade-aaa). Kwargs: special_char; optional callback
    (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    character_restriction = ET.SubElement(password_attributes, "character-restriction")
    special_char = ET.SubElement(character_restriction, "special-char")
    special_char.text = kwargs.pop('special_char')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def password_attributes_admin_lockout_enable(self, **kwargs):
    """Build NETCONF config enabling password-attributes/admin-lockout-enable
    (ns urn:brocade.com:mgmt:brocade-aaa); presence element, no value kwargs.
    Optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    password_attributes = ET.SubElement(config, "password-attributes", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    admin_lockout_enable = ET.SubElement(password_attributes, "admin-lockout-enable")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def banner_login(self, **kwargs):
    """Emit a NETCONF config setting banner/login (ns urn:brocade.com:mgmt:brocade-aaa)."""
    doc = ET.Element("config")
    banner_el = ET.SubElement(doc, "banner", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    ET.SubElement(banner_el, "login").text = kwargs.pop('login')
    send = kwargs.pop('callback', self._callback)
    return send(doc)
def banner_motd(self, **kwargs):
    """Build NETCONF config for banner/motd (ns urn:brocade.com:mgmt:brocade-aaa).
    Kwargs: motd; optional callback (defaults to self._callback). Auto generated code.
    """
    config = ET.Element("config")
    banner = ET.SubElement(config, "banner", xmlns="urn:brocade.com:mgmt:brocade-aaa")
    motd = ET.SubElement(banner, "motd")
    motd.text = kwargs.pop('motd')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def banner_incoming(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
banner = ET.SubElement(config, "banner", xmlns="urn:brocade.com:mgmt:brocade-aaa")
incoming = ET.SubElement(banner, "incoming")
incoming.text = kwargs.pop('incoming')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_index(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index = ET.SubElement(rule, "index")
index.text = kwargs.pop('index')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
action = ET.SubElement(rule, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_operation(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
operation = ET.SubElement(rule, "operation")
operation.text = kwargs.pop('operation')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_role(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
role = ET.SubElement(rule, "role")
role.text = kwargs.pop('role')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_container_cmds_enumList(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
container_cmds = ET.SubElement(cmdlist, "container-cmds")
enumList = ET.SubElement(container_cmds, "enumList")
enumList.text = kwargs.pop('enumList')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_d_interface_fcoe_leaf_interface_fcoe_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_d = ET.SubElement(cmdlist, "interface-d")
interface_fcoe_leaf = ET.SubElement(interface_d, "interface-fcoe-leaf")
interface = ET.SubElement(interface_fcoe_leaf, "interface")
fcoe_leaf = ET.SubElement(interface, "fcoe-leaf")
fcoe_leaf.text = kwargs.pop('fcoe_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_e_interface_te_leaf_interface_tengigabitethernet_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_e = ET.SubElement(cmdlist, "interface-e")
interface_te_leaf = ET.SubElement(interface_e, "interface-te-leaf")
interface = ET.SubElement(interface_te_leaf, "interface")
tengigabitethernet_leaf = ET.SubElement(interface, "tengigabitethernet-leaf")
tengigabitethernet_leaf.text = kwargs.pop('tengigabitethernet_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_h_interface_ge_leaf_interface_gigabitethernet_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_h = ET.SubElement(cmdlist, "interface-h")
interface_ge_leaf = ET.SubElement(interface_h, "interface-ge-leaf")
interface = ET.SubElement(interface_ge_leaf, "interface")
gigabitethernet_leaf = ET.SubElement(interface, "gigabitethernet-leaf")
gigabitethernet_leaf.text = kwargs.pop('gigabitethernet_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_j_interface_pc_leaf_interface_port_channel_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_j = ET.SubElement(cmdlist, "interface-j")
interface_pc_leaf = ET.SubElement(interface_j, "interface-pc-leaf")
interface = ET.SubElement(interface_pc_leaf, "interface")
port_channel_leaf = ET.SubElement(interface, "port-channel-leaf")
port_channel_leaf.text = kwargs.pop('port_channel_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_l_interface_vlan_leaf_interface_vlan_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_l = ET.SubElement(cmdlist, "interface-l")
interface_vlan_leaf = ET.SubElement(interface_l, "interface-vlan-leaf")
interface = ET.SubElement(interface_vlan_leaf, "interface")
vlan_leaf = ET.SubElement(interface, "vlan-leaf")
vlan_leaf.text = kwargs.pop('vlan_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_m_interface_management_leaf_interface_management_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_m = ET.SubElement(cmdlist, "interface-m")
interface_management_leaf = ET.SubElement(interface_m, "interface-management-leaf")
interface = ET.SubElement(interface_management_leaf, "interface")
management_leaf = ET.SubElement(interface, "management-leaf")
management_leaf.text = kwargs.pop('management_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_o_interface_loopback_leaf_interface_loopback_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_o = ET.SubElement(cmdlist, "interface-o")
interface_loopback_leaf = ET.SubElement(interface_o, "interface-loopback-leaf")
interface = ET.SubElement(interface_loopback_leaf, "interface")
loopback_leaf = ET.SubElement(interface, "loopback-leaf")
loopback_leaf.text = kwargs.pop('loopback_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_q_interface_ve_leaf_interface_ve_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_q = ET.SubElement(cmdlist, "interface-q")
interface_ve_leaf = ET.SubElement(interface_q, "interface-ve-leaf")
interface = ET.SubElement(interface_ve_leaf, "interface")
ve_leaf = ET.SubElement(interface, "ve-leaf")
ve_leaf.text = kwargs.pop('ve_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_s_interface_fc_leaf_interface_fibrechannel_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_s = ET.SubElement(cmdlist, "interface-s")
interface_fc_leaf = ET.SubElement(interface_s, "interface-fc-leaf")
interface = ET.SubElement(interface_fc_leaf, "interface")
fibrechannel_leaf = ET.SubElement(interface, "fibrechannel-leaf")
fibrechannel_leaf.text = kwargs.pop('fibrechannel_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_u_interface_fe_leaf_interface_fortygigabitethernet_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_u = ET.SubElement(cmdlist, "interface-u")
interface_fe_leaf = ET.SubElement(interface_u, "interface-fe-leaf")
interface = ET.SubElement(interface_fe_leaf, "interface")
fortygigabitethernet_leaf = ET.SubElement(interface, "fortygigabitethernet-leaf")
fortygigabitethernet_leaf.text = kwargs.pop('fortygigabitethernet_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def rule_command_cmdlist_interface_w_interface_he_leaf_interface_hundredgigabitethernet_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
rule = ET.SubElement(config, "rule", xmlns="urn:brocade.com:mgmt:brocade-aaa")
index_key = ET.SubElement(rule, "index")
index_key.text = kwargs.pop('index')
command = ET.SubElement(rule, "command")
cmdlist = ET.SubElement(command, "cmdlist")
interface_w = ET.SubElement(cmdlist, "interface-w")
interface_he_leaf = ET.SubElement(interface_w, "interface-he-leaf")
interface = ET.SubElement(interface_he_leaf, "interface")
hundredgigabitethernet_leaf = ET.SubElement(interface, "hundredgigabitethernet-leaf")
hundredgigabitethernet_leaf.text = kwargs.pop('hundredgigabitethernet_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def root_sa_root_enable(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
root_sa = ET.SubElement(config, "root-sa", xmlns="urn:brocade.com:mgmt:brocade-aaa")
root = ET.SubElement(root_sa, "root")
enable = ET.SubElement(root, "enable")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def root_sa_root_access(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
root_sa = ET.SubElement(config, "root-sa", xmlns="urn:brocade.com:mgmt:brocade-aaa")
root = ET.SubElement(root_sa, "root")
access = ET.SubElement(root, "access")
access.text = kwargs.pop('access')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def alias_config_alias_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
alias = ET.SubElement(alias_config, "alias")
name = ET.SubElement(alias, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def alias_config_alias_expansion(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
alias = ET.SubElement(alias_config, "alias")
name_key = ET.SubElement(alias, "name")
name_key.text = kwargs.pop('name')
expansion = ET.SubElement(alias, "expansion")
expansion.text = kwargs.pop('expansion')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def alias_config_user_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
user = ET.SubElement(alias_config, "user")
name = ET.SubElement(user, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def alias_config_user_alias_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
user = ET.SubElement(alias_config, "user")
name_key = ET.SubElement(user, "name")
name_key.text = kwargs.pop('name')
alias = ET.SubElement(user, "alias")
name = ET.SubElement(alias, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def alias_config_user_alias_expansion(self, **kwargs):<|fim▁hole|> config = ET.Element("config")
alias_config = ET.SubElement(config, "alias-config", xmlns="urn:brocade.com:mgmt:brocade-aaa")
user = ET.SubElement(alias_config, "user")
name_key = ET.SubElement(user, "name")
name_key.text = kwargs.pop('name')
alias = ET.SubElement(user, "alias")
name_key = ET.SubElement(alias, "name")
name_key.text = kwargs.pop('name')
expansion = ET.SubElement(alias, "expansion")
expansion.text = kwargs.pop('expansion')
callback = kwargs.pop('callback', self._callback)
return callback(config)<|fim▁end|> | """Auto Generated Code
""" |
<|file_name|>guiTest.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import errno
import time
import hashlib
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
if "TRAVIS_BUILD_NUMBER" in os.environ:
if "SAUCE_USERNAME" not in os.environ:
print "No sauce labs login credentials found. Stopping tests..."
sys.exit(0)
capabilities = {'browserName': "firefox"}
capabilities['platform'] = "Windows 7"
capabilities['version'] = "48.0"
capabilities['screenResolution'] = "1280x1024"
capabilities["build"] = os.environ["TRAVIS_BUILD_NUMBER"]
capabilities["tunnel-identifier"] = os.environ["TRAVIS_JOB_NUMBER"]
# connect to sauce labs
username = os.environ["SAUCE_USERNAME"]
access_key = os.environ["SAUCE_ACCESS_KEY"]
hub_url = "%s:%s@localhost:4445" % (username, access_key)
driver = webdriver.Remote(command_executor="http://%s/wd/hub" % hub_url, desired_capabilities=capabilities)
else:
# local
print "Using LOCAL webdriver"
profile = webdriver.FirefoxProfile()
profile.set_preference("intl.accept_languages", "en")
driver = webdriver.Firefox(profile)
driver.maximize_window()
def write_random_file(size, filename):
if not os.path.exists(os.path.dirname(filename)):
try:
os.makedirs(os.path.dirname(filename))
except OSError as exc: # Guard against race condition
if exc.errno != errno.EEXIST:
raise
with open(filename, 'wb') as fout:
fout.write(os.urandom(size))
def sha1_file(filename):
BLOCKSIZE = 65536
hasher = hashlib.sha1()
with open(filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
return hasher.hexdigest()
def sha1_folder(folder):
sha1_dict = {}
for root, dirs, files in os.walk(folder):
for filename in files:
file_path = os.path.join(root, filename)
sha1 = sha1_file(file_path)
relative_file_path = os.path.relpath(file_path, folder)
sha1_dict.update({relative_file_path: sha1})
return sha1_dict
def wait_for_text(time, xpath, text):
WebDriverWait(driver, time).until(expected_conditions.text_to_be_present_in_element((By.XPATH, xpath), text))
BACKUP_NAME = "BackupName"
PASSWORD = "the_backup_password_is_really_long_and_safe"
SOURCE_FOLDER = os.path.abspath("duplicati_gui_test_source")
DESTINATION_FOLDER = os.path.abspath("duplicati_gui_test_destination")
DESTINATION_FOLDER_DIRECT_RESTORE = os.path.abspath("duplicati_gui_test_destination_direct_restore")
RESTORE_FOLDER = os.path.abspath("duplicati_gui_test_restore")
DIRECT_RESTORE_FOLDER = os.path.abspath("duplicati_gui_test_direct_restore")
# wait 5 seconds for duplicati server to start
time.sleep(5)
driver.implicitly_wait(10)
driver.get("http://localhost:8200/ngax/index.html")
if "Duplicati" not in driver.title:
raise Exception("Unable to load duplicati GUI!")
# Create and hash random files in the source folder
write_random_file(1024 * 1024, SOURCE_FOLDER + os.sep + "1MB.test")
write_random_file(100 * 1024, SOURCE_FOLDER + os.sep + "subfolder" + os.sep + "100KB.test")
sha1_source = sha1_folder(SOURCE_FOLDER)
# Dismiss the password request
driver.find_element_by_link_text("No, my machine has only a single account").click()
# Add new backup
driver.find_element_by_link_text("Add backup").click()
# Choose the "add new" option
driver.find_element_by_id("blank").click()
driver.find_element_by_xpath("//input[@class='submit next']").click()
# Add new backup - General page
time.sleep(1)
driver.find_element_by_id("name").send_keys(BACKUP_NAME)
driver.find_element_by_id("passphrase").send_keys(PASSWORD)
driver.find_element_by_id("repeat-passphrase").send_keys(PASSWORD)
driver.find_element_by_id("nextStep1").click()
# Add new backup - Destination page
driver.find_element_by_link_text("Manually type path").click()
driver.find_element_by_id("file_path").send_keys(DESTINATION_FOLDER)
driver.find_element_by_id("nextStep2").click()
# Add new backup - Source Data page
driver.find_element_by_id("sourcePath").send_keys(os.path.abspath(SOURCE_FOLDER) + os.sep)
driver.find_element_by_id("sourceFolderPathAdd").click()
driver.find_element_by_id("nextStep3").click()
# Add new backup - Schedule page
useScheduleRun = driver.find_element_by_id("useScheduleRun")
if useScheduleRun.is_selected():
useScheduleRun.click()
driver.find_element_by_id("nextStep4").click()
# Add new backup - Options page
driver.find_element_by_id("save").click()
# Run the backup job and wait for finish
driver.find_element_by_link_text(BACKUP_NAME).click()
[n for n in driver.find_elements_by_xpath("//dl[@class='taskmenu']/dd/p/span[contains(text(),'Run now')]") if n.is_displayed()][0].click()
wait_for_text(60, "//div[@class='task ng-scope']/dl[2]/dd[1]", "(took ")
# Restore
if len([n for n in driver.find_elements_by_xpath("//span[contains(text(),'Restore files ...')]") if n.is_displayed()]) == 0:
driver.find_element_by_link_text(BACKUP_NAME).click()
[n for n in driver.find_elements_by_xpath("//span[contains(text(),'Restore files ...')]") if n.is_displayed()][0].click()
driver.find_element_by_xpath("//span[contains(text(),'" + SOURCE_FOLDER + "')]") # wait for filelist
time.sleep(1)
driver.find_element_by_xpath("//restore-file-picker/ul/li/div/a[2]").click() # select root folder checkbox
driver.find_element_by_xpath("//form[@id='restore']/div[1]/div[@class='buttons']/a/span[contains(text(), 'Continue')]").click()
driver.find_element_by_id("restoretonewpath").click()
driver.find_element_by_id("restore_path").send_keys(RESTORE_FOLDER)
driver.find_element_by_xpath("//form[@id='restore']/div/div[@class='buttons']/a/span[contains(text(),'Restore')]").click()
# wait for restore to finish
wait_for_text(60, "//form[@id='restore']/div[3]/h3/div[1]", "Your files and folders have been restored successfully.")
# hash restored files
sha1_restore = sha1_folder(RESTORE_FOLDER)
# cleanup: delete source and restore folder and rename destination folder for direct restore
shutil.rmtree(SOURCE_FOLDER)
shutil.rmtree(RESTORE_FOLDER)
os.rename(DESTINATION_FOLDER, DESTINATION_FOLDER_DIRECT_RESTORE)
# direct restore
driver.find_element_by_link_text("Restore").click()
# Choose the "restore direct" option
driver.find_element_by_id("direct").click()
driver.find_element_by_xpath("//input[@class='submit next']").click()
time.sleep(1)
driver.find_element_by_link_text("Manually type path").click()<|fim▁hole|>driver.find_element_by_id("file_path").send_keys(DESTINATION_FOLDER_DIRECT_RESTORE)
driver.find_element_by_id("nextStep1").click()
driver.find_element_by_id("password").send_keys(PASSWORD)
driver.find_element_by_id("connect").click()
driver.find_element_by_xpath("//span[contains(text(),'" + SOURCE_FOLDER + "')]") # wait for filelist
time.sleep(1)
driver.find_element_by_xpath("//restore-file-picker/ul/li/div/a[2]").click() # select root folder checkbox
time.sleep(1)
driver.find_element_by_xpath("//form[@id='restore']/div[1]/div[@class='buttons']/a/span[contains(text(), 'Continue')]").click()
driver.find_element_by_id("restoretonewpath").click()
driver.find_element_by_id("restore_path").send_keys(DIRECT_RESTORE_FOLDER)
driver.find_element_by_xpath("//form[@id='restore']/div/div[@class='buttons']/a/span[contains(text(),'Restore')]").click()
# wait for restore to finish
wait_for_text(60, "//form[@id='restore']/div[3]/h3/div[1]", "Your files and folders have been restored successfully.")
# hash direct restore files
sha1_direct_restore = sha1_folder(DIRECT_RESTORE_FOLDER)
print "Source hashes: " + str(sha1_source)
print "Restore hashes: " + str(sha1_restore)
print "Direct Restore hashes: " + str(sha1_direct_restore)
# Tell Sauce Labs to stop the test
driver.quit()
if not (sha1_source == sha1_restore and sha1_source == sha1_direct_restore):
sys.exit(1) # return with error<|fim▁end|> | |
<|file_name|>Deferred.js<|end_file_name|><|fim▁begin|>dojo.provide("tests._base.Deferred");
var delay = function(ms){
var d = new dojo.Deferred();
ms = ms || 20;
setTimeout(function(){
d.progress(0.5);
},ms/2);
setTimeout(function(){
d.resolve();
},ms);
return d.promise;
};
doh.register("tests._base.Deferred",
[
function callback(t){
var nd = new dojo.Deferred();
var cnt = 0;
nd.addCallback(function(res){<|fim▁hole|> doh.debug("debug from dojo.Deferred callback");
return res;
});
nd.addCallback(function(res){
// t.debug("val:", res);
cnt+=res;
return cnt;
});
nd.callback(5);
// t.debug("cnt:", cnt);
t.assertEqual(cnt, 5);
},
function callback_extra_args(t){
var nd = new dojo.Deferred();
var cnt = 0;
nd.addCallback(dojo.global, function(base, res){ cnt+=base; cnt+=res; return cnt; }, 30);
nd.callback(5);
t.assertEqual(cnt, 35);
},
function errback(t){
var nd = new dojo.Deferred();
var cnt = 0;
nd.addErrback(function(val){
return ++cnt;
});
nd.errback();
t.assertEqual(cnt, 1);
},
function callbackTwice(t){
var nd = new dojo.Deferred();
var cnt = 0;
nd.addCallback(function(res){
return ++cnt;
});
nd.callback();
t.assertEqual(cnt, 1);
var thrown = false;
try{
nd.callback();
}catch(e){
thrown = true;
}
t.assertTrue(thrown);
},
function addBoth(t){
var nd = new dojo.Deferred();
var cnt = 0;
nd.addBoth(function(res){
return ++cnt;
});
nd.callback();
t.assertEqual(cnt, 1);
// nd.callback();
// t.debug(cnt);
// t.assertEqual(cnt, 1);
},
function callbackNested(t){
var nd = new dojo.Deferred();
var nestedReturn = "yellow";
nd.addCallback(function(res){
nd.addCallback(function(res2){
nestedReturn = res2;
});
return "blue";
});
nd.callback("red");
t.assertEqual("blue", nestedReturn);
},
function simpleThen(t){
var td = new doh.Deferred();
delay().then(function(){
td.callback(true);
});
return td;
},
function thenChaining(t){
var td = new doh.Deferred();
var p = delay();
var p2 = p.then(function(){
return 1;
});
p3 = p2.then(function(){
return 2;
});
p3.then(function(){
p2.then(function(v){
t.assertEqual(v, 1);
p3.then(function(v){
t.assertEqual(v, 2);
td.callback(true);
});
});
});
return td;
},
function simpleWhen(t){
var td = new doh.Deferred();
dojo.when(delay(), function(){
td.callback(true);
});
return td;
},
function progress(t){
var td = new doh.Deferred();
var percentDone;
dojo.when(delay(), function(){
t.is(percentDone, 0.5);
td.callback(true);
},function(){},
function(completed){
percentDone = completed;
});
return td;
},
function errorHandler(t){
var def = new dojo.Deferred();
var handledError;
dojo.config.deferredOnError = function(e){
handledError = e;
};
def.reject(new Error("test"));
t.t(handledError instanceof Error);
},
function cancelThenDerivative(t){
var def = new dojo.Deferred();
var def2 = def.then();
try{
def2.cancel();
t.t(true); // Didn't throw an error
}catch(e){
t.t(false);
}
},
function cancelPromiseValue(t){
var cancelledDef;
var def = new dojo.Deferred(function(_def){ cancelledDef = _def; });
def.promise.cancel();
t.is(def, cancelledDef);
},
function errorResult(t){
var def = new dojo.Deferred();
var result = new Error("rejected");
def.reject(result);
t.is(def.fired, 1);
t.is(def.results[1], result);
},
function globalLeak(t){
var def = new dojo.Deferred();
def.then(function(){ return def; });
def.resolve(true);
t.is(dojo.global.results, undefined, "results is leaking into global");
t.is(dojo.global.fired, undefined, "fired is leaking into global");
},
function backAndForthProcess(t){
var def = new dojo.Deferred();
var retval = "fail";
def.addErrback(function(){
return "ignore error and throw this good string";
}).addCallback(function(){
throw new Error("error1");
}).addErrback(function(){
return "ignore second error and make it good again";
}).addCallback(function(){
retval = "succeed";
});
def.errback("");
t.assertEqual("succeed", retval);
},
function backAndForthProcessThen(t){
var def = new dojo.Deferred;
var retval = "fail";
def.then(null, function(){
return "ignore error and throw this good string";
}).then(function(){
throw "error1";
}).then(null, function(){
return "ignore second error and make it good again";
}).then(function(){
retval = "succeed";
});
def.reject("");
t.assertEqual("succeed", retval);
},
function returnErrorObject(t){
var def = new dojo.Deferred();
var retval = "fail";
def.addCallback(function(){
return new Error("returning an error should work same as throwing");
}).addErrback(function(){
retval = "succeed";
});
def.callback();
t.assertEqual("succeed", retval);
},
function returnErrorObjectThen(t){
var def = new dojo.Deferred();
var retval = "fail";
def.then(function(){
return new Error("returning an error should NOT work same as throwing");
}).then(function(){
retval = "succeed";
});
def.resolve();
t.assertEqual("succeed", retval);
},
function errbackWithPromise(t){
var def = new dojo.Deferred();
var retval;
def.addCallbacks(function(){}, function(err){
return err;
});
def.promise.then(
function(){ retval = "fail"; },
function(){ retval = "succeed"; });
def.errback(new Error);
t.assertEqual("succeed", retval);
}
]
);<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import re
from setuptools import setup, find_packages
# Auto detect the library version from the __init__.py file
with open('xbee/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
setup(
name='XBee',
version=version,
description='Python tools for working with XBee radios',
long_description=open('README.rst').read(),
url='https://github.com/nioinnovation/python-xbee',
author='n.io',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Terminals :: Serial',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=find_packages(exclude=['tests', '*.tests']),
install_requires=['pyserial'],<|fim▁hole|>)<|fim▁end|> | extras_require={
'tornado': ['tornado']
} |
<|file_name|>shows-model.js<|end_file_name|><|fim▁begin|>import xhr from './lib/xhr';
class ShowsModel {
constructor() {
this.shows = [];<|fim▁hole|> fetch(cb) {
xhr('https://raw.githubusercontent.com/dashersw/erste.js-demo/master/src/static/data/shows.json', (err, data) => {
this.shows = data.slice(0, 20);
cb(this.shows);
});
};
}
export default new ShowsModel();<|fim▁end|> | }
|
<|file_name|>Alayaya.js<|end_file_name|><|fim▁begin|>const DrawCard = require('../../drawcard.js');
class Alayaya extends DrawCard {
setupCardAbilities() {<|fim▁hole|> this.reaction({
when: {
afterChallenge: event => (
event.challenge.winner === this.controller &&
this.isParticipating() &&
event.challenge.loser.gold >= 1)
},
handler: context => {
let otherPlayer = context.event.challenge.loser;
this.game.transferGold({ from: otherPlayer, to: this.controller, amount: 1 });
this.game.addMessage('{0} uses {1} to move 1 gold from {2}\'s gold pool to their own', this.controller, this, otherPlayer);
}
});
}
}
Alayaya.code = '05013';
module.exports = Alayaya;<|fim▁end|> | |
<|file_name|>test-fast-csv.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.7.1
(function() {
var $, CSV, ES, TEXT, TRM, TYPES, alert, badge, create_readstream, debug, echo, help, info, log, njs_fs, rainbow, route, rpr, urge, warn, whisper;
njs_fs = require('fs');
TYPES = require('coffeenode-types');
TEXT = require('coffeenode-text');
TRM = require('coffeenode-trm');
rpr = TRM.rpr.bind(TRM);
badge = 'TIMETABLE/read-gtfs-data';
log = TRM.get_logger('plain', badge);
info = TRM.get_logger('info', badge);
whisper = TRM.get_logger('whisper', badge);
alert = TRM.get_logger('alert', badge);
debug = TRM.get_logger('debug', badge);
warn = TRM.get_logger('warn', badge);
help = TRM.get_logger('help', badge);
urge = TRM.get_logger('urge', badge);
echo = TRM.echo.bind(TRM);
rainbow = TRM.rainbow.bind(TRM);
create_readstream = require('../create-readstream');
/* http://c2fo.github.io/fast-csv/index.html, https://github.com/C2FO/fast-csv */
CSV = require('fast-csv');
ES = require('event-stream');
$ = ES.map.bind(ES);
this.$count = function(input_stream, title) {<|fim▁hole|> return info((title != null ? title : 'Count') + ':', count);
});
return $((function(_this) {
return function(record, handler) {
count += 1;
return handler(null, record);
};
})(this));
};
this.$skip_empty = function() {
return $((function(_this) {
return function(record, handler) {
if (record.length === 0) {
return handler();
}
return handler(null, record);
};
})(this));
};
this.$show = function() {
return $((function(_this) {
return function(record, handler) {
urge(rpr(record.toString()));
return handler(null, record);
};
})(this));
};
this.read_trips = function(route, handler) {
var input;
input = CSV.fromPath(route);
input.on('end', function() {
log('ok: trips');
return handler(null);
});
input.setMaxListeners(100);
input.pipe(this.$count(input, 'trips A')).pipe(this.$show());
return null;
};
if (!module.parent) {
route = '/Volumes/Storage/cnd/node_modules/timetable-data/germany-berlin-2014/agency.txt';
this.read_trips(route, function(error) {
if (error != null) {
throw error;
}
return log('ok');
});
}
}).call(this);<|fim▁end|> | var count;
count = 0;
input_stream.on('end', function() { |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/**
* @module main application controller
* @description Runs socket.io and static http server, listens for client connections and directs user/room instances.
*
* @author cemckinley <[email protected]>
* @copyright Copyright (c) 2013 Author, contributors
* @license GPL v3
*/
var fs = require('fs'),
https = require('https'),<|fim▁hole|> globalEvents = require('./controllers/global-events'),
User = require('./controllers/user');
// Application singleton: builds the HTTPS server carrying the socket.io
// endpoint and wires global events to session/user construction.
// Exposes only init().
var nodeMud = (function(){

    /** properties **/
    var httpsOptions = {
            // TLS material is loaded from the environment config module.
            key: fs.readFileSync(config.ssl.keyPath),
            cert: fs.readFileSync(config.ssl.certPath),
            passphrase: config.ssl.passphrase
        },
        httpsServer = https.createServer(httpsOptions, _httpsHandler).listen(config.socket.port), // server for socket.io socket
        io = socketio.listen(httpsServer); // websocket

    /** functions **/

    // Connects the database, then subscribes the two entry-point events:
    // raw socket connections and post-authentication user creation.
    function init(){
        db.connect();

        // event listeners
        io.sockets.on('connection', _onClientConnect);
        globalEvents.on('userAuth', _onClientAuth);
    }

    // Minimal HTTPS handler: the server exists only to carry the websocket,
    // so plain HTTPS requests receive an empty 200 response.
    function _httpsHandler(req, res){
        res.writeHead(200);
        res.end();
    }

    // Every new socket gets its own SessionHandler instance.
    function _onClientConnect(socket){
        var client = new SessionHandler(socket);
    }

    // After authentication, promote the session to a full User instance.
    // NOTE(review): the emitted message lacks a space between the user name
    // and the text — confirm whether that is intended.
    function _onClientAuth(userData, clientSocket){
        var user = new User(clientSocket, userData);
        clientSocket.emit('message', userData.name + 'successful user auth');
    }

    /** PUBLIC API **/
    return {
        init: init
    };

}());
nodeMud.init();<|fim▁end|> | socketio = require('socket.io'),
config = require('./config/env'),
db = require('./controllers/db'),
SessionHandler = require('./controllers/session-handler'), |
<|file_name|>NapierDBVis.java<|end_file_name|><|fim▁begin|>package example.multiview;
import io.db.Connect;
import io.db.ConnectFactory;
import io.db.FormatResultSet;
import io.json.JSONStructureMaker;
import io.parcoord.db.MakeTableModel;
import java.awt.BasicStroke;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Map.Entry;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSlider;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.metal.MetalLookAndFeel;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import model.graph.Edge;
import model.graph.EdgeSetValueMaker;
import model.graph.GraphFilter;
import model.graph.GraphModel;
import model.graph.impl.SymmetricGraphInstance;
import model.matrix.DefaultMatrixTableModel;
import model.matrix.MatrixTableModel;
import model.shared.selection.LinkedGraphMatrixSelectionModelBridge;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.MissingNode;
import org.codehaus.jackson.node.ObjectNode;
import swingPlus.graph.GraphCellRenderer;
import swingPlus.graph.JGraph;
import swingPlus.graph.force.impl.BarnesHut2DForceCalculator;
import swingPlus.graph.force.impl.EdgeWeightedAttractor;
import swingPlus.matrix.JHeaderRenderer;
import swingPlus.matrix.JMatrix;
import swingPlus.parcoord.JColumnList;
import swingPlus.parcoord.JColumnList2;
import swingPlus.parcoord.JParCoord;
import swingPlus.shared.MyFrame;
import swingPlus.tablelist.ColumnSortControl;
import swingPlus.tablelist.JEditableVarColTable;
import ui.StackedRowTableUI;
import util.Messages;
import util.colour.ColorUtilities;
import util.ui.NewMetalTheme;
import util.ui.VerticalLabelUI;
import example.graph.renderers.node.NodeDegreeGraphCellRenderer;
import example.multiview.renderers.edge.EdgeCountFatEdgeRenderer;
import example.multiview.renderers.matrix.JSONObjHeaderRenderer;
import example.multiview.renderers.matrix.KeyedDataHeaderRenderer;
import example.multiview.renderers.matrix.NumberShadeRenderer;
import example.multiview.renderers.node.JSONNodeTypeGraphRenderer;
import example.multiview.renderers.node.JSONTooltipGraphCellRenderer;
import example.multiview.renderers.node.KeyedDataGraphCellRenderer;
import example.multiview.renderers.node.TableTooltipGraphCellRenderer;
import example.multiview.renderers.node.valuemakers.NodeTotalEdgeWeightValueMaker;
import example.tablelist.renderers.ColourBarCellRenderer;
public class NapierDBVis {
static final Logger LOGGER = Logger.getLogger (NapierDBVis.class);
/**
* @param args
*/
/**
 * Entry point: installs the custom Metal theme, configures log4j from the
 * bundled "log4j" properties resource, then builds the whole UI by
 * constructing a {@link NapierDBVis} instance.
 *
 * @param args unused
 */
public static void main (final String[] args) {
    //final MetalLookAndFeel lf = new MetalLookAndFeel();
    MetalLookAndFeel.setCurrentTheme (new NewMetalTheme());
    PropertyConfigurator.configure (Messages.makeProperties ("log4j"));
    new NapierDBVis ();
}
public NapierDBVis () {
TableModel tableModel = null;
GraphModel graph = null;
TableModel listTableModel = null;
MatrixTableModel matrixModel = null;
Map<JsonNode, String> nodeTypeMap = null;
final Properties connectionProperties = Messages.makeProperties ("dbconnect", this.getClass(), false);
final Properties queryProperties = Messages.makeProperties ("queries", this.getClass(), false);
final Connect connect = ConnectFactory.getConnect (connectionProperties);
//ResultSet resultSet = null;
Statement stmt;
try {
stmt = connect.getConnection().createStatement();
//final ResultSet resultSet = stmt.executeQuery ("Select * from people where peopleid>0;");
final String peopleDataQuery = queryProperties.get ("PeopleData").toString();
System.err.println (peopleDataQuery);
final ResultSet peopleDataResultSet = stmt.executeQuery (peopleDataQuery);
final MakeTableModel mtm2 = new MakeTableModel();
tableModel = mtm2.makeTable (peopleDataResultSet);
//final ResultSet resultSet = stmt.executeQuery ("Select * from people where peopleid>0;");
final String pubJoinQuery = queryProperties.get ("PublicationJoin").toString();
System.err.println (pubJoinQuery);
final ResultSet pubJoinResultSet = stmt.executeQuery (pubJoinQuery);
//FormatResultSet.getInstance().printResultSet (resultSet);
final MakeTableModel mtm = new MakeTableModel();
TableModel tableModel2 = mtm.makeTable (pubJoinResultSet);
//final DatabaseMetaData dmd = connect.getConnection().getMetaData();
//final ResultSet resultSet2 = dmd.getProcedures (connect.getConnection().getCatalog(), null, "%");
//FormatResultSet.getInstance().printResultSet (resultSet2);
final String pubsByYearQuery = queryProperties.get ("PubsByYear").toString();
System.err.println (pubsByYearQuery);
final ResultSet pubsByYearResultSet = stmt.executeQuery (pubsByYearQuery);
final MakeTableModel mtm3 = new MakeTableModel();
TableModel tableModel3 = mtm3.makeTable (pubsByYearResultSet);
listTableModel = makePubByYearTable (tableModel3);
Map<Object, KeyedData> keyDataMap = makeKeyedDataMap (tableModel, 0, 1);
graph = makeGraph (keyDataMap, "peopleid", tableModel2);
matrixModel = new DefaultMatrixTableModel (graph);
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
connect.close();
}
connect.close();
System.err.println (tableModel == null ? "no model" : "tableModel rows: "+tableModel.getRowCount()+", cols: "+tableModel.getColumnCount());
/*
try {
final ObjectMapper objMapper = new ObjectMapper ();
final JsonNode rootNode = objMapper.readValue (new File (fileName), JsonNode.class);
LOGGER.info ("rootnode: "+rootNode);
final JSONStructureMaker structureMaker = new JSONStructureMaker (rootNode);
graph = structureMaker.makeGraph (new String[] {"people"}, new String[] {"publications", "grants"});
//graph = structureMaker.makeGraph (new String[] {"grants"}, new String[] {"publications", "people"});
//graph = structureMaker.makeGraph (new String[] {"publications", "people", "grants"}, new String[] {"people"});
//tableModel = structureMaker.makeTable ("publications");
tableModel = structureMaker.makeTable ("people");
matrixModel = new DefaultMatrixTableModel (graph);
nodeTypeMap = structureMaker.makeNodeTypeMap (new String[] {"publications", "people", "grants"});
} catch (JsonParseException e) {
e.printStackTrace();
} catch (JsonMappingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
*/
Map<Object, Integer> keyRowMap = makeKeyRowMap (tableModel, 0);
final JGraph jgraph = new JGraph (graph);
final EdgeWeightedAttractor edgeWeighter = new EdgeWeightedAttractor ();
jgraph.setAttractiveForceCalculator (edgeWeighter);
jgraph.setShowEdges (true);
final EdgeSetValueMaker weightedEdgeMaker = new NodeTotalEdgeWeightValueMaker ();
//final GraphCellRenderer tableTupleRenderer = new TableTupleGraphRenderer (tableModel, keyRowMap);
final GraphCellRenderer jsonGraphRenderer = new JSONNodeTypeGraphRenderer (nodeTypeMap);
jgraph.setDefaultNodeRenderer (String.class, new NodeDegreeGraphCellRenderer (10.0));
jgraph.setDefaultNodeRenderer (JsonNode.class, jsonGraphRenderer);
jgraph.setDefaultNodeRenderer (ObjectNode.class, jsonGraphRenderer);
jgraph.setDefaultNodeRenderer (KeyedData.class, new KeyedDataGraphCellRenderer (weightedEdgeMaker));
jgraph.setDefaultEdgeRenderer (Integer.class, new EdgeCountFatEdgeRenderer ());
jgraph.setDefaultNodeToolTipRenderer (KeyedData.class, new TableTooltipGraphCellRenderer ());
final JTable pubTable = new JEditableVarColTable (listTableModel);
//final JTable jtable3 = new JTable (dtm);
pubTable.setSelectionMode (ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
pubTable.setRowSelectionAllowed (true);
//jt2.setColumnSelectionAllowed (true);
pubTable.setRowSorter (new TableRowSorter<DefaultTableModel> ((DefaultTableModel)listTableModel));
final StackedRowTableUI tlui = new StackedRowTableUI ();
pubTable.setUI (tlui);
tlui.setRelativeLayout (true);
final Color[] columnColours = new Color [pubTable.getColumnCount() - 1];
for (int n = 0; n < columnColours.length; n++) {
double perc = (double)n / columnColours.length;
columnColours[n] = ColorUtilities.mixColours (Color.orange, new Color (0, 128, 255), (float)perc);
}
pubTable.getTableHeader().setReorderingAllowed(true);
pubTable.getTableHeader().setResizingAllowed(false);
System.err.println ("ptc: "+pubTable.getColumnModel().getColumnCount());
for (int col = 1; col < pubTable.getColumnCount(); col++) {
System.err.println ("col: "+col+", ptyc: "+pubTable.getColumnModel().getColumn(col));
pubTable.getColumnModel().getColumn(col).setCellRenderer (new ColourBarCellRenderer (columnColours [(col - 1) % columnColours.length]));
}
final JColumnList jcl = new JColumnList (pubTable) {
@Override
public boolean isCellEditable (final int row, final int column) {
return super.isCellEditable (row, column) && row > 0;
}
};
//jcl.addTable (pubTable);
final JMatrix jmatrix = new JMatrix ((TableModel) matrixModel);
//final JHeaderRenderer stringHeader = new JSONObjHeaderRenderer ();
//final JHeaderRenderer stringHeader2 = new JSONObjHeaderRenderer ();
final JHeaderRenderer stringHeader = new KeyedDataHeaderRenderer ();
final JHeaderRenderer stringHeader2 = new KeyedDataHeaderRenderer ();
jmatrix.getRowHeader().setDefaultRenderer (Object.class, stringHeader);
jmatrix.getRowHeader().setDefaultRenderer (String.class, stringHeader);
jmatrix.getColumnHeader().setDefaultRenderer (Object.class, stringHeader2);
jmatrix.getColumnHeader().setDefaultRenderer (String.class, stringHeader2);
((JLabel)stringHeader2).setUI (new VerticalLabelUI (false));
stringHeader.setSelectionBackground (jmatrix.getRowHeader());
stringHeader2.setSelectionBackground (jmatrix.getColumnHeader());
//jmatrix.setDefaultRenderer (HashSet.class, stringHeader);
jmatrix.setDefaultRenderer (String.class, stringHeader);
jmatrix.setDefaultRenderer (Integer.class, new NumberShadeRenderer ());
final JTable table = new JParCoord (tableModel);
table.setSelectionMode (ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
table.setRowSelectionAllowed (true);
table.setAutoCreateRowSorter (true);
table.setColumnSelectionAllowed (true);
table.setForeground (Color.lightGray);
table.setSelectionForeground (Color.orange);
if (table instanceof JParCoord) {
((JParCoord)table).setBrushForegroundColour (Color.gray);
((JParCoord)table).setBrushSelectionColour (Color.red);
((JParCoord)table).setSelectedStroke (new BasicStroke (2.0f));
//((JParCoord)table).setBrushing (true);
}
table.setGridColor (Color.gray);
table.setShowVerticalLines (false);
table.setBorder (BorderFactory.createEmptyBorder (24, 2, 24, 2));
if (table.getRowSorter() instanceof TableRowSorter) {
final TableRowSorter<? extends TableModel> trs = (TableRowSorter<? extends TableModel>)table.getRowSorter();
}
table.setAutoResizeMode (JTable.AUTO_RESIZE_OFF);
/*
jgraph.setPreferredSize (new Dimension (768, 640));
table.setPreferredSize (new Dimension (768, 384));
table.setMinimumSize (new Dimension (256, 128));
final LinkedGraphMatrixSelectionModelBridge selectionBridge = new LinkedGraphMatrixSelectionModelBridge ();
selectionBridge.addJGraph (jgraph);
selectionBridge.addJTable (table);
selectionBridge.addJTable (jmatrix);
*/
SwingUtilities.invokeLater (
new Runnable () {
@Override
public void run() {
final JFrame jf2 = new MyFrame ("JGraph Demo");
jf2.setSize (1024, 768);
final JPanel optionPanel = new JPanel ();
optionPanel.setLayout (new BoxLayout (optionPanel, BoxLayout.Y_AXIS));
final JSlider llengthSlider = new JSlider (20, 1000, (int)edgeWeighter.getLinkLength());
llengthSlider.addChangeListener(
new ChangeListener () {
@Override
public void stateChanged (final ChangeEvent cEvent) {
edgeWeighter.setLinkLength (llengthSlider.getValue());
}
}
);
final JSlider lstiffSlider = new JSlider (20, 1000, edgeWeighter.getStiffness());
lstiffSlider.addChangeListener(
new ChangeListener () {
@Override
public void stateChanged (final ChangeEvent cEvent) {
edgeWeighter.setStiffness (lstiffSlider.getValue());
}
}
);
final JSlider repulseSlider = new JSlider (1, 50, 10);
repulseSlider.addChangeListener(
new ChangeListener () {
@Override
public void stateChanged (final ChangeEvent cEvent) {
((BarnesHut2DForceCalculator)jgraph.getRepulsiveForceCalculator()).setAttenuator (3.0 / repulseSlider.getValue());
}
}
);
final JCheckBox showSingletons = new JCheckBox ("Show singletons", true);
showSingletons.addActionListener (
new ActionListener () {
@Override
public void actionPerformed (final ActionEvent e) {
final Object source = e.getSource();
if (source instanceof JCheckBox) {
final boolean selected = ((JCheckBox)source).isSelected();
final GraphFilter singletonFilter = new GraphFilter () {
@Override
public boolean includeNode (final Object obj) {
return jgraph.getModel().getEdges(obj).size() > 0 || selected;
}
@Override
public boolean includeEdge (final Edge edge) {
return true;
}
};
jgraph.setGraphFilter (singletonFilter);
}
}
}
);
final JButton clearSelections = new JButton ("Clear Selections");
clearSelections.addActionListener (
new ActionListener () {
@Override
public void actionPerformed (ActionEvent e) {
jgraph.getSelectionModel().clearSelection ();
}
}
);
final JButton graphFreezer = new JButton ("Freeze Graph");
graphFreezer.addActionListener (
new ActionListener () {
@Override
public void actionPerformed (ActionEvent e) {
jgraph.pauseWorker();
}
}
);
optionPanel.add (new JLabel ("Link Length:"));
optionPanel.add (llengthSlider);
optionPanel.add (new JLabel ("Link Stiffness:"));
optionPanel.add (lstiffSlider);
optionPanel.add (new JLabel ("Repulse Strength:"));
optionPanel.add (repulseSlider);
optionPanel.add (showSingletons);
optionPanel.add (clearSelections);
optionPanel.add (graphFreezer);
JPanel listTablePanel = new JPanel (new BorderLayout ());
listTablePanel.add (new JScrollPane (pubTable), BorderLayout.CENTER);
final Box pubControlPanel = Box.createVerticalBox();
final JScrollPane pubTableScrollPane = new JScrollPane (pubControlPanel);
pubTableScrollPane.setPreferredSize (new Dimension (168, 400));
jcl.getColumnModel().getColumn(1).setWidth (30);
listTablePanel.add (pubTableScrollPane, BorderLayout.WEST);
JTable columnSorter = new ColumnSortControl (pubTable);
pubControlPanel.add (jcl.getTableHeader());
pubControlPanel.add (jcl);
pubControlPanel.add (columnSorter.getTableHeader());
pubControlPanel.add (columnSorter);
JScrollPane parCoordsScrollPane = new JScrollPane (table);
JScrollPane matrixScrollPane = new JScrollPane (jmatrix);
JTabbedPane jtp = new JTabbedPane ();
JPanel graphPanel = new JPanel (new BorderLayout ());
graphPanel.add (jgraph, BorderLayout.CENTER);
graphPanel.add (optionPanel, BorderLayout.WEST);
jtp.addTab ("Node-Link", graphPanel);
jtp.addTab ("Matrix", matrixScrollPane);
jtp.addTab ("Pubs", listTablePanel);
jtp.addTab ("||-Coords", parCoordsScrollPane);<|fim▁hole|>
//jf2.getContentPane().add (optionPanel, BorderLayout.EAST);
jf2.getContentPane().add (jtp, BorderLayout.CENTER);
//jf2.getContentPane().add (tableScrollPane, BorderLayout.SOUTH);
jf2.setVisible (true);
}
}
);
}
/**
 * Builds an undirected co-author graph from a JDBC result set whose first
 * two columns hold the two author keys of one collaboration row.  Repeated
 * pairs are collapsed into a single edge whose Integer payload counts the
 * occurrences.
 *
 * @param nodeSet unused here; nodes are created on demand from edge rows —
 *                NOTE(review): confirm whether it was meant to seed nodes
 * @param edgeSet rows of (author1, author2) pairs; rewound before reading
 * @return symmetric graph with count-weighted Integer edges
 * @throws SQLException if cursor navigation or column access fails
 */
public GraphModel makeGraph (final ResultSet nodeSet, final ResultSet edgeSet) throws SQLException {
    edgeSet.beforeFirst();
    final GraphModel graph = new SymmetricGraphInstance ();
    while (edgeSet.next()) {
        Object author1 = edgeSet.getObject(1);  // JDBC columns are 1-based
        Object author2 = edgeSet.getObject(2);
        graph.addNode (author1);
        graph.addNode (author2);
        final Set<Edge> edges = graph.getEdges (author1, author2);
        if (edges.isEmpty()) {
            graph.addEdge (author1, author2, Integer.valueOf (1));
        } else {
            // Edge already present: bump its Integer weight in place.
            final Iterator<Edge> edgeIter = edges.iterator();
            final Edge firstEdge = edgeIter.next();
            final Integer val = (Integer)firstEdge.getEdgeObject();
            firstEdge.setEdgeObject (Integer.valueOf (val.intValue() + 1));
            //graph.removeEdge (firstEdge);
            //graph.addEdge (node1, node2, Integer.valueOf (val.intValue() + 1));
        }
    }
    return graph;
}
/**
 * Builds a co-author graph where each edge row carries two primary keys
 * that are resolved to rows of {@code nodes}; the graph node object is the
 * value in column 1 (the label column) of the matched row.  Repeated pairs
 * increment the Integer edge weight.
 *
 * @param nodes            node table; column 0 is the primary key, column 1
 *                         supplies the graph node object
 * @param primaryKeyColumn unused — NOTE(review): the key lookup is
 *                         hard-wired to column 0; confirm intent
 * @param edges            table of (authorKey1, authorKey2) rows
 * @return symmetric graph with Integer edge weights
 * @throws SQLException declared but never thrown in this body
 */
public GraphModel makeGraph (final TableModel nodes, final String primaryKeyColumn, final TableModel edges) throws SQLException {
    final GraphModel graph = new SymmetricGraphInstance ();

    // Index the node table: primary key (column 0) -> row number.
    final Map<Object, Integer> primaryKeyRowMap = new HashMap<Object, Integer> ();
    for (int row = 0; row < nodes.getRowCount(); row++) {
        primaryKeyRowMap.put (nodes.getValueAt (row, 0), Integer.valueOf (row));
    }

    for (int row = 0; row < edges.getRowCount(); row++) {
        Object authorKey1 = edges.getValueAt (row, 0);
        Object authorKey2 = edges.getValueAt (row, 1);
        // -1 marks keys with no matching node row; such edges are skipped.
        int authorIndex1 = (primaryKeyRowMap.get(authorKey1) == null ? -1 : primaryKeyRowMap.get(authorKey1).intValue());
        int authorIndex2 = (primaryKeyRowMap.get(authorKey2) == null ? -1 : primaryKeyRowMap.get(authorKey2).intValue());
        if (authorIndex1 >= 0 && authorIndex2 >= 0) {
            Object graphNode1 = nodes.getValueAt (authorIndex1, 1);
            Object graphNode2 = nodes.getValueAt (authorIndex2, 1);
            graph.addNode (graphNode1);
            graph.addNode (graphNode2);
            final Set<Edge> gedges = graph.getEdges (graphNode1, graphNode2);
            if (gedges.isEmpty()) {
                graph.addEdge (graphNode1, graphNode2, Integer.valueOf (1));
            } else {
                // Existing edge: increment its Integer weight in place.
                final Iterator<Edge> edgeIter = gedges.iterator();
                final Edge firstEdge = edgeIter.next();
                final Integer val = (Integer)firstEdge.getEdgeObject();
                firstEdge.setEdgeObject (Integer.valueOf (val.intValue() + 1));
            }
        }
    }
    return graph;
}
/**
 * Builds a co-author graph directly from a key -&gt; KeyedData map: each edge
 * row's two keys are resolved through {@code keyDataMap} and the resulting
 * KeyedData objects become graph nodes.  Repeated pairs increment the
 * Integer edge weight; rows with null or unknown keys are skipped.
 *
 * @param keyDataMap       primary key -&gt; KeyedData row snapshot
 * @param primaryKeyColumn unused — kept for signature parity with the
 *                         TableModel overload
 * @param edges            table of (authorKey1, authorKey2) rows
 * @return symmetric graph with Integer edge weights
 * @throws SQLException declared but never thrown in this body
 */
public GraphModel makeGraph (final Map<Object, KeyedData> keyDataMap, final String primaryKeyColumn, final TableModel edges) throws SQLException {
    final GraphModel graph = new SymmetricGraphInstance ();
    for (int row = 0; row < edges.getRowCount(); row++) {
        Object authorKey1 = edges.getValueAt (row, 0);
        Object authorKey2 = edges.getValueAt (row, 1);
        if (authorKey1 != null && authorKey2 != null) {
            Object graphNode1 = keyDataMap.get (authorKey1);
            Object graphNode2 = keyDataMap.get (authorKey2);
            // Keys without a matching KeyedData entry are skipped.
            if (graphNode1 != null && graphNode2 != null) {
                graph.addNode (graphNode1);
                graph.addNode (graphNode2);
                final Set<Edge> gedges = graph.getEdges (graphNode1, graphNode2);
                if (gedges.isEmpty()) {
                    graph.addEdge (graphNode1, graphNode2, Integer.valueOf (1));
                } else {
                    // Existing edge: increment its Integer weight in place.
                    final Iterator<Edge> edgeIter = gedges.iterator();
                    final Edge firstEdge = edgeIter.next();
                    final Integer val = (Integer)firstEdge.getEdgeObject();
                    firstEdge.setEdgeObject (Integer.valueOf (val.intValue() + 1));
                }
            }
        }
    }
    return graph;
}
/**
 * Indexes a table by primary key: maps the value found in column
 * {@code columnPKIndex} of each row to that row's index.
 *
 * Bug fix: the lookup column was hard-coded to 0, silently ignoring
 * {@code columnPKIndex}; the parameter is now honoured.  The only caller in
 * this file passes 0, so existing behaviour is unchanged.
 *
 * @param tableModel    table to index
 * @param columnPKIndex column holding the primary-key values
 * @return map of primary-key value to row index (later rows win on duplicate keys)
 */
public Map<Object, Integer> makeKeyRowMap (final TableModel tableModel, final int columnPKIndex) {
    final Map<Object, Integer> primaryKeyRowMap = new HashMap<Object, Integer> ();
    for (int row = 0; row < tableModel.getRowCount(); row++) {
        primaryKeyRowMap.put (tableModel.getValueAt (row, columnPKIndex), Integer.valueOf (row));
    }
    return primaryKeyRowMap;
}
/**
 * Builds a primary key -&gt; KeyedData snapshot for every row of the table.
 *
 * @param tableModel       source table
 * @param columnPKIndex    column holding the primary-key values
 * @param columnLabelIndex column used as the KeyedData display label
 * @return map of key value to full-row snapshot
 */
public Map<Object, KeyedData> makeKeyedDataMap (final TableModel tableModel, final int columnPKIndex, final int columnLabelIndex) {
    final Map<Object, KeyedData> primaryKeyDataMap = new HashMap<Object, KeyedData> ();
    for (int row = 0; row < tableModel.getRowCount(); row++) {
        primaryKeyDataMap.put (tableModel.getValueAt (row, columnPKIndex), makeKeyedData (tableModel, columnPKIndex, columnLabelIndex, row));
    }
    return primaryKeyDataMap;
}
/**
 * Snapshots one table row into a {@code KeyedData} value object holding the
 * row's primary key, all of its cell values, and the label column index.
 *
 * @param tableModel       source table
 * @param columnPKIndex    column holding the primary key
 * @param columnLabelIndex column used as the display label
 * @param rowIndex         row to snapshot
 * @return the populated KeyedData instance
 */
public KeyedData makeKeyedData (final TableModel tableModel, final int columnPKIndex, final int columnLabelIndex, final int rowIndex) {
    final List<Object> rowValues = new ArrayList<Object> ();
    for (int column = 0; column < tableModel.getColumnCount(); column++) {
        rowValues.add (tableModel.getValueAt (rowIndex, column));
    }
    return new KeyedData (tableModel.getValueAt (rowIndex, columnPKIndex), rowValues, columnLabelIndex);
}
/**
* can't do pivot queries in ANSI SQL
* @param sqlresult
* @return
*/
public TableModel makePubByYearTable (final TableModel sqlresult) {
    // Result model: column 0 is the year (Integer), every further column is
    // a per-publication-type count (Long).  All cells are read-only.
    DefaultTableModel tm = new DefaultTableModel () {
        public Class<?> getColumnClass(int columnIndex) {
            if (columnIndex > 0) {
                return Long.class;
            }
            return Integer.class;
        }

        public boolean isCellEditable (final int row, final int column) {
            return false;
        }
    };

    Map<Object, List<Long>> yearsToTypes = new HashMap<Object, List<Long>> ();
    Map<Object, Integer> columnTypes = new HashMap<Object, Integer> ();
    tm.addColumn ("Year");
    int col = 1;
    // First pass: discover the distinct publication types (column 1 of the
    // SQL result) and allocate one output column per type.
    for (int sqlrow = 0; sqlrow < sqlresult.getRowCount(); sqlrow++) {
        Object type = sqlresult.getValueAt (sqlrow, 1);
        if (columnTypes.get(type) == null) {
            columnTypes.put(type, Integer.valueOf(col));
            tm.addColumn (type);
            col++;
        }
    }
    System.err.println ("cols: "+columnTypes+", "+columnTypes.size());
    // Second pass: pivot the (year, type, count) rows into one zero-filled
    // List<Long> per year, so types missing for a year default to 0.
    for (int sqlrow = 0; sqlrow < sqlresult.getRowCount(); sqlrow++) {
        Object year = sqlresult.getValueAt (sqlrow, 0);
        if (year != null) {
            Object type = sqlresult.getValueAt (sqlrow, 1);
            Object val = sqlresult.getValueAt (sqlrow, 2);
            int colIndex = columnTypes.get(type).intValue();
            List<Long> store = yearsToTypes.get (year);
            if (store == null) {
                Long[] storep = new Long [col - 1];
                Arrays.fill (storep, Long.valueOf(0));
                List<Long> longs = Arrays.asList (storep);
                // Wrap in a mutable ArrayList: Arrays.asList is fixed-size.
                store = new ArrayList (longs);
                //Collections.fill (store, Long.valueOf (0));
                yearsToTypes.put (year, store);
            }
            store.set (colIndex - 1, (Long)val);
        }
    }
    // Emit one output row per year: [year, count-per-type...].
    for (Entry<Object, List<Long>> yearEntry : yearsToTypes.entrySet()) {
        Object[] rowData = new Object [col];
        rowData[0] = yearEntry.getKey();
        for (int n = 1; n < col; n++) {
            rowData[n] = yearEntry.getValue().get(n-1);
        }
        tm.addRow(rowData);
    }
    return tm;
}
}<|fim▁end|> | jtp.setPreferredSize(new Dimension (800, 480)); |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>"""
Usage:
run.py mlp --train=<train> --test=<test> --config=<config>
run.py som --train=<train> --test=<test> --config=<config>
Options:
--train Path to training data, txt file.
--test Path to test data, txt file.
--config Json configuration for the network.
"""<|fim▁hole|>from redes_neurais.resources.manager import run_mlp, run_som
import docopt
def run():
    """Parse the docopt command line and dispatch to the selected network.

    ``mlp`` runs the multi-layer perceptron, ``som`` the self-organising
    map; both receive the JSON config plus train/test data paths.
    """
    try:
        args = docopt.docopt(__doc__)
        if args["mlp"]:
            run_mlp(args['--config'], args['--train'], args['--test'])
        if args["som"]:
            run_som(args['--config'], args['--train'], args['--test'])
    except docopt.DocoptExit as e:
        # Invalid usage: show docopt's usage message (Python 2 print).
        print e.message

if __name__ == "__main__":
    run()
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""This file contains utility functions."""
import logging
import re
# Illegal Unicode characters for XML.
ILLEGAL_XML_RE = re.compile(
ur'[\x00-\x08\x0b-\x1f\x7f-\x84\x86-\x9f'
ur'\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]')
def IsText(bytes_in, encoding=None):
  """Heuristically determine whether a byte sequence is text.

  The sequence is treated as text when it is printable ASCII, already a
  unicode object, valid UTF-8, or decodable with the optionally supplied
  codec; otherwise it is assumed to be a binary byte sequence.

  Args:
    bytes_in: The byte sequence that needs examination.
    encoding: Optional codec name to try in addition to ASCII and UTF-8.

  Returns:
    Boolean indicating whether the byte sequence looks like text.
  """
  # TODO: Improve speed and accuracy of this method.
  # Start with the assumption we are dealing with a text.
  is_ascii = True

  # Check if this is ASCII text string.
  # Only bytes in the printable range 32..127 count; control characters
  # (including newlines) make this check fail and fall through below.
  for char in bytes_in:
    if not 31 < ord(char) < 128:
      is_ascii = False
      break

  # We have an ASCII string.
  if is_ascii:
    return is_ascii

  # Is this already a unicode text?
  if isinstance(bytes_in, unicode):
    return True

  # Check if this is UTF-8
  try:
    _ = bytes_in.decode('utf-8')
    return True
  except UnicodeDecodeError:
    pass

  # TODO: UTF 16 decode is successful in too
  # many edge cases where we are not really dealing with
  # a text at all. Leaving this out for now, consider
  # re-enabling or making a better determination.
  #try:
  #  _ = bytes_in.decode('utf-16-le')
  #  return True
  #except UnicodeDecodeError:
  #  pass

  if encoding:
    try:
      _ = bytes_in.decode(encoding)
      return True
    except UnicodeDecodeError:
      pass
    except LookupError:
      # The supplied codec name is unknown to Python; log and fall through.
      logging.error(
          u'String encoding not recognized: {0:s}'.format(encoding))

  return False
def GetUnicodeString(string):
  """Return *string* as a unicode object, decoding byte input as UTF-8."""
  if isinstance(string, unicode):
    return string
  # Non-unicode input: coerce via str() and decode, dropping invalid bytes.
  return str(string).decode('utf8', 'ignore')
def GetInodeValue(inode_raw):
  """Convert a 'raw' inode value into an integer.

  Args:
    inode_raw: A string, float or integer inode value.

  Returns:
    The inode as an integer, or -1 when it cannot be converted.
  """
  if isinstance(inode_raw, (int, long)):
    return inode_raw

  if isinstance(inode_raw, float):
    return int(inode_raw)

  try:
    return int(inode_raw)
  except ValueError:
    pass

  # Some inode values look like "5-1"; retry with the part before the dash.
  inode_string, _, _ = str(inode_raw).partition('-')
  try:
    return int(inode_string)
  except ValueError:
    return -1
def RemoveIllegalXMLCharacters(string, replacement=u'\ufffd'):
  """Strip characters that are illegal in XML from *string*.

  Args:
    string: the string to sanitize; non-string input passes through as-is.
    replacement: character substituted for each illegal character found.

  Returns:
    The sanitized string, or the unchanged input when it is not a string.
  """
  if not isinstance(string, basestring):
    return string
  return ILLEGAL_XML_RE.sub(replacement, string)
Parsers need quick and at least semi reliable method of discovering whether
or not a particular byte stream is a text or resembles text or not. This can
be used in text parsers to determine if a file is a text file or not for |
<|file_name|>logging.js<|end_file_name|><|fim▁begin|>/**
@license
* @pnp/logging v1.0.3 - pnp - light-weight, subscribable logging framework
* MIT (https://github.com/pnp/pnp/blob/master/LICENSE)
* Copyright (c) 2018 Microsoft
* docs: http://officedev.github.io/PnP-JS-Core
* source: https://github.com/pnp/pnp
* bugs: https://github.com/pnp/pnp/issues
*/
/**
* Class used to subscribe ILogListener and log messages throughout an application
*
*/
class Logger {
/**
* Gets or sets the active log level to apply for log filtering
*/
static get activeLogLevel() {
return Logger.instance.activeLogLevel;
}
static set activeLogLevel(value) {
Logger.instance.activeLogLevel = value;
}
static get instance() {
if (typeof Logger._instance === "undefined" || Logger._instance === null) {
Logger._instance = new LoggerImpl();
}
return Logger._instance;
}
/**
* Adds ILogListener instances to the set of subscribed listeners
*
* @param listeners One or more listeners to subscribe to this log
*/
static subscribe(...listeners) {
listeners.map(listener => Logger.instance.subscribe(listener));
}
/**
* Clears the subscribers collection, returning the collection before modifiction
*/
static clearSubscribers() {
return Logger.instance.clearSubscribers();
}
/**
* Gets the current subscriber count
*/
static get count() {
return Logger.instance.count;
}
/**
* Writes the supplied string to the subscribed listeners
*
* @param message The message to write
* @param level [Optional] if supplied will be used as the level of the entry (Default: LogLevel.Verbose)
*/
static write(message, level = 0 /* Verbose */) {
<|fim▁hole|> *
* @param json The json object to stringify and write
* @param level [Optional] if supplied will be used as the level of the entry (Default: LogLevel.Verbose)
*/
static writeJSON(json, level = 0 /* Verbose */) {
Logger.instance.log({ level: level, message: JSON.stringify(json) });
}
/**
* Logs the supplied entry to the subscribed listeners
*
* @param entry The message to log
*/
static log(entry) {
Logger.instance.log(entry);
}
/**
* Logs an error object to the subscribed listeners
*
* @param err The error object
*/
static error(err) {
Logger.instance.log({ data: err, level: 3 /* Error */, message: err.message });
}
}
/**
 * Default backing store for Logger: keeps the active log level and the
 * subscribed listeners, and fans qualifying entries out to all of them.
 */
class LoggerImpl {
    constructor(activeLogLevel = 2 /* Warning */, subscribers = []) {
        this.activeLogLevel = activeLogLevel;
        this.subscribers = subscribers;
    }
    /** Registers a listener to receive future log entries. */
    subscribe(listener) {
        this.subscribers.push(listener);
    }
    /** Empties the listener list, returning the listeners that were removed. */
    clearSubscribers() {
        const removed = this.subscribers.slice(0);
        this.subscribers.length = 0;
        return removed;
    }
    /** Number of currently subscribed listeners. */
    get count() {
        return this.subscribers.length;
    }
    /** Convenience wrapper: wraps a plain message into a log entry. */
    write(message, level = 0 /* Verbose */) {
        this.log({ level: level, message: message });
    }
    /** Dispatches the entry to every listener when it meets the active level. */
    log(entry) {
        if (typeof entry === "undefined" || !(this.activeLogLevel <= entry.level)) {
            return;
        }
        this.subscribers.forEach(subscriber => subscriber.log(entry));
    }
}
/**
* Implementation of LogListener which logs to the console
*
*/
/**
 * LogListener implementation that routes entries to the console, choosing
 * the console method (log/warn/error) from the entry's severity.
 */
class ConsoleListener {
    /**
     * Writes a single entry to the console.
     *
     * @param entry The information to be logged
     */
    log(entry) {
        const text = this.format(entry);
        if (entry.level === 3 /* Error */) {
            console.error(text);
        } else if (entry.level === 2 /* Warning */) {
            console.warn(text);
        } else if (entry.level === 0 /* Verbose */ || entry.level === 1 /* Info */) {
            console.log(text);
        }
    }
    /**
     * Renders an entry as "Message: ..." plus an optional " Data: <json>"
     * suffix when the entry carries associated data.
     *
     * @param entry The information to format into a string
     */
    format(entry) {
        const dataPart = typeof entry.data === "undefined" ? "" : " Data: " + JSON.stringify(entry.data);
        return "Message: " + entry.message + dataPart;
    }
}
/**
* Implementation of LogListener which logs to the supplied function
*
*/
/**
 * LogListener implementation that forwards every entry to a caller-supplied
 * function, allowing arbitrary custom handling of log data.
 */
class FunctionListener {
    /**
     * Creates a new FunctionListener.
     *
     * @constructor
     * @param method Callback invoked with each log entry.
     */
    constructor(method) {
        this.method = method;
    }
    /**
     * Passes the entry straight through to the wrapped callback.
     *
     * @param entry The information to be logged
     */
    log(entry) {
        this.method(entry);
    }
}
export { Logger, ConsoleListener, FunctionListener };
//# sourceMappingURL=logging.js.map<|fim▁end|> | Logger.instance.log({ level: level, message: message });
}
/**
* Writes the supplied string to the subscribed listeners
|
<|file_name|>type_support.cpp<|end_file_name|><|fim▁begin|>// Copyright 2014 Open Source Robotics Foundation, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "rclcpp/type_support_decl.hpp"
#include "rclcpp/visibility_control.hpp"
#include "rcl_interfaces/msg/list_parameters_result.hpp"
#include "rcl_interfaces/msg/parameter_descriptor.hpp"
#include "rcl_interfaces/msg/parameter_event.hpp"
#include "rcl_interfaces/msg/set_parameters_result.hpp"
#include "rcl_interfaces/srv/describe_parameters.hpp"
#include "rcl_interfaces/srv/get_parameter_types.hpp"
#include "rcl_interfaces/srv/get_parameters.hpp"
#include "rcl_interfaces/srv/list_parameters.hpp"
#include "rcl_interfaces/srv/set_parameters.hpp"
#include "rcl_interfaces/srv/set_parameters_atomically.hpp"
const rosidl_message_type_support_t *
rclcpp::type_support::get_parameter_event_msg_type_support()<|fim▁hole|> rcl_interfaces::msg::ParameterEvent
>();
}
// Typesupport handle for the rcl_interfaces/msg/SetParametersResult message.
const rosidl_message_type_support_t *
rclcpp::type_support::get_set_parameters_result_msg_type_support()
{
  return rosidl_typesupport_cpp::get_message_type_support_handle<
    rcl_interfaces::msg::SetParametersResult
  >();
}
// Typesupport handle for the rcl_interfaces/msg/ParameterDescriptor message.
const rosidl_message_type_support_t *
rclcpp::type_support::get_parameter_descriptor_msg_type_support()
{
  return rosidl_typesupport_cpp::get_message_type_support_handle<
    rcl_interfaces::msg::ParameterDescriptor
  >();
}
// Typesupport handle for the rcl_interfaces/msg/ListParametersResult message.
const rosidl_message_type_support_t *
rclcpp::type_support::get_list_parameters_result_msg_type_support()
{
  return rosidl_typesupport_cpp::get_message_type_support_handle<
    rcl_interfaces::msg::ListParametersResult
  >();
}
// Typesupport handle for the rcl_interfaces/srv/GetParameters service.
const rosidl_service_type_support_t *
rclcpp::type_support::get_get_parameters_srv_type_support()
{
  return rosidl_typesupport_cpp::get_service_type_support_handle<
    rcl_interfaces::srv::GetParameters
  >();
}
// Typesupport handle for the rcl_interfaces/srv/GetParameterTypes service.
const rosidl_service_type_support_t *
rclcpp::type_support::get_get_parameter_types_srv_type_support()
{
  return rosidl_typesupport_cpp::get_service_type_support_handle<
    rcl_interfaces::srv::GetParameterTypes
  >();
}
// Typesupport handle for the rcl_interfaces/srv/SetParameters service.
const rosidl_service_type_support_t *
rclcpp::type_support::get_set_parameters_srv_type_support()
{
  return rosidl_typesupport_cpp::get_service_type_support_handle<
    rcl_interfaces::srv::SetParameters
  >();
}
// Typesupport handle for the rcl_interfaces/srv/ListParameters service.
const rosidl_service_type_support_t *
rclcpp::type_support::get_list_parameters_srv_type_support()
{
  return rosidl_typesupport_cpp::get_service_type_support_handle<
    rcl_interfaces::srv::ListParameters
  >();
}
// Typesupport handle for the rcl_interfaces/srv/DescribeParameters service.
const rosidl_service_type_support_t *
rclcpp::type_support::get_describe_parameters_srv_type_support()
{
  return rosidl_typesupport_cpp::get_service_type_support_handle<
    rcl_interfaces::srv::DescribeParameters
  >();
}
const rosidl_service_type_support_t *
rclcpp::type_support::get_set_parameters_atomically_srv_type_support()
{
return rosidl_typesupport_cpp::get_service_type_support_handle<
rcl_interfaces::srv::SetParametersAtomically
>();
}<|fim▁end|> | {
return rosidl_typesupport_cpp::get_message_type_support_handle< |
<|file_name|>test_UseCaseContribution.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import os
import json
from subprocess import call
import cairis.core.BorgFactory
from cairis.core.Borg import Borg
from cairis.core.ReferenceSynopsis import ReferenceSynopsis
from cairis.core.ReferenceContribution import ReferenceContribution
from cairis.core.ARM import DatabaseProxyException
from cairis.mio.ModelImport import importModelFile
__author__ = 'Shamal Faily'
class UseCaseContributionTest(unittest.TestCase):
  @classmethod
  def setUpClass(cls):
    # Initialise the CAIRIS database proxy once for the whole test case and
    # seed it with the webinos exemplar model that these tests query.
    cairis.core.BorgFactory.initialise()
    importModelFile(os.environ['CAIRIS_SRC'] + '/test/webinos.xml',1)
def setUp(self):
f = open(os.environ['CAIRIS_SRC'] + '/test/usecase_contributions.json')
d = json.load(f)
f.close()
self.csData = d['characteristic_synopses'][0]
self.rcData = d['usecase_contributions'][0]
  def tearDown(self):
    # Nothing to clean up between tests.
    pass
def testAddContribution(self):
ics = ReferenceSynopsis(-1,self.csData['theReference'],self.csData['theSynopsis'],self.csData['theDimension'],self.csData['theActorType'],self.csData['theActor'])
b = Borg()
b.dbProxy.addCharacteristicSynopsis(ics)
irc = ReferenceContribution(self.rcData['theSource'],self.rcData['theDestination'],self.rcData['theMeansEnd'],self.rcData['theContribution'])
b.dbProxy.addUseCaseContribution(irc)
orcs = b.dbProxy.getUseCaseContributions(self.rcData['theSource'])
orc,rType = orcs[self.rcData['theDestination']]
self.assertEqual(orc.source(), irc.source())
self.assertEqual(orc.destination(), irc.destination())<|fim▁hole|> b = Borg()
orcs = b.dbProxy.getUseCaseContributions(self.rcData['theSource'])
orc,rType = orcs[self.rcData['theDestination']]
orc.theContribution = 'Break'
b.dbProxy.updateUseCaseContribution(orc)
urcs = b.dbProxy.getUseCaseContributions(self.rcData['theSource'])
urc,rType = urcs[self.rcData['theDestination']]
self.assertEqual(orc.source(), urc.source())
self.assertEqual(orc.destination(), urc.destination())
self.assertEqual(orc.meansEnd(), urc.meansEnd())
self.assertEqual(orc.contribution(), urc.contribution())
if __name__ == '__main__':
unittest.main()<|fim▁end|> | self.assertEqual(orc.meansEnd(), irc.meansEnd())
self.assertEqual(orc.contribution(), irc.contribution())
def testUpdateContribution(self): |
<|file_name|>cargo_compile.rs<|end_file_name|><|fim▁begin|>//!
//! Cargo compile currently does the following steps:
//!
//! All configurations are already injected as environment variables via the
//! main cargo command
//!
//! 1. Read the manifest
//! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as
//! stdin
//!
//! a. Shell out to `--do update` and `--do list` for each source
//! b. Resolve dependencies and return a list of name/version/source
//!
//! 3. Shell out to `--do download` for each source
//! 4. Shell out to `--do get` for each source, and build up the list of paths
//! to pass to rustc -L
//! 5. Call `cargo-rustc` with the results of the resolver zipped together with
//! the results of the `get`
//!
//! a. Topologically sort the dependencies
//! b. Compile each dependency in order, passing in the -L's pointing at each
//! previously compiled dependency
//!
use std::collections::HashMap;
use std::default::Default;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use core::registry::PackageRegistry;
use core::{Source, SourceId, PackageSet, Package, Target, PackageId};
use core::{Profile, TargetKind};
use core::resolver::Method;
use ops::{self, BuildOutput, ExecEngine};
use sources::{PathSource};
use util::config::{ConfigValue, Config};
use util::{CargoResult, internal, human, ChainError, profile};
/// Contains information about how a package should be compiled.
pub struct CompileOptions<'a> {
pub config: &'a Config,
/// Number of concurrent jobs to use.
pub jobs: Option<u32>,
/// The target platform to compile for (example: `i686-unknown-linux-gnu`).
pub target: Option<&'a str>,
/// Extra features to build for the root package
pub features: &'a [String],
/// Flag if the default feature should be built for the root package
pub no_default_features: bool,
/// Root package to build (if None it's the current one)
pub spec: Option<&'a str>,
/// Filter to apply to the root package to select which targets will be
/// built.
pub filter: CompileFilter<'a>,
/// Engine which drives compilation
pub exec_engine: Option<Arc<Box<ExecEngine>>>,
/// Whether this is a release build or not
pub release: bool,
/// Mode for this compile.
pub mode: CompileMode,
/// The specified target will be compiled with all the available arguments,
/// note that this only accounts for the *final* invocation of rustc
pub target_rustc_args: Option<&'a [String]>,
}
#[derive(Clone, Copy, PartialEq)]
pub enum CompileMode {
Test,
Build,
Bench,
Doc { deps: bool },
}
pub enum CompileFilter<'a> {
Everything,
Only {
lib: bool,
bins: &'a [String],
examples: &'a [String],
tests: &'a [String],
benches: &'a [String],
}
}
/// Compiles the package whose manifest lives at `manifest_path`.
///
/// Loads the package from disk via a `PathSource`, echoes any manifest
/// warnings on the shell, and then delegates to `compile_pkg`.
pub fn compile<'a>(manifest_path: &Path,
                   options: &CompileOptions<'a>)
                   -> CargoResult<ops::Compilation<'a>> {
    debug!("compile; manifest-path={}", manifest_path.display());
    let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(),
                                               options.config));
    try!(source.update());
    // TODO: Move this into PathSource
    let package = try!(source.root_package());
    debug!("loaded package; package={}", package);
    // Surface any warnings recorded while parsing the manifest.
    for key in package.manifest().warnings().iter() {
        try!(options.config.shell().warn(key))
    }
    compile_pkg(&package, Some(Box::new(source)), options)
}
pub fn compile_pkg<'a>(package: &Package,
source: Option<Box<Source + 'a>>,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
let CompileOptions { config, jobs, target, spec, features,
no_default_features, release, mode,
ref filter, ref exec_engine,
ref target_rustc_args } = *options;
let target = target.map(|s| s.to_string());
let features = features.iter().flat_map(|s| {
s.split(' ')
}).map(|s| s.to_string()).collect::<Vec<String>>();
if spec.is_some() && (no_default_features || features.len() > 0) {
return Err(human("features cannot be modified when the main package \
is not being built"))
}
if jobs == Some(0) {
return Err(human("jobs must be at least 1"))
}
let override_ids = try!(source_ids_from_config(config, package.root()));
let (packages, resolve_with_overrides, sources) = {
let mut registry = PackageRegistry::new(config);
if let Some(source) = source {
registry.preload(package.package_id().source_id(), source);
} else {
try!(registry.add_sources(&[package.package_id().source_id()
.clone()]));
}
// First, resolve the package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
let resolve = try!(ops::resolve_pkg(&mut registry, package));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
// overrides, etc.
let _p = profile::start("resolving w/ overrides...");
try!(registry.add_overrides(override_ids));
let method = Method::Required {
dev_deps: true, // TODO: remove this option?
features: &features,
uses_default_features: !no_default_features,
};
let resolved_with_overrides =
try!(ops::resolve_with_previous(&mut registry, package, method,
Some(&resolve), None));
let req: Vec<PackageId> = resolved_with_overrides.iter().map(|r| {
r.clone()
}).collect();
let packages = try!(registry.get(&req).chain_error(|| {
human("Unable to get packages from source")
}));
(packages, resolved_with_overrides, registry.move_sources())
};
let pkgid = match spec {
Some(spec) => try!(resolve_with_overrides.query(spec)),
None => package.package_id(),
};
let to_build = packages.iter().find(|p| p.package_id() == pkgid).unwrap();
let targets = try!(generate_targets(to_build, mode, filter, release));
let target_with_args = match *target_rustc_args {
Some(args) if targets.len() == 1 => {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustc_args = Some(args.to_vec());
Some((target, profile))
}
Some(_) => {
return Err(human("extra arguments to `rustc` can only be passed to \
one target, consider filtering\nthe package by \
passing e.g. `--lib` or `--bin NAME` to specify \
a single target"))
}
None => None,
};
let targets = target_with_args.as_ref().map(|&(t, ref p)| vec![(t, p)])
.unwrap_or(targets);
let ret = {
let _p = profile::start("compiling");
let mut build_config = try!(scrape_build_config(config, jobs, target));
build_config.exec_engine = exec_engine.clone();<|fim▁hole|>
try!(ops::compile_targets(&targets, to_build,
&PackageSet::new(&packages),
&resolve_with_overrides,
&sources,
config,
build_config,
to_build.manifest().profiles()))
};
return Ok(ret);
}
impl<'a> CompileFilter<'a> {
    /// Builds a filter from the command-line selection flags; when no
    /// specific target was requested, everything is compiled.
    pub fn new(lib_only: bool,
               bins: &'a [String],
               tests: &'a [String],
               examples: &'a [String],
               benches: &'a [String]) -> CompileFilter<'a> {
        if lib_only || !bins.is_empty() || !tests.is_empty() ||
           !examples.is_empty() || !benches.is_empty() {
            CompileFilter::Only {
                lib: lib_only, bins: bins, examples: examples, benches: benches,
                tests: tests,
            }
        } else {
            CompileFilter::Everything
        }
    }
    /// Returns true if `target` is selected by this filter.
    ///
    /// Library targets are matched by the `lib` flag alone, build scripts
    /// are never matched, and every other kind is matched by name against
    /// the corresponding requested-name list.
    pub fn matches(&self, target: &Target) -> bool {
        match *self {
            CompileFilter::Everything => true,
            CompileFilter::Only { lib, bins, examples, tests, benches } => {
                let list = match *target.kind() {
                    TargetKind::Bin => bins,
                    TargetKind::Test => tests,
                    TargetKind::Bench => benches,
                    TargetKind::Example => examples,
                    TargetKind::Lib(..) => return lib,
                    TargetKind::CustomBuild => return false,
                };
                list.iter().any(|x| *x == target.name())
            }
        }
    }
}
/// Given the configuration for a build, this function will generate all
/// target/profile combinations needed to be built.
fn generate_targets<'a>(pkg: &'a Package,
                        mode: CompileMode,
                        filter: &CompileFilter,
                        release: bool)
                        -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
    let profiles = pkg.manifest().profiles();
    // Base profiles: release/bench when --release was passed, dev/test otherwise.
    let build = if release {&profiles.release} else {&profiles.dev};
    let test = if release {&profiles.bench} else {&profiles.test};
    // Profile applied to the primary targets of this compile mode.
    let profile = match mode {
        CompileMode::Test => test,
        CompileMode::Bench => &profiles.bench,
        CompileMode::Build => build,
        CompileMode::Doc { .. } => &profiles.doc,
    };
    return match *filter {
        CompileFilter::Everything => {
            match mode {
                CompileMode::Bench => {
                    Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| {
                        (t, profile)
                    }).collect::<Vec<_>>())
                }
                CompileMode::Test => {
                    // Examples are compiled with the build profile even in
                    // test mode.
                    let mut base = pkg.targets().iter().filter(|t| {
                        t.tested()
                    }).map(|t| {
                        (t, if t.is_example() {build} else {profile})
                    }).collect::<Vec<_>>();
                    // Always compile the library if we're testing everything as
                    // it'll be needed for doctests
                    if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
                        if t.doctested() {
                            base.push((t, build));
                        }
                    }
                    Ok(base)
                }
                CompileMode::Build => {
                    Ok(pkg.targets().iter().filter(|t| {
                        t.is_bin() || t.is_lib()
                    }).map(|t| (t, profile)).collect())
                }
                CompileMode::Doc { .. } => {
                    Ok(pkg.targets().iter().filter(|t| t.documented())
                          .map(|t| (t, profile)).collect())
                }
            }
        }
        // Explicit --lib/--bin/--example/--test/--bench selection: look each
        // requested target up by name and fail on the first miss.
        CompileFilter::Only { lib, bins, examples, tests, benches } => {
            let mut targets = Vec::new();
            if lib {
                if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
                    targets.push((t, profile));
                } else {
                    return Err(human(format!("no library targets found")))
                }
            }
            {
                let mut find = |names: &[String], desc, kind, profile| {
                    for name in names {
                        let target = pkg.targets().iter().find(|t| {
                            t.name() == *name && *t.kind() == kind
                        });
                        let t = match target {
                            Some(t) => t,
                            None => return Err(human(format!("no {} target \
                                                              named `{}`",
                                                             desc, name))),
                        };
                        debug!("found {} `{}`", desc, name);
                        targets.push((t, profile));
                    }
                    Ok(())
                };
                try!(find(bins, "bin", TargetKind::Bin, profile));
                try!(find(examples, "example", TargetKind::Example, build));
                try!(find(tests, "test", TargetKind::Test, test));
                try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));
            }
            Ok(targets)
        }
    };
}
/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
fn source_ids_from_config(config: &Config, cur_path: &Path)
                          -> CargoResult<Vec<SourceId>> {
    let configs = try!(config.values());
    debug!("loaded config; configs={:?}", configs);
    // No `paths` key at all means no overrides are configured.
    let config_paths = match configs.get("paths") {
        Some(cfg) => cfg,
        None => return Ok(Vec::new())
    };
    let paths = try!(config_paths.list().chain_error(|| {
        internal("invalid configuration for the key `paths`")
    }));
    paths.iter().map(|&(ref s, ref p)| {
        // The path listed next to the string is the config file in which the
        // key was located, so we want to pop off the `.cargo/config` component
        // to get the directory containing the `.cargo` folder.
        p.parent().unwrap().parent().unwrap().join(s)
    }).filter(|p| {
        // Make sure we don't override the local package, even if it's in the
        // list of override paths.
        cur_path != &**p
    }).map(|p| SourceId::for_path(&p)).collect()
}
/// Parse all config files to learn about build configuration. Currently
/// configured options are:
///
/// * build.jobs
/// * target.$target.ar
/// * target.$target.linker
/// * target.$target.libfoo.metadata
fn scrape_build_config(config: &Config,
                       jobs: Option<u32>,
                       target: Option<String>)
                       -> CargoResult<ops::BuildConfig> {
    // Validate the `build.jobs` config value: must be a positive i64 that
    // fits in a u32.
    let cfg_jobs = match try!(config.get_i64("build.jobs")) {
        Some((n, p)) => {
            if n <= 0 {
                return Err(human(format!("build.jobs must be positive, \
                                          but found {} in {:?}", n, p)));
            } else if n >= u32::max_value() as i64 {
                return Err(human(format!("build.jobs is too large: \
                                          found {} in {:?}", n, p)));
            } else {
                Some(n as u32)
            }
        }
        None => None,
    };
    // Command-line -j wins over build.jobs; fall back to the CPU count.
    let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
    let mut base = ops::BuildConfig {
        jobs: jobs,
        requested_target: target.clone(),
        ..Default::default()
    };
    base.host = try!(scrape_target_config(config, &config.rustc_info().host));
    // When no --target is given, the target config mirrors the host config.
    base.target = match target.as_ref() {
        Some(triple) => try!(scrape_target_config(config, &triple)),
        None => base.host.clone(),
    };
    Ok(base)
}
fn scrape_target_config(config: &Config, triple: &str)
-> CargoResult<ops::TargetConfig> {
let key = format!("target.{}", triple);
let mut ret = ops::TargetConfig {
ar: try!(config.get_path(&format!("{}.ar", key))),
linker: try!(config.get_path(&format!("{}.linker", key))),
overrides: HashMap::new(),
};
let table = match try!(config.get_table(&key)) {
Some((table, _)) => table,
None => return Ok(ret),
};
for (lib_name, _) in table.into_iter() {
if lib_name == "ar" || lib_name == "linker" { continue }
let mut output = BuildOutput {
library_paths: Vec::new(),
library_links: Vec::new(),
cfgs: Vec::new(),
metadata: Vec::new(),
};
let key = format!("{}.{}", key, lib_name);
let table = try!(config.get_table(&key)).unwrap().0;
for (k, _) in table.into_iter() {
let key = format!("{}.{}", key, k);
match try!(config.get(&key)).unwrap() {
ConfigValue::String(v, path) => {
if k == "rustc-flags" {
let whence = format!("in `{}` (in {})", key,
path.display());
let (paths, links) = try!(
BuildOutput::parse_rustc_flags(&v, &whence)
);
output.library_paths.extend(paths.into_iter());
output.library_links.extend(links.into_iter());
} else {
output.metadata.push((k, v));
}
},
ConfigValue::List(a, p) => {
if k == "rustc-link-lib" {
output.library_links.extend(a.into_iter().map(|v| v.0));
} else if k == "rustc-link-search" {
output.library_paths.extend(a.into_iter().map(|v| {
PathBuf::from(&v.0)
}));
} else if k == "rustc-cfg" {
output.cfgs.extend(a.into_iter().map(|v| v.0));
} else {
try!(config.expected("string", &k,
ConfigValue::List(a, p)));
}
},
// technically could be a list too, but that's the exception to
// the rule...
cv => { try!(config.expected("string", &k, cv)); }
}
}
ret.overrides.insert(lib_name, output);
}
Ok(ret)
}<|fim▁end|> | build_config.release = release;
if let CompileMode::Doc { deps } = mode {
build_config.doc_all = deps;
} |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/*
* Geoide Composer, configuration tool for Geoide Viewer <|fim▁hole|>
import './startup.js';
import './jsonapi.js';
import './json_gv_api.js';
import './json_map_api.js';
import './parse_capabilities.js';<|fim▁end|> | * Copyright (C) 2016 IDgis
* See license:
* https://github.com/IDgis/geoide-admin/blob/master/LICENSE
*/ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# LICENSE
#
# Copyright (c) 2010-2017, GEM Foundation, G. Weatherill, M. Pagani,<|fim▁hole|>#
# The Hazard Modeller's Toolkit is free software: you can redistribute
# it and/or modify it under the terms of the GNU Affero General Public
#License as published by the Free Software Foundation, either version
#3 of the License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>
#
#DISCLAIMER
#
# The software Hazard Modeller's Toolkit (openquake.hmtk) provided herein
#is released as a prototype implementation on behalf of
# scientists and engineers working within the GEM Foundation (Global
#Earthquake Model).
#
# It is distributed for the purpose of open collaboration and in the
# hope that it will be useful to the scientific, engineering, disaster
# risk and software design communities.
#
# The software is NOT distributed as part of GEM's OpenQuake suite
# (http://www.globalquakemodel.org/openquake) and must be considered as a
# separate entity. The software provided herein is designed and implemented
# by scientific staff. It is not developed to the design standards, nor
# subject to same level of critical review by professional software
# developers, as GEM's OpenQuake software suite.
#
# Feedback and contribution to the software is welcome, and can be
# directed to the hazard scientific staff of the GEM Model Facility
# ([email protected]).
#
# The Hazard Modeller's Toolkit (openquake.hmtk) is therefore distributed WITHOUT
#ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
#FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
#for more details.
#
# The GEM Foundation, and the authors of the software, assume no
# liability for use of the software.<|fim▁end|> | # D. Monelli. |
<|file_name|>demo42.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC
# All rights reserved.
# Redistribution and use in source and binary forms, with or without<|fim▁hole|># 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
"""
@authors: Sergei Garbuzov
@status: Development
@version: 1.1.0
"""
import time
from pybvc.controller.controller import Controller
from pybvc.openflowdev.ofswitch import (OFSwitch,
FlowEntry,
Match,
Instruction,
SetMplsTTLAction,
DecMplsTTLAction,
OutputAction)
from pybvc.common.utils import load_dict_from_file
from pybvc.common.status import STATUS
from pybvc.common.constants import ETH_TYPE_MPLS_UCAST
def delete_flows(ofswitch, table_id, flow_ids):
    """Remove every flow in 'flow_ids' from table 'table_id' on 'ofswitch',
    reporting per-flow success or failure on stdout."""
    for fid in flow_ids:
        status = ofswitch.delete_flow(table_id, fid).get_status()
        if status.eq(STATUS.OK):
            print ("<<< Flow with id of '%s' successfully removed "
                   "from the Controller" % fid)
        else:
            print ("!!!Flow '%s' removal error, reason: %s" %
                   (fid, status.brief()))
def of_demo_42():
    """Demo 42: install two MPLS TTL flows (one that sets the TTL, one that
    decrements it) on an OpenFlow switch through the controller, then remove
    both flows.  Connection parameters are read from cfg.yml."""
    f = "cfg.yml"
    d = {}
    if(load_dict_from_file(f, d) is False):
        print("Config file '%s' read error: " % f)
        exit(0)
    try:
        ctrlIpAddr = d['ctrlIpAddr']
        ctrlPortNum = d['ctrlPortNum']
        ctrlUname = d['ctrlUname']
        ctrlPswd = d['ctrlPswd']
        nodeName = d['nodeName']
        rundelay = d['rundelay']
    except:
        print ("Failed to get Controller device attributes")
        exit(0)
    print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
    print ("<<< Demo 42 Start")
    print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
    ctrl = Controller(ctrlIpAddr, ctrlPortNum, ctrlUname, ctrlPswd)
    ofswitch = OFSwitch(ctrl, nodeName)
    print ("<<< 'Controller': %s, 'OpenFlow' switch: '%s'" %
           (ctrlIpAddr, nodeName))
    first_flow_id = 110
    # ---------------------------------------------------
    # First flow entry
    # ---------------------------------------------------
    table_id = 0
    flow_id = first_flow_id
    flow_name = "Modify MPLS TTL example1"
    priority = 900
    cookie = 1300
    match_in_port = 3
    match_eth_type = ETH_TYPE_MPLS_UCAST
    match_mpls_label = 567
    act_mod_mpls_ttl = 2
    act_out_port = 112
    print "\n"
    print ("<<< Set OpenFlow flow on the Controller")
    print (" Match: Input Port (%s)\n"
           " Ethernet Type (%s)\n"
           " MPLS Label (%s)" %
           (match_in_port,
            hex(match_eth_type),
            match_mpls_label))
    print (" Actions: Set MPLS TTL (%s)\n"
           " Output (%s)" %
           (act_mod_mpls_ttl, act_out_port))
    time.sleep(rundelay)
    # Allocate a placeholder for the Flow Entry
    flow_entry1 = FlowEntry()
    # Generic attributes of the Flow Entry
    flow_entry1.set_flow_table_id(table_id)
    flow_entry1.set_flow_name(flow_name)
    flow_entry1.set_flow_id(flow_id)
    flow_entry1.set_flow_cookie(cookie)
    flow_entry1.set_flow_priority(priority)
    flow_entry1.set_flow_hard_timeout(0)
    flow_entry1.set_flow_idle_timeout(0)
    # Instructions/Actions for the Flow Entry
    instruction = Instruction(instruction_order=0)
    action_order = 0
    action = SetMplsTTLAction(action_order)
    action.set_ttl(act_mod_mpls_ttl)
    instruction.add_apply_action(action)
    action_order += 1
    action = OutputAction(action_order)
    action.set_outport(act_out_port)
    instruction.add_apply_action(action)
    flow_entry1.add_instruction(instruction)
    # Match Fields for the Flow Entry
    match = Match()
    match.set_in_port(match_in_port)
    match.set_eth_type(match_eth_type)
    match.set_mpls_label(match_mpls_label)
    flow_entry1.add_match(match)
    print ("\n")
    print ("<<< Flow to send:")
    print flow_entry1.get_payload()
    time.sleep(rundelay)
    # Push the flow; on failure remove everything installed so far and bail.
    result = ofswitch.add_modify_flow(flow_entry1)
    status = result.get_status()
    if(status.eq(STATUS.OK)):
        print ("<<< Flow successfully added to the Controller")
    else:
        print ("\n")
        print ("!!!Demo terminated, reason: %s" % status.detailed())
        delete_flows(ofswitch, table_id, range(first_flow_id, flow_id + 1))
        exit(0)
    # ---------------------------------------------------
    # Second flow entry
    # ---------------------------------------------------
    table_id = 0
    flow_id += 1
    flow_name = "Modify MPLS TTL example2"
    priority = 900
    cookie = 1300
    match_in_port = 112
    match_eth_type = ETH_TYPE_MPLS_UCAST
    match_mpls_label = 567
    act_out_port = 3
    print "\n"
    print ("<<< Set OpenFlow flow on the Controller")
    print (" Match: Input Port (%s)\n"
           " Ethernet Type (%s)\n"
           " MPLS Label (%s)" %
           (match_in_port,
            hex(match_eth_type),
            match_mpls_label))
    print (" Actions: Decrement MPLS TTL\n"
           " Output (%s)" %
           (act_out_port))
    time.sleep(rundelay)
    # Allocate a placeholder for the Flow Entry
    flow_entry2 = FlowEntry()
    # Generic attributes of the Flow Entry
    flow_entry2.set_flow_table_id(table_id)
    flow_entry2.set_flow_name(flow_name)
    flow_entry2.set_flow_id(flow_id)
    flow_entry2.set_flow_cookie(cookie)
    flow_entry2.set_flow_priority(priority)
    flow_entry2.set_flow_hard_timeout(0)
    flow_entry2.set_flow_idle_timeout(0)
    # Instructions/Actions for the Flow Entry
    instruction = Instruction(instruction_order=0)
    action_order = 0
    action = DecMplsTTLAction(action_order)
    instruction.add_apply_action(action)
    action_order += 1
    action = OutputAction(action_order)
    action.set_outport(act_out_port)
    instruction.add_apply_action(action)
    flow_entry2.add_instruction(instruction)
    # Match Fields for the Flow Entry
    match = Match()
    match.set_in_port(match_in_port)
    match.set_eth_type(match_eth_type)
    match.set_mpls_label(match_mpls_label)
    flow_entry2.add_match(match)
    print ("\n")
    print ("<<< Flow to send:")
    print flow_entry2.get_payload()
    time.sleep(rundelay)
    result = ofswitch.add_modify_flow(flow_entry2)
    status = result.get_status()
    if(status.eq(STATUS.OK)):
        print ("<<< Flow successfully added to the Controller")
    else:
        print ("\n")
        print ("!!!Demo terminated, reason: %s" % status.detailed())
        delete_flows(ofswitch, table_id, range(first_flow_id, flow_id + 1))
        exit(0)
    # Cleanup: remove both demo flows from the controller and the switch.
    print ("\n")
    print ("<<< Delete flows from the Controller's cache "
           "and from the table '%s' on the '%s' node" % (table_id, nodeName))
    time.sleep(rundelay)
    delete_flows(ofswitch, table_id, range(first_flow_id, flow_id + 1))
    print ("\n")
    print (">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
    print (">>> Demo End")
    print (">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
if __name__ == "__main__":
of_demo_42()<|fim▁end|> | # modification, are permitted provided that the following conditions are met:
|
<|file_name|>other.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !windows
package windriver
import (<|fim▁hole|> "github.com/oakmound/oak/v3/shiny/driver/internal/errscreen"
"github.com/oakmound/oak/v3/shiny/screen"
)
// Main is called by the program's main function to run the graphical
// application.
//
// It calls f on the Screen, possibly in a separate goroutine, as some OS-
// specific libraries require being on 'the main thread'. It returns when f
// returns.
func Main(f func(screen.Screen)) {
	// This GOOS/GOARCH combination has no real windriver implementation, so
	// hand f a stub screen that reports the unsupported-platform error.
	f(errscreen.Stub(fmt.Errorf(
		"windriver: unsupported GOOS/GOARCH %s/%s", runtime.GOOS, runtime.GOARCH)))
}
"runtime"
|
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>extern crate openssl;
use openssl::error as openssl_err;
use postgres::error as postgres_err;
use rustc_serialize::json;
use std::{io, error, fmt};
/// Proton's own error type, to make return types consistent.
#[derive(Debug)]
pub enum Error {
    // Wrappers around errors raised by external crates.
    Io(io::Error),
    JsonEncode(json::EncoderError),
    JsonDecode(json::DecoderError),
    JsonParse(json::ParserError),
    Ssl(openssl_err::ErrorStack),
    Rsfml(String),
    Postgres(postgres_err::Error),
    PostgresConnection(postgres_err::ConnectError),
    // Invalid input / request errors.
    FileNotFound(String),
    FolderNotEmpty(String, usize),
    InvalidFileName,
    InvalidFrameDuration(u32),
    InvalidLayout(String),
    InvalidNumResults(usize),
    InvalidPatch(String),
    InvalidPermissionName(String),
    InvalidProjectName(String),
    InvalidPublicKey(String),
    InvalidSequenceName(String),
    InvalidSequenceSection(u32),
    InvalidVixenData(String),
    LoadProjectError,
    MissingPermissionArg,
    OffsetOutOfBounds(u32, u32),
    // Duplicate-entity errors.
    DuplicateUser(String, String),
    DuplicateSequence(String),
    UnsupportedFileType(String),
    // Entity-lookup failures.
    AdminNotFound,
    ChannelNotFound(u32),
    ChannelDataNotFound(u32, u32),
    FixtureNotFound(u32),
    LayoutNotFound(u32),
    ProjectNotFound(String),
    PublicKeyNotFound(String),
    SequenceNotFound(u32),
    UserNotFound,
    UnauthorizedAction,
    TodoErr,
}
impl error::Error for Error {
/// This is a short description of what the error is (usually similar to the name)
fn description(&self) -> &str {
match *self {
Error::Io(_) => "IO error occurred",
Error::JsonDecode(_) => "Json decoding error occurred",
Error::JsonEncode(_) => "Json encoding error occurred",
Error::JsonParse(_) => "Json parsing error occurred",
Error::Ssl(_) => "SSL error occured",
Error::Rsfml(_) => "Rsfml error occured",
Error::Postgres(_) => "Postgres error occured",
Error::PostgresConnection(_) => "Postgres connection error occured",
Error::FileNotFound(_) => "File not found",
Error::FolderNotEmpty(_, _) => "Root folder was not empty",
Error::InvalidFileName => "Invalid file name",
Error::InvalidFrameDuration(_) => "Invalid frame duration",
Error::InvalidLayout(_) => "Invalid layout",
Error::InvalidNumResults(_) => "Invalid number of results returned",
Error::InvalidPatch(_) => "Invalid patch file",
Error::InvalidPermissionName(_) => "Invalid permission name",
Error::InvalidProjectName(_) => "Invalid project name",
Error::InvalidPublicKey(_) => "Invalid public key",
Error::InvalidSequenceName(_) => "Invalid sequence name",
Error::InvalidSequenceSection(_) => "Invalid sequence section",
Error::InvalidVixenData(_) => "Invalid Vixen data",
Error::LoadProjectError => "Loading project failed",
Error::MissingPermissionArg => "Permission argument required but missing (seqid or secid)",
Error::OffsetOutOfBounds(_, _) => "Offset out of bouds",<|fim▁hole|> Error::UnsupportedFileType(_) => "Unsupported file type",
Error::AdminNotFound => "Admin not found",
Error::ChannelNotFound(_) => "Channel not found",
Error::ChannelDataNotFound(_, _) => "Channel data not found",
Error::FixtureNotFound(_) => "Fixture not found",
Error::LayoutNotFound(_) => "Layout not found",
Error::ProjectNotFound(_) => "Project not found",
Error::PublicKeyNotFound(_) => "Public key not found",
Error::SequenceNotFound(_) => "Sequence not found",
Error::UserNotFound => "User not found",
Error::UnauthorizedAction => "Unauthorized action",
Error::TodoErr => "Todo",
}
}
/// The source cause of the error. Usually None. Some of the Error types that
/// are wrappers around other errors (io, ssl, rsfml, etc.) return the source error
fn cause(&self) -> Option<&error::Error> {
match *self {
Error::Io(ref err) => Some(err),
Error::JsonDecode(ref err) => Some(err),
Error::JsonEncode(ref err) => Some(err),
Error::JsonParse(ref err) => Some(err),
Error::Ssl(ref err) => Some(err),
Error::Rsfml(_) => None,
Error::Postgres(ref err) => Some(err),
Error::PostgresConnection(ref err) => Some(err),
Error::FileNotFound(_) => None,
Error::FolderNotEmpty(_, _) => None,
Error::InvalidFileName => None,
Error::InvalidFrameDuration(_) => None,
Error::InvalidLayout(_) => None,
Error::InvalidNumResults(_) => None,
Error::InvalidPatch(_) => None,
Error::InvalidPermissionName(_) => None,
Error::InvalidProjectName(_) => None,
Error::InvalidPublicKey(_) => None,
Error::InvalidSequenceName(_) => None,
Error::InvalidSequenceSection(_) => None,
Error::InvalidVixenData(_) => None,
Error::LoadProjectError => None,
Error::MissingPermissionArg => None,
Error::OffsetOutOfBounds(_, _) => None,
Error::DuplicateUser(_, _) => None,
Error::DuplicateSequence(_) => None,
Error::UnsupportedFileType(_) => None,
Error::AdminNotFound => None,
Error::ChannelNotFound(_) => None,
Error::ChannelDataNotFound(_, _) => None,
Error::FixtureNotFound(_) => None,
Error::LayoutNotFound(_) => None,
Error::ProjectNotFound(_) => None,
Error::PublicKeyNotFound(_) => None,
Error::SequenceNotFound(_) => None,
Error::UserNotFound => None,
Error::UnauthorizedAction => None,
Error::TodoErr => None,
}
}
}
impl fmt::Display for Error {
// Put the error strings here. Makes them printable with {}
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::Io(ref err) => write!(f,
"IO error occurred: {}", error::Error::description(err)),
Error::JsonDecode(ref err) => write!(f,
"Json decoding error occurred: {}", err),
Error::JsonEncode(ref err) => write!(f,
"Json encoding error occurred: {}", err),
Error::JsonParse(ref err) => write!(f,
"Json parsing error occurred: {}", err),
Error::Ssl(ref err) => write!(f,
"SSL error occured: {}", error::Error::description(err)),
Error::Rsfml(ref description) => write!(f,
"Rsfml error: {}", description),
Error::Postgres(ref err) => write!(f,
"Postgress error occured: {}", err),
Error::PostgresConnection(ref err) => write!(f,
"Postgress connection error occured: {}", err),
Error::FileNotFound(ref path) => write!(f,
"File not found at path '{}'", path),
Error::FolderNotEmpty(ref root, count) => write!(f,
"{} was not empty: {} files exist", root, count),
Error::InvalidFileName => write!(f,
"File name provided is invalid and cannot be retrieved"),
Error::InvalidFrameDuration(ref duration) => write!(f,
"Invalid frame duration: {}", duration),
Error::InvalidLayout(ref description) => write!(f,
"The layout being read or decoded is invalid: {}", description),
Error::InvalidNumResults(ref num_results) => write!(f,
"Invalid number of results returned: {}", num_results),
Error::InvalidPatch(ref description) => write!(f,
"Invalid patch file: {}", description),
Error::InvalidPermissionName(ref name) => write!(f,
"Invalid permission name provided: {}", name),
Error::InvalidProjectName(ref name) => write!(f,
"Invalid project name provided: {}", name),
Error::InvalidPublicKey(ref key) => write!(f,
"Public key is invalid: {}", key),
Error::InvalidSequenceName(ref seq_name) => write!(f,
"Sequence name had invalid characters: {}", seq_name),
Error::InvalidVixenData(ref details) => write!(f,
"Invalid Vixen data provided: {}", details),
Error::InvalidSequenceSection(ref section) => write!(f,
"Invalid sequence section: {}", section),
Error::LoadProjectError => write!(f, "Loading project failed"),
Error::MissingPermissionArg => write!(f,
"Permission argument required but missing (did you forget seqid or secid?)"),
Error::OffsetOutOfBounds(ref offset, ref upper_bound) => write!(f,
"Offset {} not between 0 and {} (inclusive)", offset, upper_bound),
Error::DuplicateUser(ref key, ref user) => write!(f,
"Duplicate user '{}' or key '{}'", user, key),
Error::DuplicateSequence(ref name) => write!(f,
"Duplicate sequence with name '{}'", name),
Error::UnsupportedFileType(ref file_type) => write!(f,
"Unsupported file type: {}", file_type),
Error::AdminNotFound => write!(f, "Admin not found"),
Error::ChannelNotFound(ref chanid) => write!(f,
"Channel not found: {}", chanid),
Error::ChannelDataNotFound(ref seqid, ref chanid) => write!(f,
"Channel data not found. seqid: {}, chanid: {}", seqid, chanid),
Error::FixtureNotFound(ref fix_id) => write!(f,
"Fixture not found: {}", fix_id),
Error::LayoutNotFound(ref layout_id) => write!(f,
"Layout not found: {}", layout_id),
Error::ProjectNotFound(ref proj_name) => write!(f,
"Project not found: {}", proj_name),
Error::PublicKeyNotFound(ref key) => write!(f,
"PublicKey not found: {}", key),
Error::SequenceNotFound(ref name) => write!(f,
"Sequence not found: '{}'", name),
Error::UserNotFound => write!(f, "User not found"),
Error::UnauthorizedAction => write!(f, "Unauthorized action"),
Error::TodoErr => write!(f, "TodoErr"),
}
}
}<|fim▁end|> | Error::DuplicateUser(_, _) => "User already exists",
Error::DuplicateSequence(_) => "Sequence already exists", |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use crate::c_str;
use crate::graphic::{GlBuffer, GlProgram, GlProgramBuilder, GlUniform, GlVertexArray};
use crate::opengl;
use nalgebra::{Isometry3, Matrix4, Point3};
use opengl::types::{GLsizeiptr, GLuint};
use std::ffi::c_void;
use std::mem;
use std::rc::Rc;
mod layer;
mod read_write;
pub use layer::TerrainLayer;
pub use read_write::Metadata;
const TERRAIN_FRAGMENT_SHADER: &str = include_str!("../../shaders/terrain.fs");
const TERRAIN_VERTEX_SHADER: &str = include_str!("../../shaders/terrain.vs");
const TERRAIN_GEOMETRY_SHADER: &str = include_str!("../../shaders/terrain.gs");
const GRID_SIZE: u32 = 1023;
pub struct TerrainRenderer {
program: GlProgram,
u_transform: GlUniform<Matrix4<f64>>,
vertex_array: GlVertexArray,
#[allow(dead_code)]
buffer_position: GlBuffer,
#[allow(dead_code)]
buffer_indices: GlBuffer,
num_indices: usize,
terrain_layers: Vec<TerrainLayer>,
}
impl TerrainRenderer {
pub fn new<I>(gl: Rc<opengl::Gl>, terrain_paths: I) -> Self
where
I: Iterator,
I::Item: AsRef<std::path::Path>,
{
let program =
GlProgramBuilder::new_with_vertex_shader(Rc::clone(&gl), TERRAIN_VERTEX_SHADER)
.geometry_shader(TERRAIN_GEOMETRY_SHADER)
.fragment_shader(TERRAIN_FRAGMENT_SHADER)
.build();
// TODO(nnmm): If our initial position as returned by local_from_global is very different
// from (0, 0, 0), the first call to camera_changed() will be very resource intensive
let u_transform = GlUniform::new(&program, "world_to_gl", Matrix4::identity());
let vertex_array = GlVertexArray::new(Rc::clone(&gl));
let (buffer_position, buffer_indices, num_indices) =
Self::create_mesh(&program, &vertex_array, Rc::clone(&gl));
let terrain_layers = terrain_paths
.map(|p| TerrainLayer::new(&program, p, GRID_SIZE + 1).unwrap())
.collect();
Self {
program,
u_transform,
vertex_array,
buffer_position,
buffer_indices,
num_indices,
terrain_layers,
}
}
fn create_mesh(
program: &GlProgram,
vertex_array: &GlVertexArray,
gl: Rc<opengl::Gl>,
) -> (GlBuffer, GlBuffer, usize) {
let num_vertices = (GRID_SIZE + 1) * (GRID_SIZE + 1) * 3;
let mut vertices: Vec<i32> = Vec::with_capacity(num_vertices as usize);
for iy in 0..=GRID_SIZE as i32 {
for ix in 0..=GRID_SIZE as i32 {
vertices.push(ix);
vertices.push(iy);
vertices.push(0);
}
}
let flat_ix = |x: GLuint, y: GLuint| y * (GRID_SIZE + 1) as GLuint + x;
let num_indices = GRID_SIZE * GRID_SIZE * 3 * 2;
let mut indices: Vec<GLuint> = Vec::with_capacity(num_indices as usize);
for iy in 0..GRID_SIZE as GLuint {
for ix in 0..GRID_SIZE as GLuint {
// Two triangles = one quad
indices.push(flat_ix(ix, iy));
indices.push(flat_ix(ix + 1, iy));
indices.push(flat_ix(ix, iy + 1));
indices.push(flat_ix(ix + 1, iy));
indices.push(flat_ix(ix, iy + 1));
indices.push(flat_ix(ix + 1, iy + 1));
}
}
// See https://learnopengl.com/Getting-started/Hello-Triangle, section
// "Vertex Array Object" for background on buffers and vertex array objects
vertex_array.bind();
let buffer_position = GlBuffer::new_array_buffer(Rc::clone(&gl));
let buffer_indices = GlBuffer::new_element_array_buffer(Rc::clone(&gl));
buffer_position.bind();
unsafe {
program.gl.BufferData(
opengl::ARRAY_BUFFER,
(vertices.len() * mem::size_of::<i32>()) as GLsizeiptr,
vertices.as_ptr() as *const c_void,
opengl::STATIC_DRAW,
);
let pos_attr = gl.GetAttribLocation(program.id, c_str!("aPos")) as GLuint;
gl.EnableVertexAttribArray(pos_attr);
// aPos is an ivec3. If we wanted, we could make it an ivec2 or a single index.
gl.VertexAttribIPointer(
pos_attr,
3,
opengl::INT,
3 * mem::size_of::<i32>() as i32,
std::ptr::null(), // no offset
);
}
buffer_indices.bind();
unsafe {
program.gl.BufferData(
opengl::ELEMENT_ARRAY_BUFFER,
(indices.len() * mem::size_of::<GLuint>()) as GLsizeiptr,
indices.as_ptr() as *const c_void,
opengl::STATIC_DRAW,
);
}
(buffer_position, buffer_indices, indices.len())
}
pub fn camera_changed(&mut self, world_to_gl: &Matrix4<f64>, camera_to_world: &Isometry3<f64>) {
let camera_pos = Point3::from(camera_to_world.translation.vector);
self.terrain_layers
.iter_mut()
.for_each(|layer| layer.update(camera_pos));
self.u_transform.value = *world_to_gl;
}
pub fn draw(&mut self) {
if self.terrain_layers.is_empty() {
return;
}
unsafe {
self.vertex_array.bind();
// Switch from the point cloud rendering shader to terrain shader
self.program.gl.UseProgram(self.program.id);
// Activate wireframe mode
self.program
.gl
.PolygonMode(opengl::FRONT_AND_BACK, opengl::LINE);
self.u_transform.submit();
// If you want the terrain to have alpha < 1, put this before
// the DrawElements call:
// self.program.gl.Enable(opengl::BLEND);
// self.program
// .gl
// .BlendFunc(opengl::SRC_ALPHA, opengl::ONE_MINUS_SRC_ALPHA);
// And after:
// self.program.gl.Disable(opengl::BLEND);
for layer in self.terrain_layers.iter() {
// Set the terrain to be used with the next draw call
layer.submit();
// Draw the mesh using the current terrain data
self.program.gl.DrawElements(
opengl::TRIANGLES,
self.num_indices as i32,
opengl::UNSIGNED_INT,
std::ptr::null(), // no offset
);
}
}
}
pub fn local_from_global(&self) -> Option<Isometry3<f64>> {
self.terrain_layers
.first()
.map(|layer| *layer.terrain_from_world())
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>user.py<|end_file_name|><|fim▁begin|>from rest_framework import serializers<|fim▁hole|>
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('name', )
class UserSerializer(serializers.ModelSerializer):
"""
Serializes User objects
"""
profile = UserProfileSerializer()
class Meta:
model = User
fields = ('id', 'username', 'email', 'profile', )<|fim▁end|> | from django.contrib.auth.models import User
from dixit.account.models import UserProfile
|
<|file_name|>message.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package p2p
import (
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"net"
"sync"
"sync/atomic"
"time"
"github.com/soilcurrency/go-ethereum/rlp"
)
// Msg defines the structure of a p2p message.
//
// Note that a Msg can only be sent once since the Payload reader is
// consumed during sending. It is not possible to create a Msg and
// send it any number of times. If you want to reuse an encoded
// structure, encode the payload into a byte array and create a
// separate Msg with a bytes.Reader as Payload for each send.
type Msg struct {
Code uint64
Size uint32 // size of the paylod
Payload io.Reader
ReceivedAt time.Time
}
// Decode parses the RLP content of a message into
// the given value, which must be a pointer.
//
// For the decoding rules, please see package rlp.
func (msg Msg) Decode(val interface{}) error {
s := rlp.NewStream(msg.Payload, uint64(msg.Size))
if err := s.Decode(val); err != nil {
return newPeerError(errInvalidMsg, "(code %x) (size %d) %v", msg.Code, msg.Size, err)
}
return nil
}
func (msg Msg) String() string {
return fmt.Sprintf("msg #%v (%v bytes)", msg.Code, msg.Size)
}
// Discard reads any remaining payload data into a black hole.
func (msg Msg) Discard() error {
_, err := io.Copy(ioutil.Discard, msg.Payload)
return err
}
type MsgReader interface {
ReadMsg() (Msg, error)
}
type MsgWriter interface {
// WriteMsg sends a message. It will block until the message's
// Payload has been consumed by the other end.
//
// Note that messages can be sent only once because their
// payload reader is drained.
WriteMsg(Msg) error
}
// MsgReadWriter provides reading and writing of encoded messages.
// Implementations should ensure that ReadMsg and WriteMsg can be
// called simultaneously from multiple goroutines.
type MsgReadWriter interface {
MsgReader
MsgWriter<|fim▁hole|>// data should encode as an RLP list.
func Send(w MsgWriter, msgcode uint64, data interface{}) error {
size, r, err := rlp.EncodeToReader(data)
if err != nil {
return err
}
return w.WriteMsg(Msg{Code: msgcode, Size: uint32(size), Payload: r})
}
// SendItems writes an RLP with the given code and data elements.
// For a call such as:
//
// SendItems(w, code, e1, e2, e3)
//
// the message payload will be an RLP list containing the items:
//
// [e1, e2, e3]
//
func SendItems(w MsgWriter, msgcode uint64, elems ...interface{}) error {
return Send(w, msgcode, elems)
}
// netWrapper wraps a MsgReadWriter with locks around
// ReadMsg/WriteMsg and applies read/write deadlines.
type netWrapper struct {
rmu, wmu sync.Mutex
rtimeout, wtimeout time.Duration
conn net.Conn
wrapped MsgReadWriter
}
func (rw *netWrapper) ReadMsg() (Msg, error) {
rw.rmu.Lock()
defer rw.rmu.Unlock()
rw.conn.SetReadDeadline(time.Now().Add(rw.rtimeout))
return rw.wrapped.ReadMsg()
}
func (rw *netWrapper) WriteMsg(msg Msg) error {
rw.wmu.Lock()
defer rw.wmu.Unlock()
rw.conn.SetWriteDeadline(time.Now().Add(rw.wtimeout))
return rw.wrapped.WriteMsg(msg)
}
// eofSignal wraps a reader with eof signaling. the eof channel is
// closed when the wrapped reader returns an error or when count bytes
// have been read.
type eofSignal struct {
wrapped io.Reader
count uint32 // number of bytes left
eof chan<- struct{}
}
// note: when using eofSignal to detect whether a message payload
// has been read, Read might not be called for zero sized messages.
func (r *eofSignal) Read(buf []byte) (int, error) {
if r.count == 0 {
if r.eof != nil {
r.eof <- struct{}{}
r.eof = nil
}
return 0, io.EOF
}
max := len(buf)
if int(r.count) < len(buf) {
max = int(r.count)
}
n, err := r.wrapped.Read(buf[:max])
r.count -= uint32(n)
if (err != nil || r.count == 0) && r.eof != nil {
r.eof <- struct{}{} // tell Peer that msg has been consumed
r.eof = nil
}
return n, err
}
// MsgPipe creates a message pipe. Reads on one end are matched
// with writes on the other. The pipe is full-duplex, both ends
// implement MsgReadWriter.
func MsgPipe() (*MsgPipeRW, *MsgPipeRW) {
var (
c1, c2 = make(chan Msg), make(chan Msg)
closing = make(chan struct{})
closed = new(int32)
rw1 = &MsgPipeRW{c1, c2, closing, closed}
rw2 = &MsgPipeRW{c2, c1, closing, closed}
)
return rw1, rw2
}
// ErrPipeClosed is returned from pipe operations after the
// pipe has been closed.
var ErrPipeClosed = errors.New("p2p: read or write on closed message pipe")
// MsgPipeRW is an endpoint of a MsgReadWriter pipe.
type MsgPipeRW struct {
w chan<- Msg
r <-chan Msg
closing chan struct{}
closed *int32
}
// WriteMsg sends a messsage on the pipe.
// It blocks until the receiver has consumed the message payload.
func (p *MsgPipeRW) WriteMsg(msg Msg) error {
if atomic.LoadInt32(p.closed) == 0 {
consumed := make(chan struct{}, 1)
msg.Payload = &eofSignal{msg.Payload, msg.Size, consumed}
select {
case p.w <- msg:
if msg.Size > 0 {
// wait for payload read or discard
select {
case <-consumed:
case <-p.closing:
}
}
return nil
case <-p.closing:
}
}
return ErrPipeClosed
}
// ReadMsg returns a message sent on the other end of the pipe.
func (p *MsgPipeRW) ReadMsg() (Msg, error) {
if atomic.LoadInt32(p.closed) == 0 {
select {
case msg := <-p.r:
return msg, nil
case <-p.closing:
}
}
return Msg{}, ErrPipeClosed
}
// Close unblocks any pending ReadMsg and WriteMsg calls on both ends
// of the pipe. They will return ErrPipeClosed. Close also
// interrupts any reads from a message payload.
func (p *MsgPipeRW) Close() error {
if atomic.AddInt32(p.closed, 1) != 1 {
// someone else is already closing
atomic.StoreInt32(p.closed, 1) // avoid overflow
return nil
}
close(p.closing)
return nil
}
// ExpectMsg reads a message from r and verifies that its
// code and encoded RLP content match the provided values.
// If content is nil, the payload is discarded and not verified.
func ExpectMsg(r MsgReader, code uint64, content interface{}) error {
msg, err := r.ReadMsg()
if err != nil {
return err
}
if msg.Code != code {
return fmt.Errorf("message code mismatch: got %d, expected %d", msg.Code, code)
}
if content == nil {
return msg.Discard()
} else {
contentEnc, err := rlp.EncodeToBytes(content)
if err != nil {
panic("content encode error: " + err.Error())
}
if int(msg.Size) != len(contentEnc) {
return fmt.Errorf("message size mismatch: got %d, want %d", msg.Size, len(contentEnc))
}
actualContent, err := ioutil.ReadAll(msg.Payload)
if err != nil {
return err
}
if !bytes.Equal(actualContent, contentEnc) {
return fmt.Errorf("message payload mismatch:\ngot: %x\nwant: %x", actualContent, contentEnc)
}
}
return nil
}<|fim▁end|> | }
// Send writes an RLP-encoded message with the given code. |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>"""
Tests for open ended grading interfaces
./manage.py lms --settings test test lms/djangoapps/open_ended_grading
"""
import ddt
import json
import logging
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from edxmako.shortcuts import render_to_string
from edxmako.tests import mako_middleware_process_request
from mock import MagicMock, patch, Mock
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from config_models.models import cache
from courseware.tests import factories
from courseware.tests.helpers import LoginEnrollmentTestCase
from lms.djangoapps.lms_xblock.runtime import LmsModuleSystem
from student.roles import CourseStaffRole
from student.models import unique_id_for_user
from xblock_django.models import XBlockDisableConfig
from xmodule import peer_grading_module
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import TEST_DATA_MIXED_TOY_MODULESTORE, ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.xml_importer import import_course_from_xml
from xmodule.open_ended_grading_classes import peer_grading_service, controller_query_service
from xmodule.tests import test_util_open_ended
from open_ended_grading import staff_grading_service, views, utils
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
log = logging.getLogger(__name__)
class EmptyStaffGradingService(object):
"""
A staff grading service that does not return a problem list from get_problem_list.
Used for testing to see if error message for empty problem list is correctly displayed.
"""
def get_problem_list(self, course_id, user_id):
"""
Return a staff grading response that is missing a problem list key.
"""
return {'success': True, 'error': 'No problems found.'}
def make_instructor(course, user_email):
"""
Makes a given user an instructor in a course.
"""
CourseStaffRole(course.id).add_users(User.objects.get(email=user_email))
class StudentProblemListMockQuery(object):
"""
Mock controller query service for testing student problem list functionality.
"""
def get_grading_status_list(self, *args, **kwargs):
"""
Get a mock grading status list with locations from the open_ended test course.
@returns: grading status message dictionary.
"""
return {
"version": 1,
"problem_list": [
{
"problem_name": "Test1",
"grader_type": "IN",
"eta_available": True,
"state": "Finished",
"eta": 259200,
"location": "i4x://edX/open_ended/combinedopenended/SampleQuestion1Attempt"
},
{
"problem_name": "Test2",
"grader_type": "NA",
"eta_available": True,
"state": "Waiting to be Graded",
"eta": 259200,
"location": "i4x://edX/open_ended/combinedopenended/SampleQuestion"
},
{
"problem_name": "Test3",
"grader_type": "PE",
"eta_available": True,
"state": "Waiting to be Graded",
"eta": 259200,
"location": "i4x://edX/open_ended/combinedopenended/SampleQuestion454"
},
],
"success": True
}
class TestStaffGradingService(ModuleStoreTestCase, LoginEnrollmentTestCase):
'''
Check that staff grading service proxy works. Basically just checking the
access control and error handling logic -- all the actual work is on the
backend.
'''
MODULESTORE = TEST_DATA_MIXED_TOY_MODULESTORE
def setUp(self):
super(TestStaffGradingService, self).setUp()
self.student = '[email protected]'
self.instructor = '[email protected]'
self.password = 'foo'
self.create_account('u1', self.student, self.password)
self.create_account('u2', self.instructor, self.password)
self.activate_user(self.student)
self.activate_user(self.instructor)
self.course_id = SlashSeparatedCourseKey("edX", "toy", "2012_Fall")
self.location_string = self.course_id.make_usage_key('html', 'TestLocation').to_deprecated_string()
self.toy = modulestore().get_course(self.course_id)
make_instructor(self.toy, self.instructor)
self.mock_service = staff_grading_service.staff_grading_service()
self.logout()
def test_access(self):
"""
Make sure only staff have access.
"""
self.login(self.student, self.password)
# both get and post should return 404
for view_name in ('staff_grading_get_next', 'staff_grading_save_grade'):
url = reverse(view_name, kwargs={'course_id': self.course_id.to_deprecated_string()})
self.assert_request_status_code(404, url, method="GET")
self.assert_request_status_code(404, url, method="POST")
def test_get_next(self):
self.login(self.instructor, self.password)
url = reverse('staff_grading_get_next', kwargs={'course_id': self.course_id.to_deprecated_string()})
data = {'location': self.location_string}
response = self.assert_request_status_code(200, url, method="POST", data=data)
content = json.loads(response.content)
self.assertTrue(content['success'])
self.assertEquals(content['submission_id'], self.mock_service.cnt)
self.assertIsNotNone(content['submission'])
self.assertIsNotNone(content['num_graded'])
self.assertIsNotNone(content['min_for_ml'])
self.assertIsNotNone(content['num_pending'])
self.assertIsNotNone(content['prompt'])
self.assertIsNotNone(content['ml_error_info'])
self.assertIsNotNone(content['max_score'])
self.assertIsNotNone(content['rubric'])
def save_grade_base(self, skip=False):
self.login(self.instructor, self.password)
url = reverse('staff_grading_save_grade', kwargs={'course_id': self.course_id.to_deprecated_string()})
data = {'score': '12',
'feedback': 'great!',
'submission_id': '123',
'location': self.location_string,
'submission_flagged': "true",
'rubric_scores[]': ['1', '2']}
if skip:
data.update({'skipped': True})
response = self.assert_request_status_code(200, url, method="POST", data=data)
content = json.loads(response.content)
self.assertTrue(content['success'], str(content))
self.assertEquals(content['submission_id'], self.mock_service.cnt)
def test_save_grade(self):
self.save_grade_base(skip=False)
def test_save_grade_skip(self):
self.save_grade_base(skip=True)
def test_get_problem_list(self):
self.login(self.instructor, self.password)
url = reverse('staff_grading_get_problem_list', kwargs={'course_id': self.course_id.to_deprecated_string()})
data = {}
response = self.assert_request_status_code(200, url, method="POST", data=data)
content = json.loads(response.content)
self.assertTrue(content['success'])
self.assertEqual(content['problem_list'], [])
@patch('open_ended_grading.staff_grading_service._service', EmptyStaffGradingService())
def test_get_problem_list_missing(self):
"""
Test to see if a staff grading response missing a problem list is given the appropriate error.
Mock the staff grading service to enable the key to be missing.
"""<|fim▁hole|>
# Get a valid user object.
instructor = User.objects.get(email=self.instructor)
# Mock a request object.
request = Mock(
user=instructor,
)
# Get the response and load its content.
response = json.loads(staff_grading_service.get_problem_list(request, self.course_id.to_deprecated_string()).content)
# A valid response will have an "error" key.
self.assertTrue('error' in response)
# Check that the error text is correct.
self.assertIn("Cannot find", response['error'])
def test_save_grade_with_long_feedback(self):
"""
Test if feedback is too long save_grade() should return error message.
"""
self.login(self.instructor, self.password)
url = reverse('staff_grading_save_grade', kwargs={'course_id': self.course_id.to_deprecated_string()})
data = {
'score': '12',
'feedback': '',
'submission_id': '123',
'location': self.location_string,
'submission_flagged': "false",
'rubric_scores[]': ['1', '2']
}
feedback_fragment = "This is very long feedback."
data["feedback"] = feedback_fragment * (
(staff_grading_service.MAX_ALLOWED_FEEDBACK_LENGTH / len(feedback_fragment) + 1)
)
response = self.assert_request_status_code(200, url, method="POST", data=data)
content = json.loads(response.content)
# Should not succeed.
self.assertEquals(content['success'], False)
self.assertEquals(
content['error'],
"Feedback is too long, Max length is {0} characters.".format(
staff_grading_service.MAX_ALLOWED_FEEDBACK_LENGTH
)
)
class TestPeerGradingService(ModuleStoreTestCase, LoginEnrollmentTestCase):
'''
Check that staff grading service proxy works. Basically just checking the
access control and error handling logic -- all the actual work is on the
backend.
'''
def setUp(self):
super(TestPeerGradingService, self).setUp()
self.student = '[email protected]'
self.instructor = '[email protected]'
self.password = 'foo'
self.create_account('u1', self.student, self.password)
self.create_account('u2', self.instructor, self.password)
self.activate_user(self.student)
self.activate_user(self.instructor)
self.course_id = SlashSeparatedCourseKey("edX", "toy", "2012_Fall")
self.location_string = self.course_id.make_usage_key('html', 'TestLocation').to_deprecated_string()
self.toy = modulestore().get_course(self.course_id)
location = "i4x://edX/toy/peergrading/init"
field_data = DictFieldData({'data': "<peergrading/>", 'location': location, 'category': 'peergrading'})
self.mock_service = peer_grading_service.MockPeerGradingService()
self.system = LmsModuleSystem(
static_url=settings.STATIC_URL,
track_function=None,
get_module=None,
render_template=render_to_string,
replace_urls=None,
s3_interface=test_util_open_ended.S3_INTERFACE,
open_ended_grading_interface=test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
mixins=settings.XBLOCK_MIXINS,
error_descriptor_class=ErrorDescriptor,
descriptor_runtime=None,
)
self.descriptor = peer_grading_module.PeerGradingDescriptor(self.system, field_data, ScopeIds(None, None, None, None))
self.descriptor.xmodule_runtime = self.system
self.peer_module = self.descriptor
self.peer_module.peer_gs = self.mock_service
self.logout()
def test_get_next_submission_success(self):
data = {'location': self.location_string}
response = self.peer_module.get_next_submission(data)
content = response
self.assertTrue(content['success'])
self.assertIsNotNone(content['submission_id'])
self.assertIsNotNone(content['prompt'])
self.assertIsNotNone(content['submission_key'])
self.assertIsNotNone(content['max_score'])
def test_get_next_submission_missing_location(self):
data = {}
d = self.peer_module.get_next_submission(data)
self.assertFalse(d['success'])
self.assertEqual(d['error'], "Missing required keys: location")
def test_save_grade_success(self):
data = {
'rubric_scores[]': [0, 0],
'location': self.location_string,
'submission_id': 1,
'submission_key': 'fake key',
'score': 2,
'feedback': 'feedback',
'submission_flagged': 'false',
'answer_unknown': 'false',
'rubric_scores_complete': 'true'
}
qdict = MagicMock()
def fake_get_item(key):
return data[key]
qdict.__getitem__.side_effect = fake_get_item
qdict.getlist = fake_get_item
qdict.keys = data.keys
response = self.peer_module.save_grade(qdict)
self.assertTrue(response['success'])
def test_save_grade_missing_keys(self):
data = {}
d = self.peer_module.save_grade(data)
self.assertFalse(d['success'])
self.assertTrue(d['error'].find('Missing required keys:') > -1)
def test_is_calibrated_success(self):
data = {'location': self.location_string}
response = self.peer_module.is_student_calibrated(data)
self.assertTrue(response['success'])
self.assertTrue('calibrated' in response)
def test_is_calibrated_failure(self):
data = {}
response = self.peer_module.is_student_calibrated(data)
self.assertFalse(response['success'])
self.assertFalse('calibrated' in response)
def test_show_calibration_essay_success(self):
    """A calibration essay is returned with every expected field populated."""
    response = self.peer_module.show_calibration_essay({'location': self.location_string})
    self.assertTrue(response['success'])
    for field in ('submission_id', 'prompt', 'submission_key', 'max_score'):
        self.assertIsNotNone(response[field])
def test_show_calibration_essay_missing_key(self):
    """Requesting a calibration essay without a location fails cleanly."""
    response = self.peer_module.show_calibration_essay({})
    self.assertFalse(response['success'])
    self.assertEqual(response['error'], "Missing required keys: location")
def test_save_calibration_essay_success(self):
    """Saving a complete calibration essay succeeds and returns the actual score."""
    payload = {
        'rubric_scores[]': [0, 0],
        'location': self.location_string,
        'submission_id': 1,
        'submission_key': 'fake key',
        'score': 2,
        'feedback': 'feedback',
        'submission_flagged': 'false'
    }
    # Fake QueryDict delegating to the plain dict above.
    qdict = MagicMock()
    qdict.__getitem__.side_effect = payload.__getitem__
    qdict.getlist = payload.__getitem__
    qdict.keys = payload.keys
    response = self.peer_module.save_calibration_essay(qdict)
    self.assertTrue(response['success'])
    self.assertIn('actual_score', response)
def test_save_calibration_essay_missing_keys(self):
    """Saving a calibration essay without required keys fails and returns no score."""
    response = self.peer_module.save_calibration_essay({})
    self.assertFalse(response['success'])
    self.assertIn('Missing required keys:', response['error'])
    self.assertNotIn('actual_score', response)
def test_save_grade_with_long_feedback(self):
    """
    Test if feedback is too long save_grade() should return error message.
    """
    fragment = "This is very long feedback."
    # Repeat the fragment often enough to exceed the allowed length
    # (integer division, so +1 guarantees we go over the limit).
    repeats = staff_grading_service.MAX_ALLOWED_FEEDBACK_LENGTH / len(fragment) + 1
    payload = {
        'rubric_scores[]': [0, 0],
        'location': self.location_string,
        'submission_id': 1,
        'submission_key': 'fake key',
        'score': 2,
        'feedback': fragment * repeats,
        'submission_flagged': 'false',
        'answer_unknown': 'false',
        'rubric_scores_complete': 'true'
    }
    response = self.peer_module.save_grade(payload)
    # Should not succeed.
    self.assertFalse(response['success'])
    self.assertEqual(
        response['error'],
        "Feedback is too long, Max length is {0} characters.".format(
            staff_grading_service.MAX_ALLOWED_FEEDBACK_LENGTH
        )
    )
class TestPanel(ModuleStoreTestCase):
    """
    Run tests on the open ended panel
    """

    def setUp(self):
        super(TestPanel, self).setUp()
        self.user = factories.UserFactory()
        store = modulestore()
        # Import the 'open_ended' fixture course that contains the grading modules.
        course_items = import_course_from_xml(store, self.user.id, TEST_DATA_DIR, ['open_ended'])  # pylint: disable=maybe-no-member
        self.course = course_items[0]
        self.course_key = self.course.id

    def test_open_ended_panel(self):
        """
        Test to see if the peer grading module in the demo course is found
        @return:
        """
        found_module, peer_grading_module = views.find_peer_grading_module(self.course)
        self.assertTrue(found_module)

    # Replace the real controller query service with a local mock so the
    # problem list can be rendered without a grading-controller server.
    @patch(
        'open_ended_grading.utils.create_controller_query_service',
        Mock(
            return_value=controller_query_service.MockControllerQueryService(
                settings.OPEN_ENDED_GRADING_INTERFACE,
                utils.render_to_string
            )
        )
    )
    def test_problem_list(self):
        """
        Ensure that the problem list from the grading controller server can be rendered properly locally
        @return:
        """
        request = RequestFactory().get(
            reverse("open_ended_problems", kwargs={'course_id': self.course_key})
        )
        request.user = self.user
        # The mako middleware must run so that templates can be rendered.
        mako_middleware_process_request(request)
        response = views.student_problem_list(request, self.course.id.to_deprecated_string())
        self.assertRegexpMatches(response.content, "Here is a list of open ended problems for this course.")
class TestPeerGradingFound(ModuleStoreTestCase):
    """
    Test to see if peer grading modules can be found properly.
    """

    def setUp(self):
        super(TestPeerGradingFound, self).setUp()
        self.user = factories.UserFactory()
        store = modulestore()
        # This fixture course deliberately contains a peer grading module that
        # is not reachable through the course tree.
        course_items = import_course_from_xml(store, self.user.id, TEST_DATA_DIR, ['open_ended_nopath'])  # pylint: disable=maybe-no-member
        self.course = course_items[0]
        self.course_key = self.course.id

    def test_peer_grading_nopath(self):
        """
        The open_ended_nopath course contains a peer grading module with no path to it.
        Ensure that the exception is caught.
        """
        found, url = views.find_peer_grading_module(self.course)
        self.assertEqual(found, False)
class TestStudentProblemList(ModuleStoreTestCase):
    """
    Test if the student problem list correctly fetches and parses problems.
    """

    def setUp(self):
        super(TestStudentProblemList, self).setUp()
        # Load an open ended course with several problems.
        self.user = factories.UserFactory()
        store = modulestore()
        course_items = import_course_from_xml(store, self.user.id, TEST_DATA_DIR, ['open_ended'])  # pylint: disable=maybe-no-member
        self.course = course_items[0]
        self.course_key = self.course.id
        # Enroll our user in our course and make them an instructor.
        make_instructor(self.course, self.user.email)

    # The ORA query service is mocked so no external grading service is needed.
    @patch(
        'open_ended_grading.utils.create_controller_query_service',
        Mock(return_value=StudentProblemListMockQuery())
    )
    def test_get_problem_list(self):
        """
        Test to see if the StudentProblemList class can get and parse a problem list from ORA.
        Mock the get_grading_status_list function using StudentProblemListMockQuery.
        """
        # Initialize a StudentProblemList object.
        student_problem_list = utils.StudentProblemList(self.course.id, unique_id_for_user(self.user))
        # Get the initial problem list from ORA.
        success = student_problem_list.fetch_from_grading_service()
        # Should be successful, and we should have three problems. See mock class for details.
        self.assertTrue(success)
        self.assertEqual(len(student_problem_list.problem_list), 3)
        # See if the problem locations are valid.
        valid_problems = student_problem_list.add_problem_data(reverse('courses'))
        # One location is invalid, so we should now have two.
        self.assertEqual(len(valid_problems), 2)
        # Ensure that human names are being set properly.
        self.assertEqual(valid_problems[0]['grader_type_display_name'], "Instructor Assessment")
@ddt.ddt
class TestTabs(ModuleStoreTestCase):
    """
    Test that the open-ended grading tabs are enabled and disabled according
    to the XBlockDisableConfig state.
    """

    def setUp(self):
        super(TestTabs, self).setUp()
        # BUG FIX: ('combinedopenended') is just a parenthesized string, not a
        # one-element tuple -- the trailing comma is required.
        self.course = CourseFactory(advanced_modules=('combinedopenended',))
        # addCleanup accepts the callable and its args directly; no lambda needed.
        self.addCleanup(self._enable_xblock_disable_config, False)

    def _enable_xblock_disable_config(self, enabled):
        """ Enable or disable xblocks disable. """
        config = XBlockDisableConfig.current()
        config.enabled = enabled
        config.disabled_blocks = "\n".join(('combinedopenended', 'peergrading'))
        config.save()
        # Clear the cached configuration so the new state takes effect immediately.
        cache.clear()

    @ddt.data(
        views.StaffGradingTab,
        views.PeerGradingTab,
        views.OpenEndedGradingTab,
    )
    def test_tabs_enabled(self, tab):
        """All grading tabs are enabled while the disable config is off."""
        self.assertTrue(tab.is_enabled(self.course))

    @ddt.data(
        views.StaffGradingTab,
        views.PeerGradingTab,
        views.OpenEndedGradingTab,
    )
    def test_tabs_disabled(self, tab):
        """Disabling the blocks via config also disables the grading tabs."""
        self._enable_xblock_disable_config(True)
        self.assertFalse(tab.is_enabled(self.course))
<|file_name|>moveit_attached_object_demo.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
moveit_attached_object_demo.py - Version 0.1 2014-01-14
Attach an object to the end-effector and then move the arm to test collision avoidance.
Created for the Pi Robot Project: http://www.pirobot.org
Copyright (c) 2014 Patrick Goebel. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
http://www.gnu.org/licenses/gpl.html
"""
import rospy, sys
import thread, copy
import moveit_commander
from moveit_commander import RobotCommander, MoveGroupCommander, PlanningSceneInterface
from geometry_msgs.msg import PoseStamped, Pose
from moveit_msgs.msg import CollisionObject, AttachedCollisionObject, PlanningScene
from math import radians
from copy import deepcopy
class MoveItDemo:
    """Attach a box-shaped 'tool' to the arm's end-effector and move the arm
    through named poses so MoveIt! plans around the attached object.

    All of the work happens in __init__; constructing the object runs the demo.
    """

    def __init__(self):
        # Initialize the move_group API
        moveit_commander.roscpp_initialize(sys.argv)
        rospy.init_node('moveit_demo')
        # Construct the initial scene object
        scene = PlanningSceneInterface()
        # Pause for the scene to get ready
        rospy.sleep(1)
        # Initialize the MoveIt! commander for the right arm
        right_arm = MoveGroupCommander('right_arm')
        # Initialize the MoveIt! commander for the gripper
        right_gripper = MoveGroupCommander('right_gripper')
        # Get the name of the end-effector link
        end_effector_link = right_arm.get_end_effector_link()
        # Allow some leeway in position (meters) and orientation (radians)
        right_arm.set_goal_position_tolerance(0.01)
        right_arm.set_goal_orientation_tolerance(0.05)
        # Allow replanning to increase the odds of a solution
        right_arm.allow_replanning(True)
        # Allow 5 seconds per planning attempt
        right_arm.set_planning_time(5)
        # Remove leftover objects from a previous run
        scene.remove_attached_object(end_effector_link, 'tool')
        scene.remove_world_object('table')
        scene.remove_world_object('box1')
        scene.remove_world_object('box2')
        scene.remove_world_object('target')
        # Set the height of the table off the ground
        table_ground = 0.75
        # Set the length, width and height of the table
        table_size = [0.2, 0.7, 0.01]
        # Set the length, width and height of the object to attach
        tool_size = [0.3, 0.02, 0.02]
        # Create a pose for the tool relative to the end-effector
        p = PoseStamped()
        p.header.frame_id = end_effector_link
        # BUG FIX: removed a stray no-op statement 'scene.attach_mesh' that
        # accessed the method without ever calling it.
        # Place the end of the object within the grasp of the gripper
        p.pose.position.x = tool_size[0] / 2.0 - 0.025
        p.pose.position.y = 0.0
        p.pose.position.z = 0.0
        # Align the object with the gripper (straight out)
        p.pose.orientation.x = 0
        p.pose.orientation.y = 0
        p.pose.orientation.z = 0
        p.pose.orientation.w = 1
        # Attach the tool to the end-effector
        scene.attach_box(end_effector_link, 'tool', p, tool_size)
        # Add a floating table top
        table_pose = PoseStamped()
        table_pose.header.frame_id = 'base_footprint'
        table_pose.pose.position.x = 0.35
        table_pose.pose.position.y = 0.0
        table_pose.pose.position.z = table_ground + table_size[2] / 2.0
        table_pose.pose.orientation.w = 1.0
        scene.add_box('table', table_pose, table_size)
        # Update the current state
        right_arm.set_start_state_to_current_state()
        # Move the arm with the attached object to the 'straight_forward' position
        right_arm.set_named_target('straight_forward')
        right_arm.go()
        rospy.sleep(2)
        # Return the arm in the "resting" pose stored in the SRDF file
        right_arm.set_named_target('resting')
        right_arm.go()
        rospy.sleep(2)
        # Detach the tool and shut MoveIt! down cleanly.
        scene.remove_attached_object(end_effector_link, 'tool')
        moveit_commander.roscpp_shutdown()
        moveit_commander.os._exit(0)
# Run the demo when this file is executed as a script.
if __name__ == "__main__":
    MoveItDemo()
def unique_elems(data):
    """Return True if every element of *data* is unique, False otherwise.

    Elements must be hashable. Short-circuits and returns False on the
    first duplicate encountered.
    """
    # Idiom fix: a set expresses membership intent better than a dict whose
    # values were never used.
    seen = set()
    for number in data:
        if number in seen:
            return False
        seen.add(number)
    return True
<|file_name|>detectFlash.js<|end_file_name|><|fim▁begin|>// -----------------------------------------------------------------------------
// Globals
// Major version of Flash required
var requiredMajorVersion = 8;
// Minor version of Flash required
var requiredMinorVersion = 0;
// Minor version of Flash required
var requiredRevision = 0;
// the version of javascript supported
var jsVersion = 1.0;
// -----------------------------------------------------------------------------
var isIE = (navigator.appVersion.indexOf("MSIE") != -1) ? true : false;
var isWin = (navigator.appVersion.toLowerCase().indexOf("win") != -1) ? true : false;
var isOpera = (navigator.userAgent.indexOf("Opera") != -1) ? true : false;
jsVersion = 1.1;
// JavaScript helper required to detect Flash Player PlugIn version information.
// Returns "major.minor.revision" when the plugin is found, a bare version
// number for WebTV/MSN clients, or -1 when Flash cannot be detected.
// (Reconstructed: the original span was corrupted by extraction artifacts,
// and its implicit globals are now proper `var` locals.)
function JSGetSwfVer(i) {
    var flashVer = -1;
    // NS/Opera version >= 3 check for Flash plugin in plugin array
    if (navigator.plugins != null && navigator.plugins.length > 0) {
        if (navigator.plugins["Shockwave Flash 2.0"] || navigator.plugins["Shockwave Flash"]) {
            var swVer2 = navigator.plugins["Shockwave Flash 2.0"] ? " 2.0" : "";
            var flashDescription = navigator.plugins["Shockwave Flash" + swVer2].description;
            // Description looks like "Shockwave Flash 7.0 r24".
            var descArray = flashDescription.split(" ");
            var tempArrayMajor = descArray[2].split(".");
            var versionMajor = tempArrayMajor[0];
            var versionMinor = tempArrayMajor[1];
            // The revision token may sit in slot 3 or 4 depending on spacing.
            var tempArrayMinor;
            if (descArray[3] != "") {
                tempArrayMinor = descArray[3].split("r");
            } else {
                tempArrayMinor = descArray[4].split("r");
            }
            var versionRevision = tempArrayMinor[1] > 0 ? tempArrayMinor[1] : 0;
            flashVer = versionMajor + "." + versionMinor + "." + versionRevision;
        }
    }
    // MSN/WebTV 2.6 supports Flash 4
    else if (navigator.userAgent.toLowerCase().indexOf("webtv/2.6") != -1) flashVer = 4;
    // WebTV 2.5 supports Flash 3
    else if (navigator.userAgent.toLowerCase().indexOf("webtv/2.5") != -1) flashVer = 3;
    // older WebTV supports Flash 2
    else if (navigator.userAgent.toLowerCase().indexOf("webtv") != -1) flashVer = 2;
    return flashVer;
}
// When called with reqMajorVer, reqMinorVer, reqRevision returns true if that version or greater is available
function DetectFlashVer(reqMajorVer, reqMinorVer, reqRevision)
{
reqVer = parseFloat(reqMajorVer + "." + reqRevision);
// loop backwards through the versions until we find the newest version
for (i=25;i>0;i--) {
if (isIE && isWin && !isOpera) {
versionStr = VBGetSwfVer(i);
} else {
versionStr = JSGetSwfVer(i);
}
if (versionStr == -1 ) {
return false;
} else if (versionStr != 0) {
if(isIE && isWin && !isOpera) {
tempArray = versionStr.split(" ");
tempString = tempArray[1];
versionArray = tempString .split(",");
} else {
versionArray = versionStr.split(".");
}
versionMajor = versionArray[0];
versionMinor = versionArray[1];
versionRevision = versionArray[2];
versionString = versionMajor + "." + versionRevision; // 7.0r24 == 7.24
versionNum = parseFloat(versionString);
// is the major.revision >= requested major.revision AND the minor version >= requested minor
if ( (versionMajor > reqMajorVer) && (versionNum >= reqVer) ) {
return true;
} else {
return ((versionNum >= reqVer && versionMinor >= reqMinorVer) ? true : false );
}
}
}
}<|fim▁end|> | versionMinor = tempArrayMajor[1];
if ( descArray[3] != "" ) {
tempArrayMinor = descArray[3].split("r");
|
<|file_name|>mongo.go<|end_file_name|><|fim▁begin|>// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
package mongo // import "go.mongodb.org/mongo-driver/mongo"
import (
"context"
"errors"
"fmt"
"net"
"reflect"
"strconv"
"strings"
"go.mongodb.org/mongo-driver/mongo/options"
"go.mongodb.org/mongo-driver/x/bsonx"
"go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsontype"
"go.mongodb.org/mongo-driver/bson/primitive"
)
// Dialer is used to make network connections.
type Dialer interface {
	// DialContext matches net.Dialer's method, so a *net.Dialer satisfies this interface.
	DialContext(ctx context.Context, network, address string) (net.Conn, error)
}

// BSONAppender is an interface implemented by types that can marshal a
// provided type into BSON bytes and append those bytes to the provided []byte.
// The AppendBSON can return a non-nil error and non-nil []byte. The AppendBSON
// method may also write incomplete BSON to the []byte.
type BSONAppender interface {
	AppendBSON([]byte, interface{}) ([]byte, error)
}

// BSONAppenderFunc is an adapter function that allows any function that
// satisfies the AppendBSON method signature to be used where a BSONAppender is
// used.
type BSONAppenderFunc func([]byte, interface{}) ([]byte, error)

// AppendBSON implements the BSONAppender interface
func (baf BSONAppenderFunc) AppendBSON(dst []byte, val interface{}) ([]byte, error) {
	return baf(dst, val)
}

// MarshalError is returned when attempting to transform a value into a document
// results in an error.
type MarshalError struct {
	Value interface{} // the value that failed to marshal
	Err   error       // the underlying marshalling error
}

// Error implements the error interface.
func (me MarshalError) Error() string {
	return fmt.Sprintf("cannot transform type %s to a BSON Document: %v", reflect.TypeOf(me.Value), me.Err)
}

// Pipeline is a type that makes creating aggregation pipelines easier. It is a
// helper and is intended for serializing to BSON.
//
// Example usage:
//
//		mongo.Pipeline{
//			{{"$group", bson.D{{"_id", "$state"}, {"totalPop", bson.D{{"$sum", "$pop"}}}}}},
//			{{"$match", bson.D{{"totalPop", bson.D{{"$gte", 10*1000*1000}}}}}},
//		}
//
type Pipeline []bson.D
// transformAndEnsureID is a hack that makes it easy to get a RawValue as the _id value. This will
// be removed when we switch from using bsonx to bsoncore for the driver package.
//
// It marshals val through the registry, guarantees the resulting document has
// an _id element in the first position (generating an ObjectID when one is
// absent), and returns the document together with the decoded _id value.
func transformAndEnsureID(registry *bsoncodec.Registry, val interface{}) (bsonx.Doc, interface{}, error) {
	// TODO: performance is going to be pretty bad for bsonx.Doc here since we turn it into a []byte
	// only to turn it back into a bsonx.Doc. We can fix this post beta1 when we refactor the driver
	// package to use bsoncore.Document instead of bsonx.Doc.
	if registry == nil {
		registry = bson.NewRegistryBuilder().Build()
	}
	switch tt := val.(type) {
	case nil:
		return nil, nil, ErrNilDocument
	case bsonx.Doc:
		val = tt.Copy()
	case []byte:
		// Slight optimization so we'll just use MarshalBSON and not go through the codec machinery.
		val = bson.Raw(tt)
	}

	// TODO(skriptble): Use a pool of these instead.
	buf := make([]byte, 0, 256)
	b, err := bson.MarshalAppendWithRegistry(registry, buf, val)
	if err != nil {
		return nil, nil, MarshalError{Value: val, Err: err}
	}

	d, err := bsonx.ReadDoc(b)
	if err != nil {
		return nil, nil, err
	}

	var id interface{}

	idx := d.IndexOf("_id")
	var idElem bsonx.Elem
	switch idx {
	case -1:
		// No _id present: generate one and prepend it by growing the slice
		// and shifting every existing element right by one.
		idElem = bsonx.Elem{"_id", bsonx.ObjectID(primitive.NewObjectID())}
		d = append(d, bsonx.Elem{})
		copy(d[1:], d)
		d[0] = idElem
	default:
		// _id exists: shift the elements before it right and move it to the front.
		idElem = d[idx]
		copy(d[1:idx+1], d[0:idx])
		d[0] = idElem
	}

	// Round-trip the _id element through its BSON bytes so it can be decoded
	// into a Go value via the registry.
	idBuf := make([]byte, 0, 256)
	t, data, err := idElem.Value.MarshalAppendBSONValue(idBuf[:0])
	if err != nil {
		return nil, nil, err
	}

	err = bson.RawValue{Type: t, Value: data}.UnmarshalWithRegistry(registry, &id)
	if err != nil {
		return nil, nil, err
	}

	return d, id, nil
}
// transformAndEnsureIDv2 is a hack that makes it easy to get a RawValue as the _id value. This will
// be removed when we switch from using bsonx to bsoncore for the driver package.
//
// Like transformAndEnsureID, but producing a bsoncore.Document: it marshals
// val, prepends a generated ObjectID when no _id exists, and returns the
// document together with the decoded _id value.
func transformAndEnsureIDv2(registry *bsoncodec.Registry, val interface{}) (bsoncore.Document, interface{}, error) {
	if registry == nil {
		registry = bson.NewRegistryBuilder().Build()
	}
	switch tt := val.(type) {
	case nil:
		return nil, nil, ErrNilDocument
	case bsonx.Doc:
		val = tt.Copy()
	case []byte:
		// Slight optimization so we'll just use MarshalBSON and not go through the codec machinery.
		val = bson.Raw(tt)
	}

	// TODO(skriptble): Use a pool of these instead.
	doc := make(bsoncore.Document, 0, 256)
	doc, err := bson.MarshalAppendWithRegistry(registry, doc, val)
	if err != nil {
		return nil, nil, MarshalError{Value: val, Err: err}
	}

	var id interface{}

	value := doc.Lookup("_id")
	switch value.Type {
	case bsontype.Type(0):
		// No _id found (Lookup returned the zero Value): generate an ObjectID
		// and rebuild the document with the new element in the first position.
		value = bsoncore.Value{Type: bsontype.ObjectID, Data: bsoncore.AppendObjectID(nil, primitive.NewObjectID())}
		olddoc := doc
		doc = make(bsoncore.Document, 0, len(olddoc)+17) // type byte + _id + null byte + object ID
		_, doc = bsoncore.ReserveLength(doc)
		doc = bsoncore.AppendValueElement(doc, "_id", value)
		doc = append(doc, olddoc[4:]...) // remove the length
		doc = bsoncore.UpdateLength(doc, 0, int32(len(doc)))
	default:
		// We copy the bytes here to ensure that any bytes returned to the user aren't modified
		// later.
		buf := make([]byte, len(value.Data))
		copy(buf, value.Data)
		value.Data = buf
	}

	err = bson.RawValue{Type: value.Type, Value: value.Data}.UnmarshalWithRegistry(registry, &id)
	if err != nil {
		return nil, nil, err
	}

	return doc, id, nil
}
// transformDocument converts val into a bsonx.Doc, marshalling it through the
// given registry. A bsonx.Doc input is returned as a defensive copy without
// re-marshalling.
func transformDocument(registry *bsoncodec.Registry, val interface{}) (bsonx.Doc, error) {
	if doc, ok := val.(bsonx.Doc); ok {
		return doc.Copy(), nil
	}
	raw, err := transformBsoncoreDocument(registry, val)
	if err != nil {
		return nil, err
	}
	return bsonx.ReadDoc(raw)
}
// transformBsoncoreDocument marshals val into a bsoncore.Document using the
// provided registry, falling back to bson.DefaultRegistry when it is nil.
func transformBsoncoreDocument(registry *bsoncodec.Registry, val interface{}) (bsoncore.Document, error) {
	if registry == nil {
		registry = bson.DefaultRegistry
	}
	if val == nil {
		return nil, ErrNilDocument
	}
	// A []byte is already BSON: wrap it as bson.Raw so MarshalAppendWithRegistry
	// can skip the codec machinery.
	if raw, ok := val.([]byte); ok {
		val = bson.Raw(raw)
	}

	// TODO(skriptble): Use a pool of these instead.
	scratch := make([]byte, 0, 256)
	doc, err := bson.MarshalAppendWithRegistry(registry, scratch[:0], val)
	if err != nil {
		return nil, MarshalError{Value: val, Err: err}
	}
	return doc, nil
}
// ensureID guarantees that d contains an _id element, appending a freshly
// generated ObjectID when one is missing. It returns the (possibly extended)
// document and the _id value: the existing element when found, otherwise the
// new ObjectID.
func ensureID(d bsonx.Doc) (bsonx.Doc, interface{}) {
	var id interface{}

	elem, err := d.LookupElementErr("_id")
	switch err.(type) {
	case nil:
		// NOTE(review): the whole element (key and value) is returned as the
		// id here, not just the value — callers appear to rely on this.
		id = elem
	default:
		oid := primitive.NewObjectID()
		d = append(d, bsonx.Elem{"_id", bsonx.ObjectID(oid)})
		id = oid
	}
	return d, id
}
// ensureDollarKey validates that doc is a well-formed update document: it must
// be non-empty and its first key must start with '$' (an update operator).
func ensureDollarKey(doc bsonx.Doc) error {
	switch {
	case len(doc) == 0:
		return errors.New("update document must have at least one element")
	case !strings.HasPrefix(doc[0].Key, "$"):
		return errors.New("update document must contain key beginning with '$'")
	default:
		return nil
	}
}
// ensureDollarKeyv2 is the bsoncore.Document analogue of ensureDollarKey: the
// document must contain at least one element and its first key must begin
// with '$'.
func ensureDollarKeyv2(doc bsoncore.Document) error {
	first, err := doc.IndexErr(0)
	if err != nil {
		return errors.New("update document must have at least one element")
	}
	if strings.HasPrefix(first.Key(), "$") {
		return nil
	}
	return errors.New("update document must contain key beginning with '$'")
}
// ensureNoDollarKey validates that doc is a replacement document: its first
// key must NOT begin with '$' (that would make it an update document).
func ensureNoDollarKey(doc bsoncore.Document) error {
	if elem, err := doc.IndexErr(0); err == nil && strings.HasPrefix(elem.Key(), "$") {
		// BUG FIX: corrected grammar ("cannot contains" -> "cannot contain")
		// and the unterminated quote in the original message.
		return errors.New("replacement document cannot contain keys beginning with '$'")
	}

	return nil
}
// transformAggregatePipeline converts pipeline into a bsonx.Arr. The input may
// be a bsoncodec.ValueMarshaler that produces a BSON array, or any Go
// slice/array whose elements can each be marshalled into a document.
func transformAggregatePipeline(registry *bsoncodec.Registry, pipeline interface{}) (bsonx.Arr, error) {
	pipelineArr := bsonx.Arr{}
	switch t := pipeline.(type) {
	case bsoncodec.ValueMarshaler:
		btype, val, err := t.MarshalBSONValue()
		if err != nil {
			return nil, err
		}
		// The marshaled value must be a BSON array of pipeline stages.
		if btype != bsontype.Array {
			return nil, fmt.Errorf("ValueMarshaler returned a %v, but was expecting %v", btype, bsontype.Array)
		}
		err = pipelineArr.UnmarshalBSONValue(btype, val)
		if err != nil {
			return nil, err
		}
	default:
		val := reflect.ValueOf(t)
		if !val.IsValid() || (val.Kind() != reflect.Slice && val.Kind() != reflect.Array) {
			return nil, fmt.Errorf("can only transform slices and arrays into aggregation pipelines, but got %v", val.Kind())
		}
		// Marshal each stage individually and collect them into the array.
		for idx := 0; idx < val.Len(); idx++ {
			elem, err := transformDocument(registry, val.Index(idx).Interface())
			if err != nil {
				return nil, err
			}

			pipelineArr = append(pipelineArr, bsonx.Document(elem))
		}
	}

	return pipelineArr, nil
}
// transformAggregatePipelinev2 converts pipeline into a bsoncore.Document
// (BSON array of stages). The boolean result reports whether the final stage
// is $out or $merge — an "output" stage — which callers use to adjust
// cursor/read behavior.
func transformAggregatePipelinev2(registry *bsoncodec.Registry, pipeline interface{}) (bsoncore.Document, bool, error) {
	switch t := pipeline.(type) {
	case bsoncodec.ValueMarshaler:
		btype, val, err := t.MarshalBSONValue()
		if err != nil {
			return nil, false, err
		}
		if btype != bsontype.Array {
			return nil, false, fmt.Errorf("ValueMarshaler returned a %v, but was expecting %v", btype, bsontype.Array)
		}

		var hasOutputStage bool
		pipelineDoc := bsoncore.Document(val)
		values, _ := pipelineDoc.Values()

		// Only the last stage can be an output stage.
		if pipelineLen := len(values); pipelineLen > 0 {
			if finalDoc, ok := values[pipelineLen-1].DocumentOK(); ok {
				if elem, err := finalDoc.IndexErr(0); err == nil && (elem.Key() == "$out" || elem.Key() == "$merge") {
					hasOutputStage = true
				}
			}
		}

		return pipelineDoc, hasOutputStage, nil
	default:
		val := reflect.ValueOf(t)
		if !val.IsValid() || (val.Kind() != reflect.Slice && val.Kind() != reflect.Array) {
			return nil, false, fmt.Errorf("can only transform slices and arrays into aggregation pipelines, but got %v", val.Kind())
		}

		aidx, arr := bsoncore.AppendArrayStart(nil)
		var hasOutputStage bool
		valLen := val.Len()
		for idx := 0; idx < valLen; idx++ {
			doc, err := transformBsoncoreDocument(registry, val.Index(idx).Interface())
			if err != nil {
				return nil, false, err
			}

			// Check whether the final stage is $out/$merge.
			if idx == valLen-1 {
				if elem, err := doc.IndexErr(0); err == nil && (elem.Key() == "$out" || elem.Key() == "$merge") {
					hasOutputStage = true
				}
			}
			arr = bsoncore.AppendDocumentElement(arr, strconv.Itoa(idx), doc)
		}
		arr, _ = bsoncore.AppendArrayEnd(arr, aidx)
		return arr, hasOutputStage, nil
	}
}
// transformUpdateValue converts update into a bsoncore.Value that is either an
// embedded document (classic update / replacement) or an array of documents
// (update pipeline). When dollarKeysAllowed is true, each document's first key
// must start with '$' (update operators); when false it must not (replacement
// documents).
func transformUpdateValue(registry *bsoncodec.Registry, update interface{}, dollarKeysAllowed bool) (bsoncore.Value, error) {
	documentCheckerFunc := ensureDollarKeyv2
	if !dollarKeysAllowed {
		documentCheckerFunc = ensureNoDollarKey
	}

	var u bsoncore.Value
	var err error
	switch t := update.(type) {
	case nil:
		return u, ErrNilDocument
	case primitive.D, bsonx.Doc:
		u.Type = bsontype.EmbeddedDocument
		u.Data, err = transformBsoncoreDocument(registry, update)
		if err != nil {
			return u, err
		}

		return u, documentCheckerFunc(u.Data)
	case bson.Raw:
		// Already-encoded BSON: use the bytes directly.
		u.Type = bsontype.EmbeddedDocument
		u.Data = t
		return u, documentCheckerFunc(u.Data)
	case bsoncore.Document:
		u.Type = bsontype.EmbeddedDocument
		u.Data = t
		return u, documentCheckerFunc(u.Data)
	case []byte:
		u.Type = bsontype.EmbeddedDocument
		u.Data = t
		return u, documentCheckerFunc(u.Data)
	case bsoncodec.Marshaler:
		u.Type = bsontype.EmbeddedDocument
		u.Data, err = t.MarshalBSON()
		if err != nil {
			return u, err
		}

		return u, documentCheckerFunc(u.Data)
	case bsoncodec.ValueMarshaler:
		u.Type, u.Data, err = t.MarshalBSONValue()
		if err != nil {
			return u, err
		}
		// A ValueMarshaler may produce either form, but nothing else.
		if u.Type != bsontype.Array && u.Type != bsontype.EmbeddedDocument {
			return u, fmt.Errorf("ValueMarshaler returned a %v, but was expecting %v or %v", u.Type, bsontype.Array, bsontype.EmbeddedDocument)
		}
		return u, err
	default:
		val := reflect.ValueOf(t)
		if !val.IsValid() {
			return u, fmt.Errorf("can only transform slices and arrays into update pipelines, but got %v", val.Kind())
		}
		if val.Kind() != reflect.Slice && val.Kind() != reflect.Array {
			// Not a slice/array: treat it as a single update document.
			u.Type = bsontype.EmbeddedDocument
			u.Data, err = transformBsoncoreDocument(registry, update)
			if err != nil {
				return u, err
			}

			return u, documentCheckerFunc(u.Data)
		}

		// Slice/array: build an update pipeline, validating every stage.
		u.Type = bsontype.Array
		aidx, arr := bsoncore.AppendArrayStart(nil)
		valLen := val.Len()
		for idx := 0; idx < valLen; idx++ {
			doc, err := transformBsoncoreDocument(registry, val.Index(idx).Interface())
			if err != nil {
				return u, err
			}

			if err := documentCheckerFunc(doc); err != nil {
				return u, err
			}

			arr = bsoncore.AppendDocumentElement(arr, strconv.Itoa(idx), doc)
		}
		u.Data, _ = bsoncore.AppendArrayEnd(arr, aidx)
		return u, err
	}
}
func transformValue(registry *bsoncodec.Registry, val interface{}) (bsoncore.Value, error) {
if registry == nil {<|fim▁hole|> }
buf := make([]byte, 0, 256)
bsonType, bsonValue, err := bson.MarshalValueAppendWithRegistry(registry, buf[:0], val)
if err != nil {
return bsoncore.Value{}, MarshalError{Value: val, Err: err}
}
return bsoncore.Value{Type: bsonType, Data: bsonValue}, nil
}
// Build the aggregation pipeline for the CountDocument command.
func countDocumentsAggregatePipeline(registry *bsoncodec.Registry, filter interface{}, opts *options.CountOptions) (bsoncore.Document, error) {
filterDoc, err := transformBsoncoreDocument(registry, filter)
if err != nil {
return nil, err
}
aidx, arr := bsoncore.AppendArrayStart(nil)
didx, arr := bsoncore.AppendDocumentElementStart(arr, strconv.Itoa(0))
arr = bsoncore.AppendDocumentElement(arr, "$match", filterDoc)
arr, _ = bsoncore.AppendDocumentEnd(arr, didx)
index := 1
if opts != nil {
if opts.Skip != nil {
didx, arr = bsoncore.AppendDocumentElementStart(arr, strconv.Itoa(index))
arr = bsoncore.AppendInt64Element(arr, "$skip", *opts.Skip)
arr, _ = bsoncore.AppendDocumentEnd(arr, didx)
index++
}
if opts.Limit != nil {
didx, arr = bsoncore.AppendDocumentElementStart(arr, strconv.Itoa(index))
arr = bsoncore.AppendInt64Element(arr, "$limit", *opts.Limit)
arr, _ = bsoncore.AppendDocumentEnd(arr, didx)
index++
}
}
didx, arr = bsoncore.AppendDocumentElementStart(arr, strconv.Itoa(index))
iidx, arr := bsoncore.AppendDocumentElementStart(arr, "$group")
arr = bsoncore.AppendInt32Element(arr, "_id", 1)
iiidx, arr := bsoncore.AppendDocumentElementStart(arr, "n")
arr = bsoncore.AppendInt32Element(arr, "$sum", 1)
arr, _ = bsoncore.AppendDocumentEnd(arr, iiidx)
arr, _ = bsoncore.AppendDocumentEnd(arr, iidx)
arr, _ = bsoncore.AppendDocumentEnd(arr, didx)
return bsoncore.AppendArrayEnd(arr, aidx)
}<|fim▁end|> | registry = bson.DefaultRegistry
}
if val == nil {
return bsoncore.Value{}, ErrNilValue |
<|file_name|>test.routing.ts<|end_file_name|><|fim▁begin|>/**
* Created by Andreas Mann on 20.12.2016.
*/
import { Routes, RouterModule } from '@angular/router';
import { TestComponent } from './test.component';
import { TableComponent } from './table/table.component';<|fim▁hole|> path: '',
component: TestComponent,
children: [
{ path: 'table', component: TableComponent },
{ path: 'project', component: ProjectComponent },
]
}
];
export const routing = RouterModule.forChild(routes);<|fim▁end|> | import { ProjectComponent } from './project/project.component';
const routes: Routes = [
{ |
from django.conf.urls import include, url

from . import views

# Blog URL patterns: list view at the root, single posts by slug, and
# category listings under category/<slug>/.
# (Reconstructed: the original span was scrambled by extraction artifacts.)
urlpatterns = [
    url(r'^$', views.BlogListView.as_view(), name="blog_list"),
    url(r'^(?P<slug>[\w-]+)/$', views.BlogSingleView.as_view(), name="blog_single"),
    url(r'^category/(?P<slug>[\w-]+)/$', views.BlogCategoryView.as_view(), name="blog_category"),
]
<|file_name|>validator.go<|end_file_name|><|fim▁begin|>// Package govalidator is package of validators and sanitizers for strings, structs and collections.
package govalidator
import (
"encoding/json"
"fmt"
"net"
"net/url"
"reflect"
"regexp"
"sort"
"strings"
"unicode"
"unicode/utf8"
)
// fieldsRequiredByDefault controls whether un-annotated struct fields fail validation.
var fieldsRequiredByDefault bool

// SetFieldsRequiredByDefault causes validation to fail when struct fields
// do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`).
// This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
//     type exampleStruct struct {
//         Name  string ``
//         Email string `valid:"email"`
//     }
// This, however, will only fail when Email is empty or an invalid email address:
//     type exampleStruct2 struct {
//         Name  string `valid:"-"`
//         Email string `valid:"email"`
//     }
// Lastly, this will only fail when Email is an invalid email address but not when it's empty:
//     type exampleStruct2 struct {
//         Name  string `valid:"-"`
//         Email string `valid:"email,optional"`
//     }
func SetFieldsRequiredByDefault(value bool) {
	fieldsRequiredByDefault = value
}
// IsEmail check if the string is an email.
// Validation is purely regex-based (rxEmail); no DNS or deliverability checks.
func IsEmail(str string) bool {
	// TODO uppercase letters are not supported
	return rxEmail.MatchString(str)
}
// IsURL check if the string is an URL.
// Cheap structural rejections run first; the final word belongs to rxURL.
func IsURL(str string) bool {
	// Reject obvious non-URLs: empty, too long (>= 2083 chars, the classic IE
	// limit), too short, or starting with a dot.
	if str == "" || len(str) >= 2083 || len(str) <= 3 || strings.HasPrefix(str, ".") {
		return false
	}
	parsed, err := url.Parse(str)
	switch {
	case err != nil:
		return false
	case strings.HasPrefix(parsed.Host, "."):
		return false
	case parsed.Host == "" && (parsed.Path != "" && !strings.Contains(parsed.Path, ".")):
		return false
	}
	return rxURL.MatchString(str)
}
// IsRequestURL check if the string rawurl, assuming
// it was received in an HTTP request, is a valid
// URL conforming to RFC 3986.
func IsRequestURL(rawurl string) bool {
	// BUG FIX: the result variable was named "url", shadowing the imported
	// net/url package; renamed to "parsed".
	parsed, err := url.ParseRequestURI(rawurl)
	if err != nil {
		return false // Couldn't even parse the rawurl
	}
	if len(parsed.Scheme) == 0 {
		return false // No Scheme found
	}
	return true
}
// IsRequestURI check if the string rawurl, assuming
// it was received in an HTTP request, is an
// absolute URI or an absolute path.
func IsRequestURI(rawurl string) bool {
	if _, err := url.ParseRequestURI(rawurl); err != nil {
		return false
	}
	return true
}
// IsAlpha check if the string contains only letters (a-zA-Z). Empty string is valid.
// Validation of non-empty input is delegated to the rxAlpha pattern.
func IsAlpha(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxAlpha.MatchString(str)
}
// IsUTFLetter check if the string contains only unicode letter characters.
// Similar to IsAlpha but for all languages. Empty string is valid.
func IsUTFLetter(str string) bool {
	if IsNull(str) {
		return true
	}
	// A single non-letter rune disqualifies the whole string.
	return strings.IndexFunc(str, func(r rune) bool { return !unicode.IsLetter(r) }) == -1
}
// IsAlphanumeric check if the string contains only letters and numbers. Empty string is valid.
// Presumably ASCII-only (rxAlphanumeric) — see IsUTFLetterNumeric for the
// Unicode-aware variant.
func IsAlphanumeric(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxAlphanumeric.MatchString(str)
}
// IsUTFLetterNumeric check if the string contains only unicode letters and numbers. Empty string is valid.
func IsUTFLetterNumeric(str string) bool {
	if IsNull(str) {
		return true
	}
	// A rune that is neither a letter nor a number makes the string invalid.
	bad := func(r rune) bool { return !unicode.IsLetter(r) && !unicode.IsNumber(r) }
	return strings.IndexFunc(str, bad) == -1
}
// IsNumeric check if the string contains only numbers. Empty string is valid.
// Regex-based (rxNumeric); no sign or decimal point handling — see IsInt/IsFloat.
func IsNumeric(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxNumeric.MatchString(str)
}
// IsUTFNumeric check if the string contains only unicode numbers of any kind.
// Numbers can be 0-9 but also Fractions ¾, Roman Ⅸ and Hangzhou 〩.
// A single leading sign ('+' or '-') is tolerated. Empty string is valid.
func IsUTFNumeric(str string) bool {
	if IsNull(str) {
		return true
	}
	// A sign is only acceptable in the very first position.
	if strings.IndexAny(str, "+-") > 0 {
		return false
	}
	if len(str) > 1 {
		// Strip at most one leading sign before scanning the runes.
		str = strings.TrimPrefix(str, "-")
		str = strings.TrimPrefix(str, "+")
	}
	for _, c := range str {
		if !unicode.IsNumber(c) { // only number runes are allowed past the sign
			return false
		}
	}
	return true
}
// IsUTFDigit check if the string contains only unicode radix-10 decimal digits.
// A single leading sign ('+' or '-') is tolerated. Empty string is valid.
func IsUTFDigit(str string) bool {
	if IsNull(str) {
		return true
	}
	if strings.IndexAny(str, "+-") > 0 {
		// A sign anywhere but the first byte is invalid.
		return false
	}
	if len(str) > 1 {
		str = strings.TrimPrefix(str, "-")
		str = strings.TrimPrefix(str, "+")
	}
	// Valid iff every remaining rune is a decimal digit.
	return strings.IndexFunc(str, func(r rune) bool { return !unicode.IsDigit(r) }) == -1
}
// IsHexadecimal check if the string is a hexadecimal number.
// Pure regex delegation to the package-level rxHexadecimal.
func IsHexadecimal(str string) bool {
	return rxHexadecimal.MatchString(str)
}

// IsHexcolor check if the string is a hexadecimal color.
func IsHexcolor(str string) bool {
	return rxHexcolor.MatchString(str)
}

// IsRGBcolor check if the string is a valid RGB color in form rgb(RRR, GGG, BBB).
func IsRGBcolor(str string) bool {
	return rxRGBcolor.MatchString(str)
}
// IsLowerCase check if the string is lowercase. Empty string is valid.
func IsLowerCase(str string) bool {
	// A string is lowercase exactly when lowering it is a no-op.
	return IsNull(str) || str == strings.ToLower(str)
}
// IsUpperCase check if the string is uppercase. Empty string is valid.
func IsUpperCase(str string) bool {
	// A string is uppercase exactly when uppering it is a no-op.
	return IsNull(str) || str == strings.ToUpper(str)
}
// IsInt check if the string is an integer. Empty string is valid.
func IsInt(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxInt.MatchString(str)
}

// IsFloat check if the string is a float.
// Unlike IsInt, the empty string is NOT accepted here.
func IsFloat(str string) bool {
	return str != "" && rxFloat.MatchString(str)
}
// IsDivisibleBy check if the string is a number that's divisible by another.
// If second argument is not valid integer or zero, it's return false.
// Otherwise, if first argument is not valid integer or zero, it's return true (Invalid string converts to zero).
func IsDivisibleBy(str, num string) bool {
	// Parse errors are deliberately ignored: an unparsable str becomes 0,
	// and 0 is treated as divisible by anything (see contract above).
	f, _ := ToFloat(str)
	p := int64(f) // truncate the fractional part before the modulo
	q, _ := ToInt(num)
	if q == 0 {
		// Division by zero (or an unparsable num) is never "divisible".
		return false
	}
	return (p == 0) || (p%q == 0)
}
// IsNull check if the string is null (i.e. the empty string).
func IsNull(str string) bool {
	return str == ""
}
// IsByteLength check if the string's length (in bytes) falls in a range.
// Both bounds are inclusive.
func IsByteLength(str string, min, max int) bool {
	n := len(str)
	return min <= n && n <= max
}
// IsUUIDv3 check if the string is a UUID version 3.
// All four UUID checks below are pure regex delegations.
func IsUUIDv3(str string) bool {
	return rxUUID3.MatchString(str)
}

// IsUUIDv4 check if the string is a UUID version 4.
func IsUUIDv4(str string) bool {
	return rxUUID4.MatchString(str)
}

// IsUUIDv5 check if the string is a UUID version 5.
func IsUUIDv5(str string) bool {
	return rxUUID5.MatchString(str)
}

// IsUUID check if the string is a UUID (version 3, 4 or 5).
func IsUUID(str string) bool {
	return rxUUID.MatchString(str)
}
// IsCreditCard check if the string is a credit card.
// The candidate is stripped of all non-digit characters, matched against
// rxCreditCard, and finally verified with the Luhn checksum.
func IsCreditCard(str string) bool {
	r, _ := regexp.Compile("[^0-9]+") // constant valid pattern; error ignored
	sanitized := r.ReplaceAll([]byte(str), []byte(""))
	if !rxCreditCard.MatchString(string(sanitized)) {
		return false
	}
	// Luhn algorithm: walk digits right-to-left, doubling every second one.
	var sum int64
	var digit string
	var tmpNum int64
	var shouldDouble bool
	for i := len(sanitized) - 1; i >= 0; i-- {
		digit = string(sanitized[i:(i + 1)])
		tmpNum, _ = ToInt(digit)
		if shouldDouble {
			tmpNum *= 2
			if tmpNum >= 10 {
				// Digit-sum of a doubled value in 10..18 equals value%10 + 1.
				sum += ((tmpNum % 10) + 1)
			} else {
				sum += tmpNum
			}
		} else {
			sum += tmpNum
		}
		shouldDouble = !shouldDouble
	}
	// Valid Luhn numbers have a checksum divisible by 10.
	if sum%10 == 0 {
		return true
	}
	return false
}
// IsISBN10 check if the string is an ISBN version 10.
// Thin wrapper over IsISBN with the version pinned.
func IsISBN10(str string) bool {
	return IsISBN(str, 10)
}

// IsISBN13 check if the string is an ISBN version 13.
func IsISBN13(str string) bool {
	return IsISBN(str, 13)
}
// IsISBN check if the string is an ISBN (version 10 or 13).
// If version value is not equal to 10 or 13, it will be check both variants.
// Spaces and dashes are stripped before the format/checksum validation.
func IsISBN(str string, version int) bool {
	r, _ := regexp.Compile("[\\s-]+") // constant valid pattern; error ignored
	sanitized := r.ReplaceAll([]byte(str), []byte(""))
	var checksum int32
	var i int32
	if version == 10 {
		if !rxISBN10.MatchString(string(sanitized)) {
			return false
		}
		// ISBN-10 checksum: sum of (position * digit) must be divisible by 11;
		// a trailing 'X' stands for the value 10.
		for i = 0; i < 9; i++ {
			checksum += (i + 1) * int32(sanitized[i]-'0')
		}
		if sanitized[9] == 'X' {
			checksum += 10 * 10
		} else {
			checksum += 10 * int32(sanitized[9]-'0')
		}
		if checksum%11 == 0 {
			return true
		}
		return false
	} else if version == 13 {
		if !rxISBN13.MatchString(string(sanitized)) {
			return false
		}
		// ISBN-13 checksum: digits weighted alternately 1 and 3; the check
		// digit must make the total a multiple of 10.
		factor := []int32{1, 3}
		for i = 0; i < 12; i++ {
			checksum += factor[i%2] * int32(sanitized[i]-'0')
		}
		if (int32(sanitized[12]-'0'))-((10-(checksum%10))%10) == 0 {
			return true
		}
		return false
	}
	// Unknown version: accept either variant.
	return IsISBN(str, 10) || IsISBN(str, 13)
}
// IsJSON check if the string is valid JSON (note: uses json.Unmarshal).
func IsJSON(str string) bool {
	// RawMessage accepts any well-formed JSON value without decoding it.
	var raw json.RawMessage
	err := json.Unmarshal([]byte(str), &raw)
	return err == nil
}
// IsMultibyte check if the string contains one or more multibyte chars. Empty string is valid.
func IsMultibyte(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxMultibyte.MatchString(str)
}

// IsASCII check if the string contains ASCII chars only. Empty string is valid.
func IsASCII(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxASCII.MatchString(str)
}

// IsPrintableASCII check if the string contains printable ASCII chars only. Empty string is valid.
func IsPrintableASCII(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxPrintableASCII.MatchString(str)
}

// IsFullWidth check if the string contains any full-width chars. Empty string is valid.
func IsFullWidth(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxFullWidth.MatchString(str)
}

// IsHalfWidth check if the string contains any half-width chars. Empty string is valid.
func IsHalfWidth(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxHalfWidth.MatchString(str)
}

// IsVariableWidth check if the string contains a mixture of full and half-width chars. Empty string is valid.
func IsVariableWidth(str string) bool {
	if IsNull(str) {
		return true
	}
	// Both character classes must be present for a "variable width" string.
	return rxHalfWidth.MatchString(str) && rxFullWidth.MatchString(str)
}

// IsBase64 check if a string is base64 encoded.
// Note: unlike the checks above, the empty string is NOT special-cased here.
func IsBase64(str string) bool {
	return rxBase64.MatchString(str)
}
// IsFilePath check is a string is Win or Unix file path and returns it's type.
// The second return value is one of the package constants Win, Unix or Unknown.
func IsFilePath(str string) (bool, int) {
	if rxWinPath.MatchString(str) {
		//check windows path limit see:
		// http://msdn.microsoft.com/en-us/library/aa365247(VS.85).aspx#maxpath
		// NOTE(review): str[3:] assumes rxWinPath only matches strings with a
		// drive prefix of at least 3 chars ("C:\") — confirm against the regex.
		if len(str[3:]) > 32767 {
			return false, Win
		}
		return true, Win
	} else if rxUnixPath.MatchString(str) {
		return true, Unix
	}
	return false, Unknown
}
// IsDataURI checks if a string is base64 encoded data URI such as an image.
// The input must contain a comma separating the "data:...;base64" header
// from the payload; the whole payload is then validated as base64.
func IsDataURI(str string) bool {
	// SplitN guards against the original panic on inputs without a comma,
	// and keeps commas inside the payload attached to the payload.
	dataURI := strings.SplitN(str, ",", 2)
	if len(dataURI) < 2 {
		return false
	}
	if !rxDataURI.MatchString(dataURI[0]) {
		return false
	}
	return IsBase64(dataURI[1])
}
// IsISO3166Alpha2 checks if a string is valid two-letter country code.
// Case-sensitive linear lookup against the package's ISO3166List table.
func IsISO3166Alpha2(str string) bool {
	for _, entry := range ISO3166List {
		if str == entry.Alpha2Code {
			return true
		}
	}
	return false
}

// IsISO3166Alpha3 checks if a string is valid three-letter country code.
// Case-sensitive linear lookup against the package's ISO3166List table.
func IsISO3166Alpha3(str string) bool {
	for _, entry := range ISO3166List {
		if str == entry.Alpha3Code {
			return true
		}
	}
	return false
}
// IsIP checks if a string is either IP version 4 or 6.
func IsIP(str string) bool {
	return net.ParseIP(str) != nil
}

// IsIPv4 check if the string is an IP version 4.
func IsIPv4(str string) bool {
	// ParseIP accepts both families; the dot distinguishes the v4 text form.
	return net.ParseIP(str) != nil && strings.Contains(str, ".")
}

// IsIPv6 check if the string is an IP version 6.
func IsIPv6(str string) bool {
	// Any textual IPv6 address contains at least one colon.
	return net.ParseIP(str) != nil && strings.Contains(str, ":")
}
// IsMAC check if a string is valid MAC address.
// Possible MAC formats:
// 01:23:45:67:89:ab
// 01:23:45:67:89:ab:cd:ef
// 01-23-45-67-89-ab
// 01-23-45-67-89-ab-cd-ef
// 0123.4567.89ab
// 0123.4567.89ab.cdef
func IsMAC(str string) bool {
	// Delegate entirely to the stdlib parser; only the error matters.
	if _, err := net.ParseMAC(str); err != nil {
		return false
	}
	return true
}
// IsMongoID check if the string is a valid hex-encoded representation of a MongoDB ObjectId.
// An ObjectId is exactly 24 hexadecimal characters.
func IsMongoID(str string) bool {
	return rxHexadecimal.MatchString(str) && (len(str) == 24)
}

// IsLatitude check if a string is valid latitude.
func IsLatitude(str string) bool {
	return rxLatitude.MatchString(str)
}

// IsLongitude check if a string is valid longitude.
func IsLongitude(str string) bool {
	return rxLongitude.MatchString(str)
}
// ValidateStruct use tags for fields.
// It walks the exported fields of s (a struct, or a pointer/interface to
// one) and runs typeCheck on each. Field errors are accumulated into an
// Errors value instead of stopping at the first failure, so the caller
// gets the complete picture in one pass.
func ValidateStruct(s interface{}) (bool, error) {
	if s == nil {
		return true, nil
	}
	result := true
	var err error
	val := reflect.ValueOf(s)
	if val.Kind() == reflect.Interface || val.Kind() == reflect.Ptr {
		val = val.Elem()
	}
	// we only accept structs
	if val.Kind() != reflect.Struct {
		return false, fmt.Errorf("function only accepts structs; got %s", val.Kind())
	}
	var errs Errors
	for i := 0; i < val.NumField(); i++ {
		valueField := val.Field(i)
		typeField := val.Type().Field(i)
		if typeField.PkgPath != "" {
			continue // Private field
		}
		resultField, err := typeCheck(valueField, typeField)
		if err != nil {
			errs = append(errs, err)
		}
		result = result && resultField
	}
	if len(errs) > 0 {
		err = errs
	}
	return result, err
}
// parseTag splits a struct field's tag into its
// comma-separated options.
func parseTag(tag string) tagOptions {
	// strings.Split is the idiomatic spelling of SplitN(tag, ",", -1).
	return tagOptions(strings.Split(tag, ","))
}
// isValidTag reports whether s may be used as a validator tag name:
// non-empty, and consisting only of letters, digits, or a fixed set of
// punctuation. Backslash and quote chars are reserved and therefore invalid.
func isValidTag(s string) bool {
	if s == "" {
		return false
	}
	const allowed = "!#$%&()*+-./:<=>?@[]^_{|}~ "
	for _, r := range s {
		if !strings.ContainsRune(allowed, r) && !unicode.IsLetter(r) && !unicode.IsDigit(r) {
			return false
		}
	}
	return true
}
// IsSSN will validate the given string as a U.S. Social Security Number
// in the dashed form "AAA-GG-SSSS" (exactly 11 characters).
func IsSSN(str string) bool {
	// The length guard also rejects the empty string, so no separate
	// str == "" check is needed.
	if len(str) != 11 {
		return false
	}
	return rxSSN.MatchString(str)
}
// ByteLength check string's length (in bytes) against params[0] (min) and
// params[1] (max); any other parameter count fails.
func ByteLength(str string, params ...string) bool {
	if len(params) != 2 {
		return false
	}
	min, _ := ToInt(params[0])
	max, _ := ToInt(params[1])
	return len(str) >= int(min) && len(str) <= int(max)
}
// StringMatches checks if a string matches a given pattern.
// Exactly one parameter (the pattern) is expected; anything else fails.
func StringMatches(s string, params ...string) bool {
	if len(params) != 1 {
		return false
	}
	return Matches(s, params[0])
}
// StringLength check string's length in runes (so multi-byte characters
// count once) against params[0] (min) and params[1] (max).
func StringLength(str string, params ...string) bool {
	if len(params) != 2 {
		return false
	}
	n := utf8.RuneCountInString(str)
	min, _ := ToInt(params[0])
	max, _ := ToInt(params[1])
	return n >= int(min) && n <= int(max)
}
// contains reports whether the parsed tag options include optionName
// as an exact, whole-option match.
func (opts tagOptions) contains(optionName string) bool {
	for _, opt := range opts {
		if opt == optionName {
			return true
		}
	}
	return false
}
// checkRequired decides whether an *empty* field value is acceptable.
// It is only reached from typeCheck once the value is known to be empty:
// fields tagged "required" always fail here, and when
// fieldsRequiredByDefault is set, any field not tagged "optional" fails too.
func checkRequired(v reflect.Value, t reflect.StructField, options tagOptions) (bool, error) {
	if options.contains("required") {
		err := fmt.Errorf("non zero value required")
		return false, Error{t.Name, err}
	} else if fieldsRequiredByDefault && !options.contains("optional") {
		err := fmt.Errorf("All fields are required to at least have one validation defined")
		return false, Error{t.Name, err}
	}
	// not required and empty is valid
	return true, nil
}
// typeCheck validates a single struct field value v (described by t) against
// the validators named in its `valid:"..."` tag. Scalars and strings are run
// through the validator maps; maps, slices, arrays, pointers, interfaces and
// nested structs are handled recursively.
func typeCheck(v reflect.Value, t reflect.StructField) (bool, error) {
	if !v.IsValid() {
		return false, nil
	}
	tag := t.Tag.Get(tagName)
	// Check if the field should be ignored
	switch tag {
	case "":
		if !fieldsRequiredByDefault {
			return true, nil
		}
		err := fmt.Errorf("All fields are required to at least have one validation defined")
		return false, Error{t.Name, err}
	case "-":
		return true, nil
	}
	options := parseTag(tag)
	// Custom (user-registered) validators run first and short-circuit the
	// whole check: the function returns right after the first custom match.
	for i := range options {
		tagOpt := options[i]
		if ok := isValidTag(tagOpt); !ok {
			continue
		}
		if validatefunc, ok := CustomTypeTagMap[tagOpt]; ok {
			options = append(options[:i], options[i+1:]...) // we found our custom validator, so remove it from the options
			if result := validatefunc(v.Interface()); !result {
				return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", fmt.Sprint(v), tagOpt)}
			}
			return true, nil
		}
	}
	if isEmptyValue(v) {
		// an empty value is not validated, check only required
		return checkRequired(v, t, options)
	}
	switch v.Kind() {
	case reflect.Bool,
		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
		reflect.Float32, reflect.Float64,
		reflect.String:
		// for each tag option check the map of validator functions
		for i := range options {
			tagOpt := options[i]
			negate := false
			// Check wether the tag looks like '!something' or 'something'
			if len(tagOpt) > 0 && tagOpt[0] == '!' {
				tagOpt = string(tagOpt[1:])
				negate = true
			}
			if ok := isValidTag(tagOpt); !ok {
				err := fmt.Errorf("Unknown Validator %s", tagOpt)
				return false, Error{t.Name, err}
			}
			// Check for param validators
			// (validators written as name(arg1|arg2), matched by regex).
			for key, value := range ParamTagRegexMap {
				ps := value.FindStringSubmatch(tagOpt)
				if len(ps) > 0 {
					if validatefunc, ok := ParamTagMap[key]; ok {
						switch v.Kind() {
						case reflect.String:
							field := fmt.Sprint(v) // make value into string, then validate with regex
							if result := validatefunc(field, ps[1:]...); !result && !negate || result && negate {
								var err error
								if !negate {
									err = fmt.Errorf("%s does not validate as %s", field, tagOpt)
								} else {
									err = fmt.Errorf("%s does validate as %s", field, tagOpt)
								}
								return false, Error{t.Name, err}
							}
						default:
							//Not Yet Supported Types (Fail here!)
							err := fmt.Errorf("Validator %s doesn't support kind %s", tagOpt, v.Kind())
							return false, Error{t.Name, err}
						}
					}
				}
			}
			// Plain (parameterless) validators from the global TagMap.
			if validatefunc, ok := TagMap[tagOpt]; ok {
				switch v.Kind() {
				case reflect.String:
					field := fmt.Sprint(v) // make value into string, then validate with regex
					if result := validatefunc(field); !result && !negate || result && negate {
						var err error
						if !negate {
							err = fmt.Errorf("%s does not validate as %s", field, tagOpt)
						} else {
							err = fmt.Errorf("%s does validate as %s", field, tagOpt)
						}
						return false, Error{t.Name, err}
					}
				default:
					//Not Yet Supported Types (Fail here!)
					err := fmt.Errorf("Validator %s doesn't support kind %s for value %v", tagOpt, v.Kind(), v)
					return false, Error{t.Name, err}
				}
			}
		}
		return true, nil
	case reflect.Map:
		if v.Type().Key().Kind() != reflect.String {
			return false, &UnsupportedTypeError{v.Type()}
		}
		// Sort keys so validation (and first-error reporting) is deterministic.
		var sv stringValues
		sv = v.MapKeys()
		sort.Sort(sv)
		result := true
		for _, k := range sv {
			resultItem, err := ValidateStruct(v.MapIndex(k).Interface())
			if err != nil {
				return false, err
			}
			result = result && resultItem
		}
		return result, nil
	case reflect.Slice:
		result := true
		for i := 0; i < v.Len(); i++ {
			var resultItem bool
			var err error
			// Non-struct elements re-enter typeCheck with the same tag;
			// struct elements are validated via their own tags.
			if v.Index(i).Kind() != reflect.Struct {
				resultItem, err = typeCheck(v.Index(i), t)
				if err != nil {
					return false, err
				}
			} else {
				resultItem, err = ValidateStruct(v.Index(i).Interface())
				if err != nil {
					return false, err
				}
			}
			result = result && resultItem
		}
		return result, nil
	case reflect.Array:
		result := true
		for i := 0; i < v.Len(); i++ {
			var resultItem bool
			var err error
			if v.Index(i).Kind() != reflect.Struct {
				resultItem, err = typeCheck(v.Index(i), t)
				if err != nil {
					return false, err
				}
			} else {
				resultItem, err = ValidateStruct(v.Index(i).Interface())
				if err != nil {
					return false, err
				}
			}
			result = result && resultItem
		}
		return result, nil
	case reflect.Interface:
		// If the value is an interface then encode its element
		if v.IsNil() {
			return true, nil
		}
		return ValidateStruct(v.Interface())
	case reflect.Ptr:
		// If the value is a pointer then check its element
		if v.IsNil() {
			return true, nil
		}
		return typeCheck(v.Elem(), t)
	case reflect.Struct:
		return ValidateStruct(v.Interface())
	default:
		return false, &UnsupportedTypeError{v.Type()}
	}
}
func isEmptyValue(v reflect.Value) bool {
switch v.Kind() {
case reflect.String, reflect.Array:
return v.Len() == 0
case reflect.Map, reflect.Slice:
return v.Len() == 0 || v.IsNil()
case reflect.Bool:
return !v.Bool()
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return v.Int() == 0
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return v.Uint() == 0
case reflect.Float32, reflect.Float64:
return v.Float() == 0
case reflect.Interface, reflect.Ptr:
return v.IsNil()
}
return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()).Interface())
}
// ErrorByField returns error for specified field of the struct
// validated by ValidateStruct or empty string if there are no errors
// or this field doesn't exists or doesn't have any errors.
func ErrorByField(e error, field string) string {
	if e != nil {
		// A missing key in the map naturally yields "".
		return ErrorsByField(e)[field]
	}
	return ""
}
// ErrorsByField returns map of errors of the struct validated
// by ValidateStruct or empty map if there are no errors.
func ErrorsByField(e error) map[string]string {
m := make(map[string]string)
if e == nil {
return m
}
// prototype for ValidateStruct
switch e.(type) {
case Error:
m[e.(Error).Name] = e.(Error).Err.Error()<|fim▁hole|> }
return m
}
// Error returns string equivalent for reflect.Type
func (e *UnsupportedTypeError) Error() string {
return "validator: unsupported type: " + e.Type.String()
}
func (sv stringValues) Len() int { return len(sv) }
func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
func (sv stringValues) Less(i, j int) bool { return sv.get(i) < sv.get(j) }
func (sv stringValues) get(i int) string { return sv[i].String() }<|fim▁end|> | case Errors:
for _, item := range e.(Errors).Errors() {
m[item.(Error).Name] = item.(Error).Err.Error()
} |
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.