prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core';
import {OnInit, AfterContentInit, AfterViewInit} from '@angular/core';
@Component({
selector: 'yw-app',
template: `
<div class="container-fluid">
<yw-messages [messages]="logMessages">
<header>
<h2>Messages Logged</h2>
</header>
<footer><|fim▁hole|> `
})
export class AppComponent implements OnInit,
AfterContentInit, AfterViewInit {
logMessages: string[] = [];
count = 0;
ngOnInit() {
this.log('ngOnInit');
}
ngAfterContentInit() {
this.log('ngAfterContentInit');
}
ngAfterViewInit() {
this.log('ngAfterViewInit');
}
log(message: string) {
this.logMessages.push(`${++this.count}: ${message}`);
}
}<|fim▁end|> | <p>--- End of messages</p>
</footer>
</yw-messages>
</div> |
<|file_name|>routes.py<|end_file_name|><|fim▁begin|>fin = open("routesRaw.txt", "r")
routes = {}
<|fim▁hole|> operationId = xs[2]
stepNumber = int(xs[3].replace("\n", ""))
if (routeId in routes):
routes[routeId].append(operationId)
else:
routes[routeId] = []
routes[routeId].append(operationId)
if (len(routes[routeId]) != stepNumber):
input("err!")
fin.close()
fout = open("routes.txt", "w")
for routeId in routes:
rowStr = "RTE\t" + routeId + "\t"
for op in routes[routeId]:
rowStr += "," + op
fout.write(rowStr + "\n")
fout.close()<|fim▁end|> | for x in fin:
xs = x.split("\t")
routeId = xs[1]
|
<|file_name|>dnos9.py<|end_file_name|><|fim▁begin|># This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport.
required: true
port:
description:
- Specifies the port to use when building the connection to the remote
device.
required: false
default: 22
username:
description:
- User to authenticate the SSH session to the remote device. If the
value is not specified in the task, the value of environment variable
ANSIBLE_NET_USERNAME will be used instead.
required: false
password:
description:
- Password to authenticate the SSH session to the remote device. If the
value is not specified in the task, the value of environment variable
ANSIBLE_NET_PASSWORD will be used instead.
required: false
default: null
ssh_keyfile:
description:
- Path to an ssh key used to authenticate the SSH session to the remote
device. If the value is not specified in the task, the value of
environment variable ANSIBLE_NET_SSH_KEYFILE will be used instead.
required: false
authorize:<|fim▁hole|> is not specified in the task, the value of environment variable
ANSIBLE_NET_AUTHORIZE will be used instead.
required: false
default: no
choices: ['yes', 'no']
auth_pass:
description:
- Specifies the password to use if required to enter privileged mode
on the remote device. If I(authorize) is false, then this argument
does nothing. If the value is not specified in the task, the value of
environment variable ANSIBLE_NET_AUTH_PASS will be used instead.
required: false
default: none
timeout:
description:
- Specifies idle timeout (in seconds) for the connection. Useful if the
console freezes before continuing. For example when saving
configurations.
required: false
default: 10
provider:
description:
- Convenience method that allows all M(dnos9) arguments to be passed as
a dict object. All constraints (required, choices, etc) must be
met either by individual arguments or values in this dict.
required: false
default: null
"""<|fim▁end|> | description:
- Instructs the module to enter priviledged mode on the remote device
before sending any commands. If not specified, the device will
attempt to excecute all commands in non-priviledged mode. If the value |
<|file_name|>Layout.js<|end_file_name|><|fim▁begin|>// @flow
/* eslint-disable react/no-danger */
// $FlowMeteor
import { find, propEq } from "ramda";
import { Link } from "react-router";
import Split, {
Left,
Right,
} from "../../../components/@primitives/layout/split";
import Meta from "../../../components/shared/meta";
import AddToCart from "../../../components/giving/add-to-cart";
type ILayout = {
account: Object,
};
const Layout = ({ account }: ILayout) => (
<div>
<Split nav classes={["background--light-primary"]}>
<Meta
title={account.name}
description={account.summary}
image={
account.images
? find(propEq("fileLabel", "2:1"), account.images).cloudfront
: account.image
}
meta={[{ property: "og:type", content: "article" }]}
/>
<Right
background={
account.images
? find(propEq("fileLabel", "2:1"), account.images).cloudfront
: account.image
}
mobile
/>
<Right
background={<|fim▁hole|> mobile={false}
/>
</Split>
<Left scroll classes={["background--light-primary"]}>
<Link
to="/give/now"
className={
"locked-top locked-left soft-double@lap-and-up soft " +
"h7 text-dark-secondary plain visuallyhidden@handheld"
}
>
<i
className="icon-arrow-back soft-half-right display-inline-block"
style={{ verticalAlign: "middle" }}
/>
<span
className="display-inline-block"
style={{ verticalAlign: "middle", marginTop: "5px" }}
>
Back
</span>
</Link>
<div className="soft@lap-and-up soft-double-top@lap-and-up">
<div className="soft soft-double-bottom soft-double-top@lap-and-up">
<h2>{account.name}</h2>
<div dangerouslySetInnerHTML={{ __html: account.description }} />
</div>
</div>
<div className="background--light-secondary">
<div className="constrain-copy soft-double@lap-and-up">
<div className="soft soft-double-bottom soft-double-top@lap-and-up">
<AddToCart accounts={[account]} donate />
</div>
</div>
</div>
</Left>
</div>
);
export default Layout;<|fim▁end|> | account.images
? find(propEq("fileLabel", "2:1"), account.images).cloudfront
: account.image
} |
<|file_name|>unwind-tup.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// except according to those terms.
// error-pattern:fail
fn fold_local() -> @~[int]{
fail2!();
}
fn main() {
let _lss = (fold_local(), 0);
}<|fim▁end|> | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed |
<|file_name|>tone_gen.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from sys import argv, stderr<|fim▁hole|>e.g.: {program} 64000 442.0 5
""".format(program=argv[0])
if len(argv) < 3 or len(argv) > 4 :
print(usage, file = stderr)
exit(1)
A4 = 0
sample_rate = 0
octaves = 8
try:
A4 = float(argv[2])
except:
print("Error, invalid argument: Freq. must be a number!", file = stderr)
print(usage, file = stderr)
exit(1)
try:
sample_rate = int(argv[1])
except:
print("Error, invalid argument: Sample rate must be an integer!", \
file = stderr)
print(usage, file = stderr)
exit(1)
if len(argv) == 4 :
try:
octaves = int(argv[3])
except:
print("Error, invalid argument: Octaves must be an integer!", \
file = stderr)
print(usage, file = stderr)
exit(1)
freq_ratio = 2**(1/12)
base_freq = A4/(freq_ratio**57)
periods = [round(sample_rate/(2*base_freq*freq_ratio**t)) \
for t in range(0, 12*octaves)]
print("uint16_t tone_periods[{ntones}] = {{".format(ntones=12*octaves))
for o in range(0, octaves):
print('\t', end='')
for i in range(0, 12):
print("{period}, ".format(period=periods[12*o+i]), end='')
print('')
print("};")<|fim▁end|> |
usage = \
"""
Usage: {program} <sample rate> <A4 freq.> [octaves=8] |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
SSL_DISVBLE = True
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
SQLALCHEMY_RECORD_QUERIES = True
MAIL_SERVER = 'smtp.qq.com'
MAIL_PORT = 25
MAIL_USE_TLS = True
MAIL_USE_SSL = False
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
FLASKY_MAIL_SUBJECT_PREFIX = '[Flasky]'
FLASKY_MAIL_SENDER = 'Flasky Admin <[email protected]>'
FLASKY_ADMIN = os.environ.get('FLASKY_ADMIN')
FLASKY_POSTS_PER_PAGE = 20
FLASKY_FOLLOWERS_PER_PAGE = 25<|fim▁hole|> FLASKY_COMMENTS_PER_PAGE = 25
FLASKY_SLOW_DB_QUERY_TIME = 0.5
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
WTF_CSRF_ENABLED = False
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
@classmethod
def init_app(cls, app):
Config.init_app(app)
# email errors to the administrators
import logging
from logging.handlers import SMTPHandler
credentials = None
secure = None
if getattr(cls, 'MAIL_USERNAME', None) is not None:
credentials = (cls.MAIL_USERNAME, cls.MAIL_PASSWORD)
if getattr(cls, 'MAIL_USE_TLS', None):
secure = ()
mail_handler = SMTPHandler(
mailhost=(cls.MAIL_SERVER, cls.MAIL_PORT),
fromaddr=cls.FLASKY_MAIL_SENDER,
toaddrs=[cls.FLASKY_ADMIN],
subject=cls.FLASKY_MAIL_SUBJECT_PREFIX + ' Application Error',
credentials=credentials,
secure=secure)
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
class HerokuConfig(ProductionConfig):
SSL_DISABLE = bool(os.environ.get('SSL_DISABLE'))
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# handle proxy server headers
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
# log to stderr
import logging
from logging import StreamHandler
file_handler = StreamHandler()
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'heroku': HerokuConfig,
'default': DevelopmentConfig
}<|fim▁end|> | |
<|file_name|>ExcelException.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015 SK holdings Co., Ltd. All rights reserved.
* This software is the confidential and proprietary information of SK holdings.
* You shall not disclose such confidential information and shall use it only in
* accordance with the terms of the license agreement you entered into with SK holdings.
* (http://www.eclipse.org/legal/epl-v10.html)
*/
package nexcore.alm.common.excel;
import nexcore.alm.common.exception.BaseException;
/**
* Excel import/export 과정에서 발생할 수 있는 예외상황
*
* @author indeday
*
*/
public class ExcelException extends BaseException {
/**
* serialVersionUID
*/
private static final long serialVersionUID = 5191191573910676820L;
/**
* @see BaseException#BaseException(String, String)
*/<|fim▁hole|> }
/**
* @see BaseException#BaseException(String, Throwable, String)
*/
public ExcelException(Throwable cause, String message, String logType) {
super(message, cause, logType);
}
/**
* @see BaseException#BaseException(Throwable, String)
*/
public ExcelException(Throwable cause, String message) {
super(cause, message);
}
/**
* @see BaseException#BaseException(Throwable, boolean)
*/
public ExcelException(Throwable cause, boolean useLog) {
super(cause, useLog);
}
}<|fim▁end|> | public ExcelException(String message, String logType) {
super(message, logType); |
<|file_name|>tableDynamicSpec.ts<|end_file_name|><|fim▁begin|>describe('ng-table-dynamic', function() {
interface IPerson {
id?: number;
name?: string;
age: number;
money?: number;
}
interface IExtendedDynamicTableColDef extends NgTable.Columns.IDynamicTableColDef {
field: NgTable.Columns.DynamicTableColField<string>
}
interface ICustomizedScope extends ng.IScope {
tableParams: NgTableParams<IPerson>;
cols: IExtendedDynamicTableColDef[];
}
var dataset = [
{ id: 1, name: "Moroni", age: 50, money: -10 },
{ id: 2, name: "Tiancum", age: 43, money: 120 },
{ id: 3, name: "Jacob", age: 27, money: 5.5 },
{ id: 4, name: "Nephi", age: 29, money: -54 },
{ id: 5, name: "Enos", age: 34, money: 110 },
{ id: 6, name: "Tiancum", age: 43, money: 1000 },
{ id: 7, name: "Jacob", age: 27, money: -201 },
{ id: 8, name: "Nephi", age: 29, money: 100 },
{ id: 9, name: "Enos", age: 34, money: -52.5 },
{ id: 10, name: "Tiancum", age: 43, money: 52.1 },
{ id: 11, name: "Jacob", age: 27, money: 110 },
{ id: 12, name: "Nephi", age: 29, money: -55 },
{ id: 13, name: "Enos", age: 34, money: 551 },
{ id: 14, name: "Tiancum", age: 43, money: -1410 },
{ id: 15, name: "Jacob", age: 27, money: 410 },
{ id: 16, name: "Nephi", age: 29, money: 100 },
{ id: 17, name: "Enos", age: 34, money: -100 }
];
beforeEach(angular.mock.module('ngTable'));
var scope: ICustomizedScope;
beforeEach(inject(function($rootScope: ng.IScope) {
scope = $rootScope.$new(true) as ICustomizedScope;
}));
describe('basics', function(){
var elm: ng.IAugmentedJQuery;
beforeEach(inject(function($compile: ng.ICompileService, $q: ng.IQService, NgTableParams: NgTable.ITableParamsConstructor<any>) {
elm = angular.element(
'<div>' +
'<table ng-table-dynamic="tableParams with cols">' +
'<tr ng-repeat="user in $data">' +
'<td ng-repeat="col in $columns">{{user[col.field]}}</td>' +
'</tr>' +
'</table>' +
'</div>');
function getCustomClass(context: NgTable.Columns.ColumnFieldContext){
if (context.$column.title().indexOf('Money') !== -1){
return 'moneyHeaderClass';
} else{
return 'customClass';
}
}
function money(context: NgTable.Columns.ColumnFieldContext) {
let selectOptions = [{
'id': 10,
'title': '10'
}];
return $q.when(selectOptions);
}
scope.tableParams = new NgTableParams({}, {});
scope.cols = [
{
'class': getCustomClass,
field: 'name',
filter: { ['name']: 'text'},
headerTitle: 'Sort by Name',
sortable: 'name',
show: true,
title: 'Name of person'
},
{
'class': getCustomClass,
field: 'age',
headerTitle: 'Sort by Age',
sortable: 'age',
show: true,
title: 'Age'
},
{
'class': getCustomClass,
field: 'money',
filter: { ['action']: 'select' },
headerTitle: 'Sort by Money',
filterData: money,
show: true,
title: 'Money'
}
];
$compile(elm)(scope);
scope.$digest();
}));
it('should create table header', function() {
var thead = elm.find('thead');
expect(thead.length).toBe(1);
var rows = thead.find('tr');
expect(rows.length).toBe(2);
var titles = angular.element(rows[0]).find('th');
expect(titles.length).toBe(3);
expect(angular.element(titles[0]).text().trim()).toBe('Name of person');
expect(angular.element(titles[1]).text().trim()).toBe('Age');
expect(angular.element(titles[2]).text().trim()).toBe('Money');
expect(angular.element(rows[1]).hasClass('ng-table-filters')).toBeTruthy();
var filters = angular.element(rows[1]).find('th');
expect(filters.length).toBe(3);
expect(angular.element(filters[0]).hasClass('filter')).toBeTruthy();
expect(angular.element(filters[1]).hasClass('filter')).toBeTruthy();
expect(angular.element(filters[2]).hasClass('filter')).toBeTruthy();
});
it('should create table header classes', inject(function($compile: ng.ICompileService, $rootScope: ng.IScope) {
var thead = elm.find('thead');
var rows = thead.find('tr');
var titles = angular.element(rows[0]).find('th');
expect(angular.element(titles[0]).hasClass('header')).toBeTruthy();
expect(angular.element(titles[1]).hasClass('header')).toBeTruthy();
expect(angular.element(titles[2]).hasClass('header')).toBeTruthy();
expect(angular.element(titles[0]).hasClass('sortable')).toBeTruthy();
expect(angular.element(titles[1]).hasClass('sortable')).toBeTruthy();
expect(angular.element(titles[2]).hasClass('sortable')).toBeFalsy();
expect(angular.element(titles[0]).hasClass('customClass')).toBeTruthy();
expect(angular.element(titles[1]).hasClass('customClass')).toBeTruthy();
expect(angular.element(titles[2]).hasClass('moneyHeaderClass')).toBeTruthy();
}));
it('should create table header titles', function() {
var thead = elm.find('thead');
var rows = thead.find('tr');
var titles = angular.element(rows[0]).find('th');
expect(angular.element(titles[0]).attr('title').trim()).toBe('Sort by Name');
expect(angular.element(titles[1]).attr('title').trim()).toBe('Sort by Age');
expect(angular.element(titles[2]).attr('title').trim()).toBe('Sort by Money');
});
it('should show data-title-text', inject(function(NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
var tbody = elm.find('tbody');
scope.tableParams = new NgTableParams({
page: 1, // show first page
count: 10 // count per page
}, {
dataset: dataset
});
scope.$digest();
var filterRow = angular.element(elm.find('thead').find('tr')[1]);
var filterCells = filterRow.find('th');
expect(angular.element(filterCells[0]).attr('data-title-text').trim()).toBe('Name of person');
expect(angular.element(filterCells[1]).attr('data-title-text').trim()).toBe('Age');
expect(angular.element(filterCells[2]).attr('data-title-text').trim()).toBe('Money');
var dataRows = elm.find('tbody').find('tr');
var dataCells = angular.element(dataRows[0]).find('td');
expect(angular.element(dataCells[0]).attr('data-title-text').trim()).toBe('Name of person');
expect(angular.element(dataCells[1]).attr('data-title-text').trim()).toBe('Age');
expect(angular.element(dataCells[2]).attr('data-title-text').trim()).toBe('Money');
}));
it('should show/hide columns', inject(function(NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
var tbody = elm.find('tbody');
scope.tableParams = new NgTableParams({
page: 1, // show first page
count: 10 // count per page
}, {
dataset: dataset
});
scope.$digest();
var headerRow = angular.element(elm.find('thead').find('tr')[0]);
expect(headerRow.find('th').length).toBe(3);
var filterRow = angular.element(elm.find('thead').find('tr')[1]);
expect(filterRow.find('th').length).toBe(3);
var dataRow = angular.element(elm.find('tbody').find('tr')[0]);
expect(dataRow.find('td').length).toBe(3);
scope.cols[0].show = false;
scope.$digest();
expect(headerRow.find('th').length).toBe(2);
expect(filterRow.find('th').length).toBe(2);
expect(dataRow.find('td').length).toBe(2);
expect(angular.element(headerRow.find('th')[0]).text().trim()).toBe('Age');
expect(angular.element(headerRow.find('th')[1]).text().trim()).toBe('Money');
expect(angular.element(filterRow.find('th')[0]).find('input').length).toBe(0);
expect(angular.element(filterRow.find('th')[1]).find('select').length).toBe(1);
}));
});
describe('changing column list', function(){
var elm: ng.IAugmentedJQuery;
beforeEach(inject(function($compile: ng.ICompileService, $q: ng.IQService, NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
elm = angular.element(
'<div>' +
'<table ng-table-dynamic="tableParams with cols">' +
'<tr ng-repeat="user in $data">' +
'<td ng-repeat="col in $columns">{{user[col.field]}}</td>' +
'</tr>' +
'</table>' +
'</div>');
function getCustomClass(parmasScope: NgTable.Columns.ColumnFieldContext){
if (parmasScope.$column.title().indexOf('Money') !== -1){
return 'moneyHeaderClass';
} else{
return 'customClass';
}
}
function money(/*$column*/) {
var def = $q.defer();
def.resolve([{
'id': 10,
'title': '10'
}]);
return def;
}
scope.tableParams = new NgTableParams({}, {});
scope.cols = [
{
'class': getCustomClass,
field: 'name',
filter: { name: 'text' },
headerTitle: 'Sort by Name',
sortable: 'name',
show: true,
title: 'Name of person'
},
{
'class': getCustomClass,
field: 'age',
headerTitle: 'Sort by Age',
sortable: 'age',
show: true,
title: 'Age'
}
];
$compile(elm)(scope);
scope.$digest();
}));
it('adding new column should update table header', function() {
var newCol: IExtendedDynamicTableColDef = {
'class': 'moneyadd',
field: 'money',
filter: { action: 'select' },
headerTitle: 'Sort by Money',
show: true,
title: 'Money'
};
scope.cols.push(newCol);
scope.$digest();
var thead = elm.find('thead');
expect(thead.length).toBe(1);
var rows = thead.find('tr');
expect(rows.length).toBe(2);
var titles = angular.element(rows[0]).find('th');
expect(titles.length).toBe(3);
expect(angular.element(titles[0]).text().trim()).toBe('Name of person');
expect(angular.element(titles[1]).text().trim()).toBe('Age');
expect(angular.element(titles[2]).text().trim()).toBe('Money');
var filterRow = angular.element(rows[1]);
expect(filterRow.hasClass('ng-table-filters')).toBeTruthy();
expect(filterRow.hasClass("ng-hide")).toBe(false);
var filters = filterRow.find('th');
expect(filters.length).toBe(3);
expect(angular.element(filters[0]).hasClass('filter')).toBeTruthy();
expect(angular.element(filters[1]).hasClass('filter')).toBeTruthy();
expect(angular.element(filters[2]).hasClass('filter')).toBeTruthy();
});
it('removing new column should update table header', function() {
scope.cols.splice(0, 1);
scope.$digest();
var thead = elm.find('thead');
expect(thead.length).toBe(1);
var rows = thead.find('tr');
var titles = angular.element(rows[0]).find('th');
expect(titles.length).toBe(1);
expect(angular.element(titles[0]).text().trim()).toBe('Age');
var filterRow = angular.element(rows[1]);
expect(filterRow.hasClass("ng-hide")).toBe(true);
});
it('setting columns to null should remove all table columns from header', function() {
scope.cols = null;
scope.$digest();
var thead = elm.find('thead');
expect(thead.length).toBe(1);
var rows = thead.find('tr');
var titles = angular.element(rows[0]).find('th');
expect(titles.length).toBe(0);
var filterRow = angular.element(rows[1]);
expect(filterRow.hasClass("ng-hide")).toBe(true);
expect(filterRow.find('th').length).toBe(0);
});
});
describe('title-alt', function() {
var elm: ng.IAugmentedJQuery;
beforeEach(inject(function($compile: ng.ICompileService, NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
elm = angular.element(
'<table ng-table-dynamic="tableParams with cols">' +
'<tr ng-repeat="user in $data">' +
'<td ng-repeat="col in $columns">{{user[col.field]}}</td>' +
'</tr>' +
'</table>');
scope.cols = [
{ field: 'name', title: 'Name of person', titleAlt: 'Name' },
{ field: 'age', title: 'Age', titleAlt: 'Age' },
{ field: 'money', title: 'Money', titleAlt: '£' }
];
scope.tableParams = new NgTableParams({
page: 1, // show first page
count: 10 // count per page
}, {
dataset: dataset
});
$compile(elm)(scope);
scope.$digest();
}));
it('should show as data-title-text', inject(function($compile: ng.ICompileService) {
var filterRow = angular.element(elm.find('thead').find('tr')[1]);
var filterCells = filterRow.find('th');
expect(angular.element(filterCells[0]).attr('data-title-text').trim()).toBe('Name');
expect(angular.element(filterCells[1]).attr('data-title-text').trim()).toBe('Age');
expect(angular.element(filterCells[2]).attr('data-title-text').trim()).toBe('£');
var dataRows = elm.find('tbody').find('tr');
var dataCells = angular.element(dataRows[0]).find('td');
expect(angular.element(dataCells[0]).attr('data-title-text').trim()).toBe('Name');
expect(angular.element(dataCells[1]).attr('data-title-text').trim()).toBe('Age');
expect(angular.element(dataCells[2]).attr('data-title-text').trim()).toBe('£');
}));
});
describe('filters', function(){
var elm: ng.IAugmentedJQuery;
beforeEach(inject(function($compile: ng.ICompileService, NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
elm = angular.element(
'<table ng-table-dynamic="tableParams with cols">' +
'<tr ng-repeat="user in $data">' +
'<td ng-repeat="col in $columns">{{user[col.field]}}</td>' +
'</tr>' +
'</table>');
}));
describe('filter specified as alias', function(){
beforeEach(inject(function($compile: ng.ICompileService, NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
scope.cols = [
{ field: 'name', filter: {username: 'text'} }
];
scope.tableParams = new NgTableParams({}, {});
$compile(elm)(scope);
scope.$digest();
}));
it('should render named filter template', function() {
var inputs = elm.find('thead').find('tr').eq(1).find('th').find('input');
expect(inputs.length).toBe(1);
expect(inputs.eq(0).attr('type')).toBe('text');
expect(inputs.eq(0).attr('ng-model')).not.toBeUndefined();
expect(inputs.eq(0).attr('name')).toBe('username');
});
it('should render named filter template - select template', function() {
var inputs = elm.find('thead').find('tr').eq(1).find('th').find('input');
expect(inputs.length).toBe(1);
expect(inputs.eq(0).attr('type')).toBe('text');
expect(inputs.eq(0).attr('ng-model')).not.toBeUndefined();
expect(inputs.eq(0).attr('name')).toBe('username');
});
it('should databind ngTableParams.filter to filter input', function () {
scope.tableParams.filter()['username'] = 'my name is...';
scope.$digest();
var input = elm.find('thead').find('tr').eq(1).find('th').find('input');
expect(input.val()).toBe('my name is...');
});
});
describe('select filter', function(){
beforeEach(inject(function ($compile: ng.ICompileService, $q: ng.IQService, NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
scope.cols = [{
field: 'name',
filter: {username: 'select'},
filterData: getNamesAsDefer
}, {
field: 'names2',
filter: {username2: 'select'},
filterData: getNamesAsPromise
}, {
field: 'names3',
filter: {username3: 'select'},
filterData: getNamesAsArray
}];
scope.tableParams = new NgTableParams({}, {});
$compile(elm)(scope);
scope.$digest();
function getNamesAsDefer(/*$column*/) {
return $q.when([{
'id': 10,
'title': 'Christian'
}, {
'id': 11,
'title': 'Simon'
}]);<|fim▁hole|> return $q.when([{
'id': 20,
'title': 'Christian'
}, {
'id': 21,
'title': 'Simon'
}]);
}
function getNamesAsArray(/*$column*/) {
return [{
'id': 20,
'title': 'Christian'
}, {
'id': 21,
'title': 'Simon'
}];
}
}));
it('should render select lists', function() {
var inputs = elm.find('thead').find('tr').eq(1).find('th').find('select');
expect(inputs.length).toBe(3);
expect(inputs.eq(0).attr('ng-model')).not.toBeUndefined();
expect(inputs.eq(0).attr('name')).toBe('username');
expect(inputs.eq(1).attr('ng-model')).not.toBeUndefined();
expect(inputs.eq(1).attr('name')).toBe('username2');
expect(inputs.eq(2).attr('ng-model')).not.toBeUndefined();
expect(inputs.eq(2).attr('name')).toBe('username3');
});
it('should render select list return as a promise', function() {
var inputs = elm.find('thead').find('tr').eq(1).find('th').eq(1).find('select');
var select = inputs.eq(0) as ng.IAugmentedJQuery;
expect((select[0] as HTMLSelectElement).options.length).toBeGreaterThan(0);
var $column = (select.scope() as NgTable.Columns.ColumnFieldContext).$column;
var plucker = _.partialRight(_.pick, ['id', 'title']);
var actual = _.map($column.data as NgTable.ISelectOption[], plucker);
expect(actual).toEqual([{
'id': '',
'title': ''
},{
'id': 20,
'title': 'Christian'
}, {
'id': 21,
'title': 'Simon'
}]);
});
it('should render select list return as an array', function() {
var inputs = elm.find('thead').find('tr').eq(1).find('th').eq(2).find('select');
var select = inputs.eq(0) as ng.IAugmentedJQuery;
expect((select[0] as HTMLSelectElement).options.length).toBeGreaterThan(0);
var $column = (select.scope() as NgTable.Columns.ColumnFieldContext).$column;
var plucker = _.partialRight(_.pick, ['id', 'title']);
var actual = _.map($column.data as NgTable.ISelectOption[], plucker);
expect(actual).toEqual([{
'id': '',
'title': ''
},{
'id': 20,
'title': 'Christian'
}, {
'id': 21,
'title': 'Simon'
}]);
});
});
describe('multiple filter inputs', function(){
beforeEach(inject(function($compile: ng.ICompileService, NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
scope.cols = [
{ field: 'name', filter: {name: 'text', age: 'text'} }
];
scope.tableParams = new NgTableParams({}, {});
$compile(elm)(scope);
scope.$digest();
}));
it('should render filter template for each key/value pair ordered by key', function() {
var inputs = elm.find('thead').find('tr').eq(1).find('th').find('input');
expect(inputs.length).toBe(2);
expect(inputs.eq(0).attr('type')).toBe('text');
expect(inputs.eq(0).attr('ng-model')).not.toBeUndefined();
expect(inputs.eq(1).attr('type')).toBe('text');
expect(inputs.eq(1).attr('ng-model')).not.toBeUndefined();
});
it('should databind ngTableParams.filter to filter inputs', function () {
scope.tableParams.filter()['name'] = 'my name is...';
scope.tableParams.filter()['age'] = '10';
scope.$digest();
var inputs = elm.find('thead').find('tr').eq(1).find('th').find('input');
expect(inputs.eq(0).val()).toBe('my name is...');
expect(inputs.eq(1).val()).toBe('10');
});
});
describe('dynamic filter', function(){
var ageFilter: NgTable.IFilterTemplateDefMap;
beforeEach(inject(function($compile: ng.ICompileService, NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
ageFilter = { age: 'text'};
function getFilter(paramsScope: NgTable.Columns.ColumnFieldContext): NgTable.IFilterTemplateDefMap{
if (paramsScope.$column.title() === 'Name of user') {
return {username: 'text'};
} else if (paramsScope.$column.title() === 'Age') {
return ageFilter;
} else {
return undefined;
}
}
scope.cols = [
{ field: 'name', title: 'Name of user', filter: getFilter },
{ field: 'age', title: 'Age', filter: getFilter }
];
scope.tableParams = new NgTableParams({}, {});
$compile(elm)(scope);
scope.$digest();
}));
it('should render named filter template', function() {
var usernameInput = elm.find('thead').find('tr').eq(1).find('th').eq(0).find('input');
expect(usernameInput.attr('type')).toBe('text');
expect(usernameInput.attr('name')).toBe('username');
var ageInput = elm.find('thead').find('tr').eq(1).find('th').eq(1).find('input');
expect(ageInput.attr('type')).toBe('text');
expect(ageInput.attr('name')).toBe('age');
});
it('should databind ngTableParams.filter to filter input', function () {
scope.tableParams.filter()['username'] = 'my name is...';
scope.tableParams.filter()['age'] = '10';
scope.$digest();
var usernameInput = elm.find('thead').find('tr').eq(1).find('th').eq(0).find('input');
expect(usernameInput.val()).toBe('my name is...');
var ageInput = elm.find('thead').find('tr').eq(1).find('th').eq(1).find('input');
expect(ageInput.val()).toBe('10');
});
it('should render new template as filter changes', inject(function($compile: ng.ICompileService) {
var scriptTemplate = angular.element(
'<script type="text/ng-template" id="ng-table/filters/number.html"><input type="number" name="{{name}}"/></script>');
$compile(scriptTemplate)(scope);
ageFilter['age'] = 'number';
scope.$digest();
var ageInput = elm.find('thead').find('tr').eq(1).find('th').eq(1).find('input');
expect(ageInput.attr('type')).toBe('number');
expect(ageInput.attr('name')).toBe('age');
}));
});
});
describe('reorder columns', function() {
var elm: ng.IAugmentedJQuery;
var getTitles = function () {
var thead = elm.find('thead');
var rows = thead.find('tr');
var titles = angular.element(rows[0]).find('th');
return angular.element(titles).text().trim().split(/\s+/g)
};
beforeEach(inject(function ($compile: ng.ICompileService, $q: ng.IQService, NgTableParams: NgTable.ITableParamsConstructor<IPerson>) {
elm = angular.element(
'<div>' +
'<table ng-table-dynamic="tableParams with cols">' +
'<tr ng-repeat="user in $data">' +
"<td ng-repeat=\"col in $columns\">{{user[col.field]}}</td>" +
'</tr>' +
'</table>' +
'</div>');
scope.tableParams = new NgTableParams({}, {});
scope.cols = [
{
field: 'name',
title: 'Name'
},
{
field: 'age',
title: 'Age'
},
{
field: 'money',
title: 'Money'
}
];
$compile(elm)(scope);
scope.$digest();
}));
it('"in place" switch of columns within array should reorder html table columns', function () {
expect(getTitles()).toEqual([ 'Name', 'Age', 'Money' ]);
var colToSwap = scope.cols[2];
scope.cols[2] = scope.cols[1];
scope.cols[1] = colToSwap;
scope.$digest();
expect(getTitles()).toEqual([ 'Name', 'Money', 'Age' ]);
});
it('"in place" reverse of column array should reorder html table columns', function () {
expect(getTitles()).toEqual([ 'Name', 'Age', 'Money' ]);
scope.cols.reverse();
scope.$digest();
expect(getTitles()).toEqual([ 'Money', 'Age', 'Name' ]);
});
it('html table columns should reflect order of columns in replacement array', function () {
expect(getTitles()).toEqual([ 'Name', 'Age', 'Money' ]);
var newArray = scope.cols.map(angular.identity);
newArray.reverse();
scope.cols = newArray;
scope.$digest();
expect(getTitles()).toEqual([ 'Money', 'Age', 'Name' ]);
});
});
});<|fim▁end|> | }
function getNamesAsPromise(/*$column*/) { |
<|file_name|>TableCodec.java<|end_file_name|><|fim▁begin|>package org.broadinstitute.sting.utils.codecs.table;
import org.broad.tribble.Feature;
import org.broad.tribble.readers.LineReader;
import org.broadinstitute.sting.gatk.refdata.ReferenceDependentFeatureCodec;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.exceptions.UserException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
/**
* Reads tab deliminated tabular text files
*
* <p>
* <ul>
* <li>Header: must begin with line HEADER or track (for IGV), followed by any number of column names,
* separated by whitespace.</li>
* <li>Comment lines starting with # are ignored</li>
* <li>Each non-header and non-comment line is split into parts by whitespace,
* and these parts are assigned as a map to their corresponding column name in the header.
* Note that the first element (corresponding to the HEADER column) must be a valid genome loc
* such as 1, 1:1 or 1:1-10, which is the position of the Table element on the genome. TableCodec
* requires that there be one value for each column in the header, and no more, on all lines.</li>
* </ul>
* </p>
*
* </p>
*
* <h2>File format example</h2>
* <pre>
* HEADER a b c
* 1:1 1 2 3
* 1:2 4 5 6<|fim▁hole|> *
* @author Mark DePristo
* @since 2009
*/
public class TableCodec implements ReferenceDependentFeatureCodec {
final static protected String delimiterRegex = "\\s+";
final static protected String headerDelimiter = "HEADER";
final static protected String igvHeaderDelimiter = "track";
final static protected String commentDelimiter = "#";
protected ArrayList<String> header = new ArrayList<String>();
/**
* The parser to use when resolving genome-wide locations.
*/
protected GenomeLocParser genomeLocParser;
/**
* Set the parser to use when resolving genetic data.
* @param genomeLocParser The supplied parser.
*/
@Override
public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
this.genomeLocParser = genomeLocParser;
}
@Override
public Feature decodeLoc(String line) {
return decode(line);
}
@Override
public Feature decode(String line) {
if (line.startsWith(headerDelimiter) || line.startsWith(commentDelimiter) || line.startsWith(igvHeaderDelimiter))
return null;
String[] split = line.split(delimiterRegex);
if (split.length < 1)
throw new IllegalArgumentException("TableCodec line = " + line + " doesn't appear to be a valid table format");
return new TableFeature(genomeLocParser.parseGenomeLoc(split[0]),Arrays.asList(split),header);
}
@Override
public Class<TableFeature> getFeatureType() {
return TableFeature.class;
}
@Override
public Object readHeader(LineReader reader) {
String line = "";
try {
boolean isFirst = true;
while ((line = reader.readLine()) != null) {
if ( isFirst && ! line.startsWith(headerDelimiter) && ! line.startsWith(commentDelimiter)) {
throw new UserException.MalformedFile("TableCodec file does not have a header");
}
isFirst &= line.startsWith(commentDelimiter);
if (line.startsWith(headerDelimiter)) {
if (header.size() > 0) throw new IllegalStateException("Input table file seems to have two header lines. The second is = " + line);
String spl[] = line.split(delimiterRegex);
for (String s : spl) header.add(s);
return header;
} else if (!line.startsWith(commentDelimiter)) {
break;
}
}
} catch (IOException e) {
throw new UserException.MalformedFile("unable to parse header from TableCodec file",e);
}
return header;
}
public boolean canDecode(final String potentialInput) { return false; }
}<|fim▁end|> | * 1:3 7 8 9
* </pre> |
<|file_name|>CharacterHandler.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2015 TrinityCore <http://www.trinitycore.org/>
* Copyright (C) 2005-2009 MaNGOS <http://getmangos.com/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "AccountMgr.h"
#include "ArenaTeam.h"
#include "ArenaTeamMgr.h"
#include "Battleground.h"
#include "CalendarMgr.h"
#include "Chat.h"
#include "Common.h"
#include "DatabaseEnv.h"
#include "Group.h"
#include "Guild.h"
#include "GuildMgr.h"
#include "Language.h"
#include "Log.h"
#include "ObjectAccessor.h"
#include "ObjectMgr.h"
#include "Opcodes.h"
#include "Pet.h"
#include "PlayerDump.h"
#include "Player.h"
#include "ReputationMgr.h"
#include "ScriptMgr.h"
#include "SharedDefines.h"
#include "SocialMgr.h"
#include "SystemConfig.h"
#include "UpdateMask.h"
#include "Util.h"
#include "World.h"
#include "WorldPacket.h"
#include "WorldSession.h"
#ifdef ELUNA
#include "LuaEngine.h"
#endif
class LoginQueryHolder : public SQLQueryHolder
{
private:
uint32 m_accountId;
ObjectGuid m_guid;
public:
LoginQueryHolder(uint32 accountId, ObjectGuid guid)
: m_accountId(accountId), m_guid(guid) { }
ObjectGuid GetGuid() const { return m_guid; }
uint32 GetAccountId() const { return m_accountId; }
bool Initialize();
};
bool LoginQueryHolder::Initialize()
{
SetSize(MAX_PLAYER_LOGIN_QUERY);
bool res = true;
uint32 lowGuid = m_guid.GetCounter();
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_FROM, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_GROUP_MEMBER);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_GROUP, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_INSTANCE);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_BOUND_INSTANCES, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_AURAS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_AURAS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_SPELL);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_SPELLS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_QUESTSTATUS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_QUEST_STATUS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_QUESTSTATUS_DAILY);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_DAILY_QUEST_STATUS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_QUESTSTATUS_WEEKLY);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_WEEKLY_QUEST_STATUS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_QUESTSTATUS_MONTHLY);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_MONTHLY_QUEST_STATUS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_QUESTSTATUS_SEASONAL);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_SEASONAL_QUEST_STATUS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_REPUTATION);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_REPUTATION, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_INVENTORY);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_INVENTORY, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_ACTIONS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_ACTIONS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_MAILCOUNT);
stmt->setUInt32(0, lowGuid);
stmt->setUInt64(1, uint64(time(NULL)));
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_MAIL_COUNT, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_MAILDATE);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_MAIL_DATE, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_SOCIALLIST);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_SOCIAL_LIST, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_HOMEBIND);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_HOME_BIND, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_SPELLCOOLDOWNS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_SPELL_COOLDOWNS, stmt);
if (sWorld->getBoolConfig(CONFIG_DECLINED_NAMES_USED))
{
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_DECLINEDNAMES);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_DECLINED_NAMES, stmt);
}
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_GUILD_MEMBER);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_GUILD, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_ARENAINFO);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_ARENA_INFO, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_ACHIEVEMENTS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_ACHIEVEMENTS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_CRITERIAPROGRESS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_CRITERIA_PROGRESS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_EQUIPMENTSETS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_EQUIPMENT_SETS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_BGDATA);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_BG_DATA, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_GLYPHS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_GLYPHS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_TALENTS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_TALENTS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_PLAYER_ACCOUNT_DATA);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_ACCOUNT_DATA, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_SKILLS);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_SKILLS, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_RANDOMBG);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_RANDOM_BG, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_BANNED);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_BANNED, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_QUESTSTATUSREW);
stmt->setUInt32(0, lowGuid);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_QUEST_STATUS_REW, stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_ACCOUNT_INSTANCELOCKTIMES);
stmt->setUInt32(0, m_accountId);
res &= SetPreparedQuery(PLAYER_LOGIN_QUERY_LOAD_INSTANCE_LOCK_TIMES, stmt);
return res;
}
void WorldSession::HandleCharEnum(PreparedQueryResult result)
{
WorldPacket data(SMSG_CHAR_ENUM, 100); // we guess size
uint8 num = 0;
data << num;
_legitCharacters.clear();
if (result)
{
do
{
ObjectGuid guid(HIGHGUID_PLAYER, (*result)[0].GetUInt32());
TC_LOG_INFO("network", "Loading %s from account %u.", guid.ToString().c_str(), GetAccountId());
if (Player::BuildEnumData(result, &data))
{
// Do not allow banned characters to log in
if (!(*result)[20].GetUInt32())
_legitCharacters.insert(guid);
if (!sWorld->HasCharacterNameData(guid)) // This can happen if characters are inserted into the database manually. Core hasn't loaded name data yet.
sWorld->AddCharacterNameData(guid, (*result)[1].GetString(), (*result)[4].GetUInt8(), (*result)[2].GetUInt8(), (*result)[3].GetUInt8(), (*result)[7].GetUInt8());
++num;
}
}
while (result->NextRow());
}
data.put<uint8>(0, num);
SendPacket(&data);
}
void WorldSession::HandleCharEnumOpcode(WorldPacket& /*recvData*/)
{
// remove expired bans
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_EXPIRED_BANS);
CharacterDatabase.Execute(stmt);
/// get all the data necessary for loading all characters (along with their pets) on the account
if (sWorld->getBoolConfig(CONFIG_DECLINED_NAMES_USED))
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_ENUM_DECLINED_NAME);
else
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_ENUM);
stmt->setUInt8(0, PET_SAVE_AS_CURRENT);
stmt->setUInt32(1, GetAccountId());
_charEnumCallback = CharacterDatabase.AsyncQuery(stmt);
}
void WorldSession::HandleCharCreateOpcode(WorldPacket& recvData)
{
CharacterCreateInfo createInfo;
recvData >> createInfo.Name
>> createInfo.Race
>> createInfo.Class
>> createInfo.Gender
>> createInfo.Skin
>> createInfo.Face
>> createInfo.HairStyle
>> createInfo.HairColor
>> createInfo.FacialHair
>> createInfo.OutfitId;
if (!HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_TEAMMASK))
{
if (uint32 mask = sWorld->getIntConfig(CONFIG_CHARACTER_CREATING_DISABLED))
{
bool disabled = false;
switch (Player::TeamForRace(createInfo.Race))
{
case ALLIANCE:
disabled = (mask & (1 << 0)) != 0;
break;
case HORDE:
disabled = (mask & (1 << 1)) != 0;
break;
}
if (disabled)
{
SendCharCreate(CHAR_CREATE_DISABLED);
return;
}
}
}
ChrClassesEntry const* classEntry = sChrClassesStore.LookupEntry(createInfo.Class);
if (!classEntry)
{
TC_LOG_ERROR("network", "Class (%u) not found in DBC while creating new char for account (ID: %u): wrong DBC files or cheater?", createInfo.Class, GetAccountId());
SendCharCreate(CHAR_CREATE_FAILED);
return;
}
ChrRacesEntry const* raceEntry = sChrRacesStore.LookupEntry(createInfo.Race);
if (!raceEntry)
{
TC_LOG_ERROR("network", "Race (%u) not found in DBC while creating new char for account (ID: %u): wrong DBC files or cheater?", createInfo.Race, GetAccountId());
SendCharCreate(CHAR_CREATE_FAILED);
return;
}
// prevent character creating Expansion race without Expansion account
if (raceEntry->expansion > Expansion())
{
TC_LOG_ERROR("network", "Expansion %u account:[%d] tried to Create character with expansion %u race (%u)", Expansion(), GetAccountId(), raceEntry->expansion, createInfo.Race);
SendCharCreate(CHAR_CREATE_EXPANSION);
return;
}
// prevent character creating Expansion class without Expansion account
if (classEntry->expansion > Expansion())
{
TC_LOG_ERROR("network", "Expansion %u account:[%d] tried to Create character with expansion %u class (%u)", Expansion(), GetAccountId(), classEntry->expansion, createInfo.Class);
SendCharCreate(CHAR_CREATE_EXPANSION_CLASS);
return;
}
if (!HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_RACEMASK))
{
uint32 raceMaskDisabled = sWorld->getIntConfig(CONFIG_CHARACTER_CREATING_DISABLED_RACEMASK);
if ((1 << (createInfo.Race - 1)) & raceMaskDisabled)
{
SendCharCreate(CHAR_CREATE_DISABLED);
return;
}
}
if (!HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_CLASSMASK))
{
uint32 classMaskDisabled = sWorld->getIntConfig(CONFIG_CHARACTER_CREATING_DISABLED_CLASSMASK);
if ((1 << (createInfo.Class - 1)) & classMaskDisabled)
{
SendCharCreate(CHAR_CREATE_DISABLED);
return;
}
}
// prevent character creating with invalid name
if (!normalizePlayerName(createInfo.Name))
{
TC_LOG_ERROR("network", "Account:[%d] but tried to Create character with empty [name] ", GetAccountId());
SendCharCreate(CHAR_NAME_NO_NAME);
return;
}
// check name limitations
ResponseCodes res = ObjectMgr::CheckPlayerName(createInfo.Name, true);
if (res != CHAR_NAME_SUCCESS)
{
SendCharCreate(res);
return;
}
if (!HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_RESERVEDNAME) && sObjectMgr->IsReservedName(createInfo.Name))
{
SendCharCreate(CHAR_NAME_RESERVED);
return;
}
if (createInfo.Class == CLASS_DEATH_KNIGHT && !HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_HEROIC_CHARACTER))
{
// speedup check for heroic class disabled case
uint32 heroic_free_slots = sWorld->getIntConfig(CONFIG_HEROIC_CHARACTERS_PER_REALM);
if (heroic_free_slots == 0)
{
SendCharCreate(CHAR_CREATE_UNIQUE_CLASS_LIMIT);
return;
}
// speedup check for heroic class disabled case
uint32 req_level_for_heroic = sWorld->getIntConfig(CONFIG_CHARACTER_CREATING_MIN_LEVEL_FOR_HEROIC_CHARACTER);
if (req_level_for_heroic > sWorld->getIntConfig(CONFIG_MAX_PLAYER_LEVEL))
{
SendCharCreate(CHAR_CREATE_LEVEL_REQUIREMENT);
return;
}
}
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHECK_NAME);
stmt->setString(0, createInfo.Name);
delete _charCreateCallback.GetParam(); // Delete existing if any, to make the callback chain reset to stage 0
_charCreateCallback.SetParam(new CharacterCreateInfo(std::move(createInfo)));
_charCreateCallback.SetFutureResult(CharacterDatabase.AsyncQuery(stmt));
}
void WorldSession::HandleCharCreateCallback(PreparedQueryResult result, CharacterCreateInfo* createInfo)
{
/** This is a series of callbacks executed consecutively as a result from the database becomes available.
This is much more efficient than synchronous requests on packet handler, and much less DoS prone.
It also prevents data syncrhonisation errors.
*/
switch (_charCreateCallback.GetStage())
{
case 0:
{
if (result)
{
SendCharCreate(CHAR_CREATE_NAME_IN_USE);
delete createInfo;
_charCreateCallback.Reset();
return;
}
ASSERT(_charCreateCallback.GetParam() == createInfo);
PreparedStatement* stmt = LoginDatabase.GetPreparedStatement(LOGIN_SEL_SUM_REALM_CHARACTERS);
stmt->setUInt32(0, GetAccountId());
_charCreateCallback.FreeResult();
_charCreateCallback.SetFutureResult(LoginDatabase.AsyncQuery(stmt));
_charCreateCallback.NextStage();
break;
}
case 1:
{
uint16 acctCharCount = 0;
if (result)
{
Field* fields = result->Fetch();
// SELECT SUM(x) is MYSQL_TYPE_NEWDECIMAL - needs to be read as string
const char* ch = fields[0].GetCString();
if (ch)
acctCharCount = atoi(ch);
}
if (acctCharCount >= sWorld->getIntConfig(CONFIG_CHARACTERS_PER_ACCOUNT))
{
SendCharCreate(CHAR_CREATE_ACCOUNT_LIMIT);
delete createInfo;
_charCreateCallback.Reset();
return;
}
ASSERT(_charCreateCallback.GetParam() == createInfo);
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_SUM_CHARS);
stmt->setUInt32(0, GetAccountId());
_charCreateCallback.FreeResult();
_charCreateCallback.SetFutureResult(CharacterDatabase.AsyncQuery(stmt));
_charCreateCallback.NextStage();
break;
}
case 2:
{
if (result)
{
Field* fields = result->Fetch();
createInfo->CharCount = uint8(fields[0].GetUInt64()); // SQL's COUNT() returns uint64 but it will always be less than uint8.Max
if (createInfo->CharCount >= sWorld->getIntConfig(CONFIG_CHARACTERS_PER_REALM))
{
SendCharCreate(CHAR_CREATE_SERVER_LIMIT);
delete createInfo;
_charCreateCallback.Reset();
return;
}
}
bool allowTwoSideAccounts = !sWorld->IsPvPRealm() || HasPermission(rbac::RBAC_PERM_TWO_SIDE_CHARACTER_CREATION);
uint32 skipCinematics = sWorld->getIntConfig(CONFIG_SKIP_CINEMATICS);
_charCreateCallback.FreeResult();
if (!allowTwoSideAccounts || skipCinematics == 1 || createInfo->Class == CLASS_DEATH_KNIGHT)
{
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHAR_CREATE_INFO);
stmt->setUInt32(0, GetAccountId());
stmt->setUInt32(1, (skipCinematics == 1 || createInfo->Class == CLASS_DEATH_KNIGHT) ? 10 : 1);
_charCreateCallback.SetFutureResult(CharacterDatabase.AsyncQuery(stmt));
_charCreateCallback.NextStage();
return;
}
_charCreateCallback.NextStage();
HandleCharCreateCallback(PreparedQueryResult(NULL), createInfo); // Will jump to case 3
break;
}
case 3:
{
bool haveSameRace = false;
uint32 heroicReqLevel = sWorld->getIntConfig(CONFIG_CHARACTER_CREATING_MIN_LEVEL_FOR_HEROIC_CHARACTER);
bool hasHeroicReqLevel = (heroicReqLevel == 0);
bool allowTwoSideAccounts = !sWorld->IsPvPRealm() || HasPermission(rbac::RBAC_PERM_TWO_SIDE_CHARACTER_CREATION);
uint32 skipCinematics = sWorld->getIntConfig(CONFIG_SKIP_CINEMATICS);
bool checkHeroicReqs = createInfo->Class == CLASS_DEATH_KNIGHT && !HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_HEROIC_CHARACTER);
if (result)
{
uint32 team = Player::TeamForRace(createInfo->Race);
uint32 freeHeroicSlots = sWorld->getIntConfig(CONFIG_HEROIC_CHARACTERS_PER_REALM);
Field* field = result->Fetch();
uint8 accRace = field[1].GetUInt8();
if (checkHeroicReqs)
{
uint8 accClass = field[2].GetUInt8();
if (accClass == CLASS_DEATH_KNIGHT)
{
if (freeHeroicSlots > 0)
--freeHeroicSlots;
if (freeHeroicSlots == 0)
{
SendCharCreate(CHAR_CREATE_UNIQUE_CLASS_LIMIT);
delete createInfo;
_charCreateCallback.Reset();
return;
}
}
if (!hasHeroicReqLevel)
{
uint8 accLevel = field[0].GetUInt8();
if (accLevel >= heroicReqLevel)
hasHeroicReqLevel = true;
}
}
// need to check team only for first character
/// @todo what to if account already has characters of both races?
if (!allowTwoSideAccounts)
{
uint32 accTeam = 0;
if (accRace > 0)
accTeam = Player::TeamForRace(accRace);
if (accTeam != team)
{
SendCharCreate(CHAR_CREATE_PVP_TEAMS_VIOLATION);
delete createInfo;
_charCreateCallback.Reset();
return;
}
}
// search same race for cinematic or same class if need
/// @todo check if cinematic already shown? (already logged in?; cinematic field)
while ((skipCinematics == 1 && !haveSameRace) || createInfo->Class == CLASS_DEATH_KNIGHT)
{
if (!result->NextRow())
break;
field = result->Fetch();
accRace = field[1].GetUInt8();
if (!haveSameRace)
haveSameRace = createInfo->Race == accRace;
if (checkHeroicReqs)
{
uint8 acc_class = field[2].GetUInt8();
if (acc_class == CLASS_DEATH_KNIGHT)
{
if (freeHeroicSlots > 0)
--freeHeroicSlots;
if (freeHeroicSlots == 0)
{
SendCharCreate(CHAR_CREATE_UNIQUE_CLASS_LIMIT);
delete createInfo;
_charCreateCallback.Reset();
return;
}
}
if (!hasHeroicReqLevel)
{
uint8 acc_level = field[0].GetUInt8();
if (acc_level >= heroicReqLevel)
hasHeroicReqLevel = true;
}
}
}
}
if (checkHeroicReqs && !hasHeroicReqLevel)
{
SendCharCreate(CHAR_CREATE_LEVEL_REQUIREMENT);
delete createInfo;
_charCreateCallback.Reset();
return;
}
Player newChar(this);
newChar.GetMotionMaster()->Initialize();
if (!newChar.Create(sObjectMgr->GenerateLowGuid(HIGHGUID_PLAYER), createInfo))
{
// Player not create (race/class/etc problem?)
newChar.CleanupsBeforeDelete();
SendCharCreate(CHAR_CREATE_ERROR);
delete createInfo;
_charCreateCallback.Reset();
return;
}
if ((haveSameRace && skipCinematics == 1) || skipCinematics == 2)
newChar.setCinematic(1); // not show intro
newChar.SetAtLoginFlag(AT_LOGIN_FIRST); // First login
// Player created, save it now
newChar.SaveToDB(true);
createInfo->CharCount += 1;
SQLTransaction trans = LoginDatabase.BeginTransaction();
PreparedStatement* stmt = LoginDatabase.GetPreparedStatement(LOGIN_DEL_REALM_CHARACTERS_BY_REALM);
stmt->setUInt32(0, GetAccountId());
stmt->setUInt32(1, realmID);
trans->Append(stmt);
stmt = LoginDatabase.GetPreparedStatement(LOGIN_INS_REALM_CHARACTERS);
stmt->setUInt32(0, createInfo->CharCount);
stmt->setUInt32(1, GetAccountId());
stmt->setUInt32(2, realmID);
trans->Append(stmt);
LoginDatabase.CommitTransaction(trans);
SendCharCreate(CHAR_CREATE_SUCCESS);
TC_LOG_INFO("entities.player.character", "Account: %d (IP: %s) Create Character:[%s] (GUID: %u)", GetAccountId(), GetRemoteAddress().c_str(), createInfo->Name.c_str(), newChar.GetGUIDLow());
sScriptMgr->OnPlayerCreate(&newChar);
sWorld->AddCharacterNameData(newChar.GetGUID(), newChar.GetName(), newChar.getGender(), newChar.getRace(), newChar.getClass(), newChar.getLevel());
newChar.CleanupsBeforeDelete();
delete createInfo;
_charCreateCallback.Reset();
break;
}
}
}
void WorldSession::HandleCharDeleteOpcode(WorldPacket& recvData)
{
ObjectGuid guid;
recvData >> guid;
// Initiating
uint32 initAccountId = GetAccountId();
// can't delete loaded character
if (ObjectAccessor::FindPlayer(guid))
{
sScriptMgr->OnPlayerFailedDelete(guid, initAccountId);
return;
}
uint32 accountId = 0;
uint8 level = 0;
std::string name;
// is guild leader
if (sGuildMgr->GetGuildByLeader(guid))
{
sScriptMgr->OnPlayerFailedDelete(guid, initAccountId);
SendCharDelete(CHAR_DELETE_FAILED_GUILD_LEADER);
return;
}
// is arena team captain
if (sArenaTeamMgr->GetArenaTeamByCaptain(guid))
{
sScriptMgr->OnPlayerFailedDelete(guid, initAccountId);
SendCharDelete(CHAR_DELETE_FAILED_ARENA_CAPTAIN);
return;
}
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_DATA_BY_GUID);
stmt->setUInt32(0, guid.GetCounter());
if (PreparedQueryResult result = CharacterDatabase.Query(stmt))
{
Field* fields = result->Fetch();
accountId = fields[0].GetUInt32();
name = fields[1].GetString();
level = fields[2].GetUInt8();
}
// prevent deleting other players' characters using cheating tools
if (accountId != initAccountId)
{
sScriptMgr->OnPlayerFailedDelete(guid, initAccountId);
return;
}
TC_LOG_INFO("entities.player.character", "Account: %d, IP: %s deleted character: %s, %s, Level: %u", accountId, GetRemoteAddress().c_str(), name.c_str(), guid.ToString().c_str(), level);
// To prevent hook failure, place hook before removing reference from DB
sScriptMgr->OnPlayerDelete(guid, initAccountId); // To prevent race conditioning, but as it also makes sense, we hand the accountId over for successful delete.
// Shouldn't interfere with character deletion though
if (sLog->ShouldLog("entities.player.dump", LOG_LEVEL_INFO)) // optimize GetPlayerDump call
{
std::string dump;
if (PlayerDumpWriter().GetDump(guid.GetCounter(), dump))
sLog->outCharDump(dump.c_str(), accountId, guid.GetRawValue(), name.c_str());
}
sCalendarMgr->RemoveAllPlayerEventsAndInvites(guid);
Player::DeleteFromDB(guid, accountId);
SendCharDelete(CHAR_DELETE_SUCCESS);
}
void WorldSession::HandlePlayerLoginOpcode(WorldPacket& recvData)
{
if (PlayerLoading() || GetPlayer() != NULL)
{
TC_LOG_ERROR("network", "Player tries to login again, AccountId = %d", GetAccountId());
KickPlayer();
return;
}
m_playerLoading = true;
ObjectGuid playerGuid;
TC_LOG_DEBUG("network", "WORLD: Recvd Player Logon Message");
recvData >> playerGuid;
if (!IsLegitCharacterForAccount(playerGuid))
{
TC_LOG_ERROR("network", "Account (%u) can't login with that character (%s).", GetAccountId(), playerGuid.ToString().c_str());
KickPlayer();
return;
}
LoginQueryHolder *holder = new LoginQueryHolder(GetAccountId(), playerGuid);
if (!holder->Initialize())
{
delete holder; // delete all unprocessed queries
m_playerLoading = false;
return;
}
_charLoginCallback = CharacterDatabase.DelayQueryHolder(holder);
}
void WorldSession::HandlePlayerLogin(LoginQueryHolder* holder)
{
ObjectGuid playerGuid = holder->GetGuid();
Player* pCurrChar = new Player(this);
// for send server info and strings (config)
ChatHandler chH = ChatHandler(pCurrChar->GetSession());
// "GetAccountId() == db stored account id" checked in LoadFromDB (prevent login not own character using cheating tools)
if (!pCurrChar->LoadFromDB(playerGuid, holder))
{
SetPlayer(NULL);
KickPlayer(); // disconnect client, player no set to session and it will not deleted or saved at kick
delete pCurrChar; // delete it manually
delete holder; // delete all unprocessed queries
m_playerLoading = false;
return;
}
pCurrChar->GetMotionMaster()->Initialize();
pCurrChar->SendDungeonDifficulty(false);
WorldPacket data(SMSG_LOGIN_VERIFY_WORLD, 20);
data << pCurrChar->GetMapId();
data << pCurrChar->GetPositionX();
data << pCurrChar->GetPositionY();
data << pCurrChar->GetPositionZ();
data << pCurrChar->GetOrientation();
SendPacket(&data);
// load player specific part before send times
LoadAccountData(holder->GetPreparedResult(PLAYER_LOGIN_QUERY_LOAD_ACCOUNT_DATA), PER_CHARACTER_CACHE_MASK);
SendAccountDataTimes(PER_CHARACTER_CACHE_MASK);
data.Initialize(SMSG_FEATURE_SYSTEM_STATUS, 2); // added in 2.2.0
data << uint8(2); // unknown value
data << uint8(0); // enable(1)/disable(0) voice chat interface in client
SendPacket(&data);
// Send MOTD
{
data.Initialize(SMSG_MOTD, 50); // new in 2.0.1
data << (uint32)0;
uint32 linecount=0;
std::string str_motd = sWorld->GetMotd();
std::string::size_type pos, nextpos;
pos = 0;
while ((nextpos= str_motd.find('@', pos)) != std::string::npos)
{
if (nextpos != pos)
{
data << str_motd.substr(pos, nextpos-pos);
++linecount;
}
pos = nextpos+1;
}
if (pos<str_motd.length())
{
data << str_motd.substr(pos);
++linecount;
}
data.put(0, linecount);
SendPacket(&data);
TC_LOG_DEBUG("network", "WORLD: Sent motd (SMSG_MOTD)");
// send server info
if (sWorld->getIntConfig(CONFIG_ENABLE_SINFO_LOGIN) == 1)
chH.PSendSysMessage(_FULLVERSION);
TC_LOG_DEBUG("network", "WORLD: Sent server info");
}
//QueryResult* result = CharacterDatabase.PQuery("SELECT guildid, rank FROM guild_member WHERE guid = '%u'", pCurrChar->GetGUIDLow());
if (PreparedQueryResult resultGuild = holder->GetPreparedResult(PLAYER_LOGIN_QUERY_LOAD_GUILD))
{
Field* fields = resultGuild->Fetch();
pCurrChar->SetInGuild(fields[0].GetUInt32());
pCurrChar->SetRank(fields[1].GetUInt8());
}
else if (pCurrChar->GetGuildId()) // clear guild related fields in case wrong data about non existed membership
{
pCurrChar->SetInGuild(0);
pCurrChar->SetRank(0);
}
if (pCurrChar->GetGuildId() != 0)
{
if (Guild* guild = sGuildMgr->GetGuildById(pCurrChar->GetGuildId()))
guild->SendLoginInfo(this);
else
{
// remove wrong guild data
TC_LOG_ERROR("network", "Player %s (GUID: %u) marked as member of not existing guild (id: %u), removing guild membership for player.", pCurrChar->GetName().c_str(), pCurrChar->GetGUIDLow(), pCurrChar->GetGuildId());
pCurrChar->SetInGuild(0);
}
}
data.Initialize(SMSG_LEARNED_DANCE_MOVES, 4+4);
data << uint32(0);
data << uint32(0);
SendPacket(&data);
pCurrChar->SendInitialPacketsBeforeAddToMap();
//Show cinematic at the first time that player login
if (!pCurrChar->getCinematic())
{
pCurrChar->setCinematic(1);
if (ChrClassesEntry const* cEntry = sChrClassesStore.LookupEntry(pCurrChar->getClass()))
{
if (cEntry->CinematicSequence)
pCurrChar->SendCinematicStart(cEntry->CinematicSequence);
else if (ChrRacesEntry const* rEntry = sChrRacesStore.LookupEntry(pCurrChar->getRace()))
pCurrChar->SendCinematicStart(rEntry->CinematicSequence);
// send new char string if not empty
if (!sWorld->GetNewCharString().empty())
chH.PSendSysMessage("%s", sWorld->GetNewCharString().c_str());
}
}
if (!pCurrChar->GetMap()->AddPlayerToMap(pCurrChar) || !pCurrChar->CheckInstanceLoginValid())
{
AreaTrigger const* at = sObjectMgr->GetGoBackTrigger(pCurrChar->GetMapId());
if (at)
pCurrChar->TeleportTo(at->target_mapId, at->target_X, at->target_Y, at->target_Z, pCurrChar->GetOrientation());
else
pCurrChar->TeleportTo(pCurrChar->m_homebindMapId, pCurrChar->m_homebindX, pCurrChar->m_homebindY, pCurrChar->m_homebindZ, pCurrChar->GetOrientation());
}
sObjectAccessor->AddObject(pCurrChar);
//TC_LOG_DEBUG("Player %s added to Map.", pCurrChar->GetName().c_str());
pCurrChar->SendInitialPacketsAfterAddToMap();
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_ONLINE);
stmt->setUInt32(0, pCurrChar->GetGUIDLow());
CharacterDatabase.Execute(stmt);
stmt = LoginDatabase.GetPreparedStatement(LOGIN_UPD_ACCOUNT_ONLINE);
stmt->setUInt32(0, GetAccountId());
LoginDatabase.Execute(stmt);
pCurrChar->SetInGameTime(getMSTime());
// announce group about member online (must be after add to player list to receive announce to self)
if (Group* group = pCurrChar->GetGroup())
{
//pCurrChar->groupInfo.group->SendInit(this); // useless
group->SendUpdate();
group->ResetMaxEnchantingLevel();
}
// friend status
sSocialMgr->SendFriendStatus(pCurrChar, FRIEND_ONLINE, pCurrChar->GetGUIDLow(), true);
// Place character in world (and load zone) before some object loading
pCurrChar->LoadCorpse();
// setting Ghost+speed if dead
if (pCurrChar->m_deathState != ALIVE)
{
// not blizz like, we must correctly save and load player instead...
if (pCurrChar->getRace() == RACE_NIGHTELF)
pCurrChar->CastSpell(pCurrChar, 20584, true, nullptr);// auras SPELL_AURA_INCREASE_SPEED(+speed in wisp form), SPELL_AURA_INCREASE_SWIM_SPEED(+swim speed in wisp form), SPELL_AURA_TRANSFORM (to wisp form)
pCurrChar->CastSpell(pCurrChar, 8326, true, nullptr); // auras SPELL_AURA_GHOST, SPELL_AURA_INCREASE_SPEED(why?), SPELL_AURA_INCREASE_SWIM_SPEED(why?)
pCurrChar->SetMovement(MOVE_WATER_WALK);
}
pCurrChar->ContinueTaxiFlight();
// reset for all pets before pet loading
if (pCurrChar->HasAtLoginFlag(AT_LOGIN_RESET_PET_TALENTS))
Pet::resetTalentsForAllPetsOf(pCurrChar);
// Load pet if any (if player not alive and in taxi flight or another then pet will remember as temporary unsummoned)
pCurrChar->LoadPet();
// Set FFA PvP for non GM in non-rest mode
if (sWorld->IsFFAPvPRealm() && !pCurrChar->IsGameMaster() && !pCurrChar->HasFlag(PLAYER_FLAGS, PLAYER_FLAGS_RESTING))
pCurrChar->SetByteFlag(UNIT_FIELD_BYTES_2, 1, UNIT_BYTE2_FLAG_FFA_PVP);
if (pCurrChar->HasFlag(PLAYER_FLAGS, PLAYER_FLAGS_CONTESTED_PVP))
pCurrChar->SetContestedPvP();
// Apply at_login requests
if (pCurrChar->HasAtLoginFlag(AT_LOGIN_RESET_SPELLS))
{
pCurrChar->ResetSpells();
SendNotification(LANG_RESET_SPELLS);
}
if (pCurrChar->HasAtLoginFlag(AT_LOGIN_RESET_TALENTS))
{
pCurrChar->ResetTalents(true);
pCurrChar->SendTalentsInfoData(false); // original talents send already in to SendInitialPacketsBeforeAddToMap, resend reset state
SendNotification(LANG_RESET_TALENTS);
}
bool firstLogin = pCurrChar->HasAtLoginFlag(AT_LOGIN_FIRST);
if (firstLogin)
pCurrChar->RemoveAtLoginFlag(AT_LOGIN_FIRST);
// show time before shutdown if shutdown planned.
if (sWorld->IsShuttingDown())
sWorld->ShutdownMsg(true, pCurrChar);
if (sWorld->getBoolConfig(CONFIG_ALL_TAXI_PATHS))
pCurrChar->SetTaxiCheater(true);
if (pCurrChar->IsGameMaster())
SendNotification(LANG_GM_ON);
std::string IP_str = GetRemoteAddress();
TC_LOG_INFO("entities.player.character", "Account: %d (IP: %s) Login Character:[%s] (GUID: %u) Level: %d",
GetAccountId(), IP_str.c_str(), pCurrChar->GetName().c_str(), pCurrChar->GetGUIDLow(), pCurrChar->getLevel());
if (!pCurrChar->IsStandState() && !pCurrChar->HasUnitState(UNIT_STATE_STUNNED))
pCurrChar->SetStandState(UNIT_STAND_STATE_STAND);
m_playerLoading = false;
// Handle Login-Achievements (should be handled after loading)
_player->UpdateAchievementCriteria(ACHIEVEMENT_CRITERIA_TYPE_ON_LOGIN, 1);
sScriptMgr->OnPlayerLogin(pCurrChar, firstLogin);
delete holder;
}
void WorldSession::HandleSetFactionAtWar(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "WORLD: Received CMSG_SET_FACTION_ATWAR");
uint32 repListID;
uint8 flag;
recvData >> repListID;
recvData >> flag;
GetPlayer()->GetReputationMgr().SetAtWar(repListID, flag != 0);
}
//I think this function is never used :/ I dunno, but i guess this opcode not exists
void WorldSession::HandleSetFactionCheat(WorldPacket& /*recvData*/)
{
TC_LOG_ERROR("network", "WORLD SESSION: HandleSetFactionCheat, not expected call, please report.");
GetPlayer()->GetReputationMgr().SendStates();
}
void WorldSession::HandleTutorialFlag(WorldPacket& recvData)
{
uint32 data;
recvData >> data;
uint8 index = uint8(data / 32);
if (index >= MAX_ACCOUNT_TUTORIAL_VALUES)
return;
uint32 value = (data % 32);
uint32 flag = GetTutorialInt(index);
flag |= (1 << value);
SetTutorialInt(index, flag);
}
void WorldSession::HandleTutorialClear(WorldPacket& /*recvData*/)
{
for (uint8 i = 0; i < MAX_ACCOUNT_TUTORIAL_VALUES; ++i)
SetTutorialInt(i, 0xFFFFFFFF);
}
void WorldSession::HandleTutorialReset(WorldPacket& /*recvData*/)
{
for (uint8 i = 0; i < MAX_ACCOUNT_TUTORIAL_VALUES; ++i)
SetTutorialInt(i, 0x00000000);
}
void WorldSession::HandleSetWatchedFactionOpcode(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "WORLD: Received CMSG_SET_WATCHED_FACTION");
uint32 fact;
recvData >> fact;
GetPlayer()->SetUInt32Value(PLAYER_FIELD_WATCHED_FACTION_INDEX, fact);
}
void WorldSession::HandleSetFactionInactiveOpcode(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "WORLD: Received CMSG_SET_FACTION_INACTIVE");
uint32 replistid;
uint8 inactive;
recvData >> replistid >> inactive;
_player->GetReputationMgr().SetInactive(replistid, inactive != 0);
}
void WorldSession::HandleShowingHelmOpcode(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "CMSG_SHOWING_HELM for %s", _player->GetName().c_str());
recvData.read_skip<uint8>(); // unknown, bool?
_player->ToggleFlag(PLAYER_FLAGS, PLAYER_FLAGS_HIDE_HELM);
}
void WorldSession::HandleShowingCloakOpcode(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "CMSG_SHOWING_CLOAK for %s", _player->GetName().c_str());
recvData.read_skip<uint8>(); // unknown, bool?
_player->ToggleFlag(PLAYER_FLAGS, PLAYER_FLAGS_HIDE_CLOAK);
}
void WorldSession::HandleCharRenameOpcode(WorldPacket& recvData)
{
CharacterRenameInfo renameInfo;
recvData >> renameInfo.Guid
>> renameInfo.Name;
// prevent character rename to invalid name
if (!normalizePlayerName(renameInfo.Name))
{
SendCharRename(CHAR_NAME_NO_NAME, renameInfo);
return;
}
ResponseCodes res = ObjectMgr::CheckPlayerName(renameInfo.Name, true);
if (res != CHAR_NAME_SUCCESS)
{
SendCharRename(res, renameInfo);
return;
}
// check name limitations
if (!HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_RESERVEDNAME) && sObjectMgr->IsReservedName(renameInfo.Name))
{
SendCharRename(CHAR_NAME_RESERVED, renameInfo);
return;
}
// Ensure that the character belongs to the current account, that rename at login is enabled
// and that there is no character with the desired new name
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_FREE_NAME);
stmt->setUInt32(0, renameInfo.Guid.GetCounter());
stmt->setUInt32(1, GetAccountId());
stmt->setUInt16(2, AT_LOGIN_RENAME);
stmt->setUInt16(3, AT_LOGIN_RENAME);
stmt->setString(4, renameInfo.Name);
delete _charRenameCallback.GetParam();
_charRenameCallback.SetParam(new CharacterRenameInfo(std::move(renameInfo)));
_charRenameCallback.SetFutureResult(CharacterDatabase.AsyncQuery(stmt));
}
void WorldSession::HandleChangePlayerNameOpcodeCallBack(PreparedQueryResult result, CharacterRenameInfo const* renameInfo)
{
if (!result)
{
SendCharRename(CHAR_CREATE_ERROR, *renameInfo);
return;
}
Field* fields = result->Fetch();
uint32 guidLow = fields[0].GetUInt32();
std::string oldName = fields[1].GetString();
// Update name and at_login flag in the db
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_NAME);
stmt->setString(0, renameInfo->Name);
stmt->setUInt16(1, AT_LOGIN_RENAME);
stmt->setUInt32(2, guidLow);
CharacterDatabase.Execute(stmt);
// Removed declined name from db
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_DECLINED_NAME);
stmt->setUInt32(0, guidLow);
CharacterDatabase.Execute(stmt);
TC_LOG_INFO("entities.player.character", "Account: %d (IP: %s) Character:[%s] (%s) Changed name to: %s", GetAccountId(), GetRemoteAddress().c_str(), oldName.c_str(), renameInfo->Guid.ToString().c_str(), renameInfo->Name.c_str());
SendCharRename(RESPONSE_SUCCESS, *renameInfo);
sWorld->UpdateCharacterNameData(renameInfo->Guid, renameInfo->Name);
}
void WorldSession::HandleSetPlayerDeclinedNames(WorldPacket& recvData)
{
ObjectGuid guid;
recvData >> guid;
// not accept declined names for unsupported languages
std::string name;
if (!sObjectMgr->GetPlayerNameByGUID(guid, name))
{
SendSetPlayerDeclinedNamesResult(DECLINED_NAMES_RESULT_ERROR, guid);
return;
}
std::wstring wname;
if (!Utf8toWStr(name, wname))
{
SendSetPlayerDeclinedNamesResult(DECLINED_NAMES_RESULT_ERROR, guid);
return;
}
if (!isCyrillicCharacter(wname[0])) // name already stored as only single alphabet using
{
SendSetPlayerDeclinedNamesResult(DECLINED_NAMES_RESULT_ERROR, guid);
return;
}
std::string name2;
DeclinedName declinedname;
recvData >> name2;
if (name2 != name) // character have different name
{
SendSetPlayerDeclinedNamesResult(DECLINED_NAMES_RESULT_ERROR, guid);
return;
}
for (int i = 0; i < MAX_DECLINED_NAME_CASES; ++i)
{
recvData >> declinedname.name[i];
if (!normalizePlayerName(declinedname.name[i]))
{
SendSetPlayerDeclinedNamesResult(DECLINED_NAMES_RESULT_ERROR, guid);
return;
}
}
if (!ObjectMgr::CheckDeclinedNames(wname, declinedname))
{
SendSetPlayerDeclinedNamesResult(DECLINED_NAMES_RESULT_ERROR, guid);
return;
}
for (int i = 0; i < MAX_DECLINED_NAME_CASES; ++i)
CharacterDatabase.EscapeString(declinedname.name[i]);
SQLTransaction trans = CharacterDatabase.BeginTransaction();
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_DECLINED_NAME);
stmt->setUInt32(0, guid.GetCounter());
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_INS_CHAR_DECLINED_NAME);
stmt->setUInt32(0, guid.GetCounter());
for (uint8 i = 0; i < 5; i++)
stmt->setString(i+1, declinedname.name[i]);
trans->Append(stmt);
CharacterDatabase.CommitTransaction(trans);
SendSetPlayerDeclinedNamesResult(DECLINED_NAMES_RESULT_SUCCESS, guid);
}
void WorldSession::HandleAlterAppearance(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "CMSG_ALTER_APPEARANCE");
uint32 Hair, Color, FacialHair, SkinColor;
recvData >> Hair >> Color >> FacialHair >> SkinColor;
BarberShopStyleEntry const* bs_hair = sBarberShopStyleStore.LookupEntry(Hair);
if (!bs_hair || bs_hair->type != 0 || bs_hair->race != _player->getRace() || bs_hair->gender != _player->getGender())
return;
BarberShopStyleEntry const* bs_facialHair = sBarberShopStyleStore.LookupEntry(FacialHair);
if (!bs_facialHair || bs_facialHair->type != 2 || bs_facialHair->race != _player->getRace() || bs_facialHair->gender != _player->getGender())
return;
BarberShopStyleEntry const* bs_skinColor = sBarberShopStyleStore.LookupEntry(SkinColor);
if (bs_skinColor && (bs_skinColor->type != 3 || bs_skinColor->race != _player->getRace() || bs_skinColor->gender != _player->getGender()))
return;
if (!Player::ValidateAppearance(_player->getRace(), _player->getClass(), _player->getGender(), bs_hair->hair_id, Color, uint8(_player->GetUInt32Value(PLAYER_FLAGS) >> 8), bs_facialHair->hair_id, bs_skinColor ? bs_skinColor->hair_id : 0))
return;
GameObject* go = _player->FindNearestGameObjectOfType(GAMEOBJECT_TYPE_BARBER_CHAIR, 5.0f);
if (!go)
{
SendBarberShopResult(BARBER_SHOP_RESULT_NOT_ON_CHAIR);
return;
}
if (_player->getStandState() != UNIT_STAND_STATE_SIT_LOW_CHAIR + go->GetGOInfo()->barberChair.chairheight)
{
SendBarberShopResult(BARBER_SHOP_RESULT_NOT_ON_CHAIR);
return;
}
uint32 cost = _player->GetBarberShopCost(bs_hair->hair_id, Color, bs_facialHair->hair_id, bs_skinColor);
// 0 - ok
// 1, 3 - not enough money
// 2 - you have to seat on barber chair
if (!_player->HasEnoughMoney(cost))
{
SendBarberShopResult(BARBER_SHOP_RESULT_NO_MONEY);
return;
}
SendBarberShopResult(BARBER_SHOP_RESULT_SUCCESS);
_player->ModifyMoney(-int32(cost)); // it isn't free
_player->UpdateAchievementCriteria(ACHIEVEMENT_CRITERIA_TYPE_GOLD_SPENT_AT_BARBER, cost);
_player->SetByteValue(PLAYER_BYTES, 2, uint8(bs_hair->hair_id));
_player->SetByteValue(PLAYER_BYTES, 3, uint8(Color));
_player->SetByteValue(PLAYER_BYTES_2, 0, uint8(bs_facialHair->hair_id));
if (bs_skinColor)
_player->SetByteValue(PLAYER_BYTES, 0, uint8(bs_skinColor->hair_id));
_player->UpdateAchievementCriteria(ACHIEVEMENT_CRITERIA_TYPE_VISIT_BARBER_SHOP, 1);
_player->SetStandState(0); // stand up
}
void WorldSession::HandleRemoveGlyph(WorldPacket& recvData)
{
uint32 slot;
recvData >> slot;
if (slot >= MAX_GLYPH_SLOT_INDEX)
{
TC_LOG_DEBUG("network", "Client sent wrong glyph slot number in opcode CMSG_REMOVE_GLYPH %u", slot);
return;
}
if (uint32 glyph = _player->GetGlyph(slot))
{
if (GlyphPropertiesEntry const* gp = sGlyphPropertiesStore.LookupEntry(glyph))
{
_player->RemoveAurasDueToSpell(gp->SpellId);
_player->SetGlyph(slot, 0);
_player->SendTalentsInfoData(false);
}
}
}
void WorldSession::HandleCharCustomize(WorldPacket& recvData)
{
CharacterCustomizeInfo customizeInfo;
recvData >> customizeInfo.Guid;
if (!IsLegitCharacterForAccount(customizeInfo.Guid))
{
TC_LOG_ERROR("network", "Account %u, IP: %s tried to customise %s, but it does not belong to their account!",
GetAccountId(), GetRemoteAddress().c_str(), customizeInfo.Guid.ToString().c_str());
recvData.rfinish();
KickPlayer();
return;
}
recvData >> customizeInfo.Name
>> customizeInfo.Gender
>> customizeInfo.Skin
>> customizeInfo.HairColor
>> customizeInfo.HairStyle
>> customizeInfo.FacialHair
>> customizeInfo.Face;
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_NAME_DATA);
stmt->setUInt32(0, customizeInfo.Guid.GetCounter());
PreparedQueryResult result = CharacterDatabase.Query(stmt);
if (!result)
{
SendCharCustomize(CHAR_CREATE_ERROR, customizeInfo);
return;
}
Field* fields = result->Fetch();
uint8 plrRace = fields[0].GetUInt8();
uint8 plrClass = fields[1].GetUInt8();
uint8 plrGender = fields[2].GetUInt8();
if (!Player::ValidateAppearance(plrRace, plrClass, plrGender, customizeInfo.HairStyle, customizeInfo.HairColor, customizeInfo.Face, customizeInfo.FacialHair, customizeInfo.Skin, true))
{
SendCharCustomize(CHAR_CREATE_ERROR, customizeInfo);
return;
}
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_AT_LOGIN);
stmt->setUInt32(0, customizeInfo.Guid.GetCounter());
// TODO: Make async with callback
result = CharacterDatabase.Query(stmt);
if (!result)
{
SendCharCustomize(CHAR_CREATE_ERROR, customizeInfo);
return;
}
fields = result->Fetch();<|fim▁hole|> uint32 at_loginFlags = fields[0].GetUInt16();
if (!(at_loginFlags & AT_LOGIN_CUSTOMIZE))
{
SendCharCustomize(CHAR_CREATE_ERROR, customizeInfo);
return;
}
// prevent character rename to invalid name
if (!normalizePlayerName(customizeInfo.Name))
{
SendCharCustomize(CHAR_NAME_NO_NAME, customizeInfo);
return;
}
ResponseCodes res = ObjectMgr::CheckPlayerName(customizeInfo.Name, true);
if (res != CHAR_NAME_SUCCESS)
{
SendCharCustomize(res, customizeInfo);
return;
}
// check name limitations
if (!HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_RESERVEDNAME) && sObjectMgr->IsReservedName(customizeInfo.Name))
{
SendCharCustomize(CHAR_NAME_RESERVED, customizeInfo);
return;
}
// character with this name already exist
if (ObjectGuid newGuid = sObjectMgr->GetPlayerGUIDByName(customizeInfo.Name))
{
if (newGuid != customizeInfo.Guid)
{
SendCharCustomize(CHAR_CREATE_NAME_IN_USE, customizeInfo);
return;
}
}
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHARACTER_NAME);
stmt->setUInt32(0, customizeInfo.Guid.GetCounter());
result = CharacterDatabase.Query(stmt);
if (result)
{
std::string oldname = result->Fetch()[0].GetString();
TC_LOG_INFO("entities.player.character", "Account: %d (IP: %s), Character[%s] (%s) Customized to: %s",
GetAccountId(), GetRemoteAddress().c_str(), oldname.c_str(), customizeInfo.Guid.ToString().c_str(), customizeInfo.Name.c_str());
}
SQLTransaction trans = CharacterDatabase.BeginTransaction();
Player::Customize(&customizeInfo, trans);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_NAME_AT_LOGIN);
stmt->setString(0, customizeInfo.Name);
stmt->setUInt16(1, uint16(AT_LOGIN_CUSTOMIZE));
stmt->setUInt32(2, customizeInfo.Guid.GetCounter());
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_DECLINED_NAME);
stmt->setUInt32(0, customizeInfo.Guid.GetCounter());
trans->Append(stmt);
CharacterDatabase.CommitTransaction(trans);
sWorld->UpdateCharacterNameData(customizeInfo.Guid, customizeInfo.Name, customizeInfo.Gender);
SendCharCustomize(RESPONSE_SUCCESS, customizeInfo);
}
void WorldSession::HandleEquipmentSetSave(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "CMSG_EQUIPMENT_SET_SAVE");
uint64 setGuid;
recvData.readPackGUID(setGuid);
uint32 index;
recvData >> index;
if (index >= MAX_EQUIPMENT_SET_INDEX) // client set slots amount
return;
std::string name;
recvData >> name;
std::string iconName;
recvData >> iconName;
EquipmentSet eqSet;
eqSet.Guid = setGuid;
eqSet.Name = name;
eqSet.IconName = iconName;
eqSet.state = EQUIPMENT_SET_NEW;
for (uint32 i = 0; i < EQUIPMENT_SLOT_END; ++i)
{
ObjectGuid itemGuid;
recvData >> itemGuid.ReadAsPacked();
// equipment manager sends "1" (as raw GUID) for slots set to "ignore" (don't touch slot at equip set)
if (itemGuid.GetRawValue() == 1)
{
// ignored slots saved as bit mask because we have no free special values for Items[i]
eqSet.IgnoreMask |= 1 << i;
continue;
}
Item* item = _player->GetItemByPos(INVENTORY_SLOT_BAG_0, i);
if (!item && itemGuid) // cheating check 1
return;
if (item && item->GetGUID() != itemGuid) // cheating check 2
return;
eqSet.Items[i] = itemGuid.GetCounter();
}
_player->SetEquipmentSet(index, eqSet);
}
void WorldSession::HandleEquipmentSetDelete(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "CMSG_EQUIPMENT_SET_DELETE");
uint64 setGuid;
recvData.readPackGUID(setGuid);
_player->DeleteEquipmentSet(setGuid);
}
void WorldSession::HandleEquipmentSetUse(WorldPacket& recvData)
{
TC_LOG_DEBUG("network", "CMSG_EQUIPMENT_SET_USE");
for (uint32 i = 0; i < EQUIPMENT_SLOT_END; ++i)
{
ObjectGuid itemGuid;
recvData >> itemGuid.ReadAsPacked();
uint8 srcbag, srcslot;
recvData >> srcbag >> srcslot;
TC_LOG_DEBUG("entities.player.items", "%s: srcbag %u, srcslot %u", itemGuid.ToString().c_str(), srcbag, srcslot);
// check if item slot is set to "ignored" (raw value == 1), must not be unequipped then
if (itemGuid.GetRawValue() == 1)
continue;
// Only equip weapons in combat
if (_player->IsInCombat() && i != EQUIPMENT_SLOT_MAINHAND && i != EQUIPMENT_SLOT_OFFHAND && i != EQUIPMENT_SLOT_RANGED)
continue;
Item* item = _player->GetItemByGuid(itemGuid);
uint16 dstpos = i | (INVENTORY_SLOT_BAG_0 << 8);
if (!item)
{
Item* uItem = _player->GetItemByPos(INVENTORY_SLOT_BAG_0, i);
if (!uItem)
continue;
ItemPosCountVec sDest;
InventoryResult msg = _player->CanStoreItem(NULL_BAG, NULL_SLOT, sDest, uItem, false);
if (msg == EQUIP_ERR_OK)
{
_player->RemoveItem(INVENTORY_SLOT_BAG_0, i, true);
_player->StoreItem(sDest, uItem, true);
}
else
_player->SendEquipError(msg, uItem, NULL);
continue;
}
if (item->GetPos() == dstpos)
continue;
_player->SwapItem(item->GetPos(), dstpos);
}
WorldPacket data(SMSG_EQUIPMENT_SET_USE_RESULT, 1);
data << uint8(0); // 4 - equipment swap failed - inventory is full
SendPacket(&data);
}
void WorldSession::HandleCharFactionOrRaceChange(WorldPacket& recvData)
{
CharacterFactionChangeInfo factionChangeInfo;
recvData >> factionChangeInfo.Guid;
if (!IsLegitCharacterForAccount(factionChangeInfo.Guid))
{
TC_LOG_ERROR("network", "Account %u, IP: %s tried to factionchange character %s, but it does not belong to their account!",
GetAccountId(), GetRemoteAddress().c_str(), factionChangeInfo.Guid.ToString().c_str());
recvData.rfinish();
KickPlayer();
return;
}
recvData >> factionChangeInfo.Name
>> factionChangeInfo.Gender
>> factionChangeInfo.Skin
>> factionChangeInfo.HairColor
>> factionChangeInfo.HairStyle
>> factionChangeInfo.FacialHair
>> factionChangeInfo.Face
>> factionChangeInfo.Race;
uint32 lowGuid = factionChangeInfo.Guid.GetCounter();
// get the players old (at this moment current) race
CharacterNameData const* nameData = sWorld->GetCharacterNameData(factionChangeInfo.Guid);
if (!nameData)
{
SendCharFactionChange(CHAR_CREATE_ERROR, factionChangeInfo);
return;
}
uint8 oldRace = nameData->m_race;
uint8 playerClass = nameData->m_class;
uint8 level = nameData->m_level;
// TO Do: Make async
PreparedStatement* stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHAR_AT_LOGIN_TITLES);
stmt->setUInt32(0, lowGuid);
PreparedQueryResult result = CharacterDatabase.Query(stmt);
if (!result)
{
SendCharFactionChange(CHAR_CREATE_ERROR, factionChangeInfo);
return;
}
Field* fields = result->Fetch();
uint32 at_loginFlags = fields[0].GetUInt16();
std::string knownTitlesStr = fields[1].GetString();
uint32 used_loginFlag = ((recvData.GetOpcode() == CMSG_CHAR_RACE_CHANGE) ? AT_LOGIN_CHANGE_RACE : AT_LOGIN_CHANGE_FACTION);
if (!sObjectMgr->GetPlayerInfo(factionChangeInfo.Race, playerClass))
{
SendCharFactionChange(CHAR_CREATE_ERROR, factionChangeInfo);
return;
}
if (!(at_loginFlags & used_loginFlag))
{
SendCharFactionChange(CHAR_CREATE_ERROR, factionChangeInfo);
return;
}
if (!HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_RACEMASK))
{
uint32 raceMaskDisabled = sWorld->getIntConfig(CONFIG_CHARACTER_CREATING_DISABLED_RACEMASK);
if ((1 << (factionChangeInfo.Race - 1)) & raceMaskDisabled)
{
SendCharFactionChange(CHAR_CREATE_ERROR, factionChangeInfo);
return;
}
}
// prevent character rename to invalid name
if (!normalizePlayerName(factionChangeInfo.Name))
{
SendCharFactionChange(CHAR_NAME_NO_NAME, factionChangeInfo);
return;
}
ResponseCodes res = ObjectMgr::CheckPlayerName(factionChangeInfo.Name, true);
if (res != CHAR_NAME_SUCCESS)
{
SendCharFactionChange(res, factionChangeInfo);
return;
}
// check name limitations
if (!HasPermission(rbac::RBAC_PERM_SKIP_CHECK_CHARACTER_CREATION_RESERVEDNAME) && sObjectMgr->IsReservedName(factionChangeInfo.Name))
{
SendCharFactionChange(CHAR_NAME_RESERVED, factionChangeInfo);
return;
}
// character with this name already exist
if (ObjectGuid newGuid = sObjectMgr->GetPlayerGUIDByName(factionChangeInfo.Name))
{
if (newGuid != factionChangeInfo.Guid)
{
SendCharFactionChange(CHAR_CREATE_NAME_IN_USE, factionChangeInfo);
return;
}
}
// resurrect the character in case he's dead
sObjectAccessor->ConvertCorpseForPlayer(factionChangeInfo.Guid);
SQLTransaction trans = CharacterDatabase.BeginTransaction();
CharacterDatabase.EscapeString(factionChangeInfo.Name);
Player::Customize(&factionChangeInfo, trans);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_FACTION_OR_RACE);
stmt->setString(0, factionChangeInfo.Name);
stmt->setUInt8(1, factionChangeInfo.Race);
stmt->setUInt16(2, used_loginFlag);
stmt->setUInt32(3, lowGuid);
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_DECLINED_NAME);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
sWorld->UpdateCharacterNameData(factionChangeInfo.Guid, factionChangeInfo.Name, factionChangeInfo.Gender, factionChangeInfo.Race);
if (oldRace != factionChangeInfo.Race)
{
TeamId team = TEAM_ALLIANCE;
// Search each faction is targeted
switch (factionChangeInfo.Race)
{
case RACE_ORC:
case RACE_TAUREN:
case RACE_UNDEAD_PLAYER:
case RACE_TROLL:
case RACE_BLOODELF:
team = TEAM_HORDE;
break;
default:
break;
}
// Switch Languages
// delete all languages first
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_SKILL_LANGUAGES);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
// Now add them back
stmt = CharacterDatabase.GetPreparedStatement(CHAR_INS_CHAR_SKILL_LANGUAGE);
stmt->setUInt32(0, lowGuid);
// Faction specific languages
if (team == TEAM_HORDE)
stmt->setUInt16(1, 109);
else
stmt->setUInt16(1, 98);
trans->Append(stmt);
// Race specific languages
if (factionChangeInfo.Race != RACE_ORC && factionChangeInfo.Race != RACE_HUMAN)
{
stmt = CharacterDatabase.GetPreparedStatement(CHAR_INS_CHAR_SKILL_LANGUAGE);
stmt->setUInt32(0, lowGuid);
switch (factionChangeInfo.Race)
{
case RACE_DWARF:
stmt->setUInt16(1, 111);
break;
case RACE_DRAENEI:
stmt->setUInt16(1, 759);
break;
case RACE_GNOME:
stmt->setUInt16(1, 313);
break;
case RACE_NIGHTELF:
stmt->setUInt16(1, 113);
break;
case RACE_UNDEAD_PLAYER:
stmt->setUInt16(1, 673);
break;
case RACE_TAUREN:
stmt->setUInt16(1, 115);
break;
case RACE_TROLL:
stmt->setUInt16(1, 315);
break;
case RACE_BLOODELF:
stmt->setUInt16(1, 137);
break;
}
trans->Append(stmt);
}
if (recvData.GetOpcode() == CMSG_CHAR_FACTION_CHANGE)
{
// Delete all Flypaths
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_TAXI_PATH);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
if (level > 7)
{
// Update Taxi path
// this doesn't seem to be 100% blizzlike... but it can't really be helped.
std::ostringstream taximaskstream;
uint32 numFullTaximasks = level / 7;
if (numFullTaximasks > 11)
numFullTaximasks = 11;
if (team == TEAM_ALLIANCE)
{
if (playerClass != CLASS_DEATH_KNIGHT)
{
for (uint8 i = 0; i < numFullTaximasks; ++i)
taximaskstream << uint32(sAllianceTaxiNodesMask[i]) << ' ';
}
else
{
for (uint8 i = 0; i < numFullTaximasks; ++i)
taximaskstream << uint32(sAllianceTaxiNodesMask[i] | sDeathKnightTaxiNodesMask[i]) << ' ';
}
}
else
{
if (playerClass != CLASS_DEATH_KNIGHT)
{
for (uint8 i = 0; i < numFullTaximasks; ++i)
taximaskstream << uint32(sHordeTaxiNodesMask[i]) << ' ';
}
else
{
for (uint8 i = 0; i < numFullTaximasks; ++i)
taximaskstream << uint32(sHordeTaxiNodesMask[i] | sDeathKnightTaxiNodesMask[i]) << ' ';
}
}
uint32 numEmptyTaximasks = 11 - numFullTaximasks;
for (uint8 i = 0; i < numEmptyTaximasks; ++i)
taximaskstream << "0 ";
taximaskstream << '0';
std::string taximask = taximaskstream.str();
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_TAXIMASK);
stmt->setString(0, taximask);
stmt->setUInt32(1, lowGuid);
trans->Append(stmt);
}
if (!sWorld->getBoolConfig(CONFIG_ALLOW_TWO_SIDE_INTERACTION_GUILD))
{
// Reset guild
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_GUILD_MEMBER);
stmt->setUInt32(0, lowGuid);
PreparedQueryResult result = CharacterDatabase.Query(stmt);
if (result)
if (Guild* guild = sGuildMgr->GetGuildById((result->Fetch()[0]).GetUInt32()))
guild->DeleteMember(factionChangeInfo.Guid, false, false, true);
Player::LeaveAllArenaTeams(factionChangeInfo.Guid);
}
if (!HasPermission(rbac::RBAC_PERM_TWO_SIDE_ADD_FRIEND))
{
// Delete Friend List
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_SOCIAL_BY_GUID);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_SOCIAL_BY_FRIEND);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
}
// Reset homebind and position
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_PLAYER_HOMEBIND);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_INS_PLAYER_HOMEBIND);
stmt->setUInt32(0, lowGuid);
WorldLocation loc;
uint16 zoneId = 0;
if (team == TEAM_ALLIANCE)
{
loc.WorldRelocate(0, -8867.68f, 673.373f, 97.9034f, 0.0f);
zoneId = 1519;
}
else
{
loc.WorldRelocate(1, 1633.33f, -4439.11f, 15.7588f, 0.0f);
zoneId = 1637;
}
stmt->setUInt16(1, loc.GetMapId());
stmt->setUInt16(2, zoneId);
stmt->setFloat(3, loc.GetPositionX());
stmt->setFloat(4, loc.GetPositionY());
stmt->setFloat(5, loc.GetPositionZ());
trans->Append(stmt);
Player::SavePositionInDB(loc, zoneId, factionChangeInfo.Guid, trans);
// Achievement conversion
for (std::map<uint32, uint32>::const_iterator it = sObjectMgr->FactionChangeAchievements.begin(); it != sObjectMgr->FactionChangeAchievements.end(); ++it)
{
uint32 achiev_alliance = it->first;
uint32 achiev_horde = it->second;
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_ACHIEVEMENT_BY_ACHIEVEMENT);
stmt->setUInt16(0, uint16(team == TEAM_ALLIANCE ? achiev_alliance : achiev_horde));
stmt->setUInt32(1, lowGuid);
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_ACHIEVEMENT);
stmt->setUInt16(0, uint16(team == TEAM_ALLIANCE ? achiev_alliance : achiev_horde));
stmt->setUInt16(1, uint16(team == TEAM_ALLIANCE ? achiev_horde : achiev_alliance));
stmt->setUInt32(2, lowGuid);
trans->Append(stmt);
}
// Item conversion
for (std::map<uint32, uint32>::const_iterator it = sObjectMgr->FactionChangeItems.begin(); it != sObjectMgr->FactionChangeItems.end(); ++it)
{
uint32 item_alliance = it->first;
uint32 item_horde = it->second;
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_INVENTORY_FACTION_CHANGE);
stmt->setUInt32(0, (team == TEAM_ALLIANCE ? item_alliance : item_horde));
stmt->setUInt32(1, (team == TEAM_ALLIANCE ? item_horde : item_alliance));
stmt->setUInt32(2, lowGuid);
trans->Append(stmt);
}
// Delete all current quests
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_QUESTSTATUS);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
// Quest conversion
for (std::map<uint32, uint32>::const_iterator it = sObjectMgr->FactionChangeQuests.begin(); it != sObjectMgr->FactionChangeQuests.end(); ++it)
{
uint32 quest_alliance = it->first;
uint32 quest_horde = it->second;
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_QUESTSTATUS_REWARDED_BY_QUEST);
stmt->setUInt32(0, lowGuid);
stmt->setUInt32(1, (team == TEAM_ALLIANCE ? quest_alliance : quest_horde));
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_QUESTSTATUS_REWARDED_FACTION_CHANGE);
stmt->setUInt32(0, (team == TEAM_ALLIANCE ? quest_alliance : quest_horde));
stmt->setUInt32(1, (team == TEAM_ALLIANCE ? quest_horde : quest_alliance));
stmt->setUInt32(2, lowGuid);
trans->Append(stmt);
}
// Mark all rewarded quests as "active" (will count for completed quests achievements)
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_QUESTSTATUS_REWARDED_ACTIVE);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
// Disable all old-faction specific quests
{
ObjectMgr::QuestMap const& questTemplates = sObjectMgr->GetQuestTemplates();
for (ObjectMgr::QuestMap::const_iterator iter = questTemplates.begin(); iter != questTemplates.end(); ++iter)
{
Quest const* quest = iter->second;
uint32 newRaceMask = (team == TEAM_ALLIANCE) ? RACEMASK_ALLIANCE : RACEMASK_HORDE;
if (!(quest->GetRequiredRaces() & newRaceMask))
{
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_QUESTSTATUS_REWARDED_ACTIVE_BY_QUEST);
stmt->setUInt32(0, lowGuid);
stmt->setUInt32(1, quest->GetQuestId());
trans->Append(stmt);
}
}
}
// Spell conversion
for (std::map<uint32, uint32>::const_iterator it = sObjectMgr->FactionChangeSpells.begin(); it != sObjectMgr->FactionChangeSpells.end(); ++it)
{
uint32 spell_alliance = it->first;
uint32 spell_horde = it->second;
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_SPELL_BY_SPELL);
stmt->setUInt32(0, (team == TEAM_ALLIANCE ? spell_alliance : spell_horde));
stmt->setUInt32(1, lowGuid);
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_SPELL_FACTION_CHANGE);
stmt->setUInt32(0, (team == TEAM_ALLIANCE ? spell_alliance : spell_horde));
stmt->setUInt32(1, (team == TEAM_ALLIANCE ? spell_horde : spell_alliance));
stmt->setUInt32(2, lowGuid);
trans->Append(stmt);
}
// Reputation conversion
for (std::map<uint32, uint32>::const_iterator it = sObjectMgr->FactionChangeReputation.begin(); it != sObjectMgr->FactionChangeReputation.end(); ++it)
{
uint32 reputation_alliance = it->first;
uint32 reputation_horde = it->second;
uint32 newReputation = (team == TEAM_ALLIANCE) ? reputation_alliance : reputation_horde;
uint32 oldReputation = (team == TEAM_ALLIANCE) ? reputation_horde : reputation_alliance;
// select old standing set in db
stmt = CharacterDatabase.GetPreparedStatement(CHAR_SEL_CHAR_REP_BY_FACTION);
stmt->setUInt32(0, oldReputation);
stmt->setUInt32(1, lowGuid);
if (PreparedQueryResult result = CharacterDatabase.Query(stmt))
{
Field* fields = result->Fetch();
int32 oldDBRep = fields[0].GetInt32();
FactionEntry const* factionEntry = sFactionStore.LookupEntry(oldReputation);
// old base reputation
int32 oldBaseRep = sObjectMgr->GetBaseReputationOf(factionEntry, oldRace, playerClass);
// new base reputation
int32 newBaseRep = sObjectMgr->GetBaseReputationOf(sFactionStore.LookupEntry(newReputation), factionChangeInfo.Race, playerClass);
// final reputation shouldnt change
int32 FinalRep = oldDBRep + oldBaseRep;
int32 newDBRep = FinalRep - newBaseRep;
stmt = CharacterDatabase.GetPreparedStatement(CHAR_DEL_CHAR_REP_BY_FACTION);
stmt->setUInt32(0, newReputation);
stmt->setUInt32(1, lowGuid);
trans->Append(stmt);
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_REP_FACTION_CHANGE);
stmt->setUInt16(0, uint16(newReputation));
stmt->setInt32(1, newDBRep);
stmt->setUInt16(2, uint16(oldReputation));
stmt->setUInt32(3, lowGuid);
trans->Append(stmt);
}
}
// Title conversion
if (!knownTitlesStr.empty())
{
const uint32 ktcount = KNOWN_TITLES_SIZE * 2;
uint32 knownTitles[ktcount];
Tokenizer tokens(knownTitlesStr, ' ', ktcount);
if (tokens.size() != ktcount)
{
SendCharFactionChange(CHAR_CREATE_ERROR, factionChangeInfo);
return;
}
for (uint32 index = 0; index < ktcount; ++index)
knownTitles[index] = atoul(tokens[index]);
for (std::map<uint32, uint32>::const_iterator it = sObjectMgr->FactionChangeTitles.begin(); it != sObjectMgr->FactionChangeTitles.end(); ++it)
{
uint32 title_alliance = it->first;
uint32 title_horde = it->second;
CharTitlesEntry const* atitleInfo = sCharTitlesStore.LookupEntry(title_alliance);
CharTitlesEntry const* htitleInfo = sCharTitlesStore.LookupEntry(title_horde);
// new team
if (team == TEAM_ALLIANCE)
{
uint32 bitIndex = htitleInfo->bit_index;
uint32 index = bitIndex / 32;
uint32 old_flag = 1 << (bitIndex % 32);
uint32 new_flag = 1 << (atitleInfo->bit_index % 32);
if (knownTitles[index] & old_flag)
{
knownTitles[index] &= ~old_flag;
// use index of the new title
knownTitles[atitleInfo->bit_index / 32] |= new_flag;
}
}
else
{
uint32 bitIndex = atitleInfo->bit_index;
uint32 index = bitIndex / 32;
uint32 old_flag = 1 << (bitIndex % 32);
uint32 new_flag = 1 << (htitleInfo->bit_index % 32);
if (knownTitles[index] & old_flag)
{
knownTitles[index] &= ~old_flag;
// use index of the new title
knownTitles[htitleInfo->bit_index / 32] |= new_flag;
}
}
std::ostringstream ss;
for (uint32 index = 0; index < ktcount; ++index)
ss << knownTitles[index] << ' ';
stmt = CharacterDatabase.GetPreparedStatement(CHAR_UPD_CHAR_TITLES_FACTION_CHANGE);
stmt->setString(0, ss.str().c_str());
stmt->setUInt32(1, lowGuid);
trans->Append(stmt);
// unset any currently chosen title
stmt = CharacterDatabase.GetPreparedStatement(CHAR_RES_CHAR_TITLES_FACTION_CHANGE);
stmt->setUInt32(0, lowGuid);
trans->Append(stmt);
}
}
}
}
CharacterDatabase.CommitTransaction(trans);
TC_LOG_DEBUG("entities.player", "%s (IP: %s) changed race from %u to %u", GetPlayerInfo().c_str(), GetRemoteAddress().c_str(), oldRace, factionChangeInfo.Race);
SendCharFactionChange(RESPONSE_SUCCESS, factionChangeInfo);
}
void WorldSession::SendCharCreate(ResponseCodes result)
{
WorldPacket data(SMSG_CHAR_CREATE, 1);
data << uint8(result);
SendPacket(&data);
}
void WorldSession::SendCharDelete(ResponseCodes result)
{
WorldPacket data(SMSG_CHAR_DELETE, 1);
data << uint8(result);
SendPacket(&data);
}
void WorldSession::SendCharRename(ResponseCodes result, CharacterRenameInfo const& renameInfo)
{
WorldPacket data(SMSG_CHAR_RENAME, 1 + 8 + renameInfo.Name.size() + 1);
data << uint8(result);
if (result == RESPONSE_SUCCESS)
{
data << renameInfo.Guid;
data << renameInfo.Name;
}
SendPacket(&data);
}
void WorldSession::SendCharCustomize(ResponseCodes result, CharacterCustomizeInfo const& customizeInfo)
{
WorldPacket data(SMSG_CHAR_CUSTOMIZE, 1 + 8 + customizeInfo.Name.size() + 1 + 6);
data << uint8(result);
if (result == RESPONSE_SUCCESS)
{
data << customizeInfo.Guid;
data << customizeInfo.Name;
data << uint8(customizeInfo.Gender);
data << uint8(customizeInfo.Skin);
data << uint8(customizeInfo.Face);
data << uint8(customizeInfo.HairStyle);
data << uint8(customizeInfo.HairColor);
data << uint8(customizeInfo.FacialHair);
}
SendPacket(&data);
}
void WorldSession::SendCharFactionChange(ResponseCodes result, CharacterFactionChangeInfo const& factionChangeInfo)
{
WorldPacket data(SMSG_CHAR_FACTION_CHANGE, 1 + 8 + factionChangeInfo.Name.size() + 1 + 7);
data << uint8(result);
if (result == RESPONSE_SUCCESS)
{
data << factionChangeInfo.Guid;
data << factionChangeInfo.Name;
data << uint8(factionChangeInfo.Gender);
data << uint8(factionChangeInfo.Skin);
data << uint8(factionChangeInfo.Face);
data << uint8(factionChangeInfo.HairStyle);
data << uint8(factionChangeInfo.HairColor);
data << uint8(factionChangeInfo.FacialHair);
data << uint8(factionChangeInfo.Race);
}
SendPacket(&data);
}
void WorldSession::SendSetPlayerDeclinedNamesResult(DeclinedNameResult result, ObjectGuid guid)
{
WorldPacket data(SMSG_SET_PLAYER_DECLINED_NAMES_RESULT, 4 + 8);
data << uint32(result);
data << guid;
SendPacket(&data);
}
void WorldSession::SendBarberShopResult(BarberShopResult result)
{
WorldPacket data(SMSG_BARBER_SHOP_RESULT, 4);
data << uint32(result);
SendPacket(&data);
}<|fim▁end|> | |
<|file_name|>0020_modify_citeria.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_criteria(apps, schema_editor):
Criterion = apps.get_model('activities', 'Criterion')<|fim▁hole|> ar_name="التعاون مع الرئاسة",
code_name="presidency",
instructions="",
category='P')
Criterion.objects.create(year=year_2015_2016,
ar_name="رفع مبكر",
code_name="early_submission",
instructions="",
category='P')
Criterion.objects.create(year=year_2015_2016,
ar_name="تأجيل",
code_name="postponed",
instructions="",
category='P')
Criterion.objects.get(code_name='time', category='P').delete()
def remove_criteria(apps, schema_editor):
Criterion = apps.get_model('activities', 'Criterion')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
Criterion.objects.filter(code_name__in=["presidency",
"early_submission", "postponed"]).delete()
class Migration(migrations.Migration):
dependencies = [
('activities', '0019_remove_alahsa_criteria'),
]
operations = [
migrations.RunPython(
add_criteria,
reverse_code=remove_criteria),
]<|fim▁end|> | StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
Criterion.objects.create(year=year_2015_2016, |
<|file_name|>reshape.py<|end_file_name|><|fim▁begin|># pylint: disable=E1101,E1103
# pylint: disable=W0703,W0622,W0613,W0201
from pandas.compat import range, zip
from pandas import compat
import itertools
import numpy as np
from pandas.core.series import Series
from pandas.core.frame import DataFrame
from pandas.core.sparse import SparseDataFrame, SparseSeries
from pandas.sparse.array import SparseArray
from pandas._sparse import IntIndex
from pandas.core.categorical import Categorical
from pandas.core.common import notnull, _ensure_platform_int, _maybe_promote
from pandas.core.groupby import get_group_index, _compress_group_index
import pandas.core.common as com
import pandas.algos as algos
from pandas.core.index import MultiIndex, _get_na_value
class _Unstacker(object):
"""
Helper class to unstack data / pivot with multi-level index
Parameters
----------
level : int or str, default last level
Level to "unstack". Accepts a name for the level.
Examples
--------
>>> import pandas as pd
>>> index = pd.MultiIndex.from_tuples([('one', 'a'), ('one', 'b'),
... ('two', 'a'), ('two', 'b')])
>>> s = pd.Series(np.arange(1.0, 5.0), index=index)
>>> s
one a 1
b 2
two a 3
b 4
dtype: float64
>>> s.unstack(level=-1)
a b
one 1 2
two 3 4
>>> s.unstack(level=0)
one two
a 1 2
b 3 4
Returns
-------
unstacked : DataFrame
"""
def __init__(self, values, index, level=-1, value_columns=None):
self.is_categorical = None
if values.ndim == 1:
if isinstance(values, Categorical):
self.is_categorical = values
values = np.array(values)
values = values[:, np.newaxis]<|fim▁hole|> if value_columns is None and values.shape[1] != 1: # pragma: no cover
raise ValueError('must pass column labels for multi-column data')
self.index = index
if isinstance(self.index, MultiIndex):
if index._reference_duplicate_name(level):
msg = ("Ambiguous reference to {0}. The index "
"names are not unique.".format(level))
raise ValueError(msg)
self.level = self.index._get_level_number(level)
# when index includes `nan`, need to lift levels/strides by 1
self.lift = 1 if -1 in self.index.labels[self.level] else 0
self.new_index_levels = list(index.levels)
self.new_index_names = list(index.names)
self.removed_name = self.new_index_names.pop(self.level)
self.removed_level = self.new_index_levels.pop(self.level)
self._make_sorted_values_labels()
self._make_selectors()
def _make_sorted_values_labels(self):
v = self.level
labs = list(self.index.labels)
levs = list(self.index.levels)
to_sort = labs[:v] + labs[v + 1:] + [labs[v]]
sizes = [len(x) for x in levs[:v] + levs[v + 1:] + [levs[v]]]
comp_index, obs_ids = get_compressed_ids(to_sort, sizes)
ngroups = len(obs_ids)
indexer = algos.groupsort_indexer(comp_index, ngroups)[0]
indexer = _ensure_platform_int(indexer)
self.sorted_values = com.take_nd(self.values, indexer, axis=0)
self.sorted_labels = [l.take(indexer) for l in to_sort]
def _make_selectors(self):
new_levels = self.new_index_levels
# make the mask
remaining_labels = self.sorted_labels[:-1]
level_sizes = [len(x) for x in new_levels]
comp_index, obs_ids = get_compressed_ids(remaining_labels, level_sizes)
ngroups = len(obs_ids)
comp_index = _ensure_platform_int(comp_index)
stride = self.index.levshape[self.level] + self.lift
self.full_shape = ngroups, stride
selector = self.sorted_labels[-1] + stride * comp_index + self.lift
mask = np.zeros(np.prod(self.full_shape), dtype=bool)
mask.put(selector, True)
if mask.sum() < len(self.index):
raise ValueError('Index contains duplicate entries, '
'cannot reshape')
self.group_index = comp_index
self.mask = mask
self.unique_groups = obs_ids
self.compressor = comp_index.searchsorted(np.arange(ngroups))
def get_result(self):
# TODO: find a better way than this masking business
values, value_mask = self.get_new_values()
columns = self.get_new_columns()
index = self.get_new_index()
# filter out missing levels
if values.shape[1] > 0:
col_inds, obs_ids = _compress_group_index(self.sorted_labels[-1])
# rare case, level values not observed
if len(obs_ids) < self.full_shape[1]:
inds = (value_mask.sum(0) > 0).nonzero()[0]
values = com.take_nd(values, inds, axis=1)
columns = columns[inds]
# may need to coerce categoricals here
if self.is_categorical is not None:
values = [ Categorical.from_array(values[:,i],
categories=self.is_categorical.categories,
ordered=True)
for i in range(values.shape[-1]) ]
return DataFrame(values, index=index, columns=columns)
def get_new_values(self):
values = self.values
# place the values
length, width = self.full_shape
stride = values.shape[1]
result_width = width * stride
result_shape = (length, result_width)
# if our mask is all True, then we can use our existing dtype
if self.mask.all():
dtype = values.dtype
new_values = np.empty(result_shape, dtype=dtype)
else:
dtype, fill_value = _maybe_promote(values.dtype)
new_values = np.empty(result_shape, dtype=dtype)
new_values.fill(fill_value)
new_mask = np.zeros(result_shape, dtype=bool)
# is there a simpler / faster way of doing this?
for i in range(values.shape[1]):
chunk = new_values[:, i * width: (i + 1) * width]
mask_chunk = new_mask[:, i * width: (i + 1) * width]
chunk.flat[self.mask] = self.sorted_values[:, i]
mask_chunk.flat[self.mask] = True
return new_values, new_mask
def get_new_columns(self):
if self.value_columns is None:
if self.lift == 0:
return self.removed_level
lev = self.removed_level
return lev.insert(0, _get_na_value(lev.dtype.type))
stride = len(self.removed_level) + self.lift
width = len(self.value_columns)
propagator = np.repeat(np.arange(width), stride)
if isinstance(self.value_columns, MultiIndex):
new_levels = self.value_columns.levels + (self.removed_level,)
new_names = self.value_columns.names + (self.removed_name,)
new_labels = [lab.take(propagator)
for lab in self.value_columns.labels]
else:
new_levels = [self.value_columns, self.removed_level]
new_names = [self.value_columns.name, self.removed_name]
new_labels = [propagator]
new_labels.append(np.tile(np.arange(stride) - self.lift, width))
return MultiIndex(levels=new_levels, labels=new_labels,
names=new_names, verify_integrity=False)
def get_new_index(self):
result_labels = [lab.take(self.compressor)
for lab in self.sorted_labels[:-1]]
# construct the new index
if len(self.new_index_levels) == 1:
lev, lab = self.new_index_levels[0], result_labels[0]
if (lab == -1).any():
lev = lev.insert(len(lev), _get_na_value(lev.dtype.type))
return lev.take(lab)
return MultiIndex(levels=self.new_index_levels,
labels=result_labels,
names=self.new_index_names,
verify_integrity=False)
def _unstack_multiple(data, clocs):
from pandas.core.groupby import decons_obs_group_ids
if len(clocs) == 0:
return data
# NOTE: This doesn't deal with hierarchical columns yet
index = data.index
clocs = [index._get_level_number(i) for i in clocs]
rlocs = [i for i in range(index.nlevels) if i not in clocs]
clevels = [index.levels[i] for i in clocs]
clabels = [index.labels[i] for i in clocs]
cnames = [index.names[i] for i in clocs]
rlevels = [index.levels[i] for i in rlocs]
rlabels = [index.labels[i] for i in rlocs]
rnames = [index.names[i] for i in rlocs]
shape = [len(x) for x in clevels]
group_index = get_group_index(clabels, shape, sort=False, xnull=False)
comp_ids, obs_ids = _compress_group_index(group_index, sort=False)
recons_labels = decons_obs_group_ids(comp_ids,
obs_ids, shape, clabels, xnull=False)
dummy_index = MultiIndex(levels=rlevels + [obs_ids],
labels=rlabels + [comp_ids],
names=rnames + ['__placeholder__'],
verify_integrity=False)
if isinstance(data, Series):
dummy = Series(data.values, index=dummy_index)
unstacked = dummy.unstack('__placeholder__')
new_levels = clevels
new_names = cnames
new_labels = recons_labels
else:
if isinstance(data.columns, MultiIndex):
result = data
for i in range(len(clocs)):
val = clocs[i]
result = result.unstack(val)
clocs = [val if i > val else val - 1 for val in clocs]
return result
dummy = DataFrame(data.values, index=dummy_index,
columns=data.columns)
unstacked = dummy.unstack('__placeholder__')
if isinstance(unstacked, Series):
unstcols = unstacked.index
else:
unstcols = unstacked.columns
new_levels = [unstcols.levels[0]] + clevels
new_names = [data.columns.name] + cnames
new_labels = [unstcols.labels[0]]
for rec in recons_labels:
new_labels.append(rec.take(unstcols.labels[-1]))
new_columns = MultiIndex(levels=new_levels, labels=new_labels,
names=new_names, verify_integrity=False)
if isinstance(unstacked, Series):
unstacked.index = new_columns
else:
unstacked.columns = new_columns
return unstacked
def pivot(self, index=None, columns=None, values=None):
"""
See DataFrame.pivot
"""
if values is None:
cols = [columns] if index is None else [index, columns]
append = index is None
indexed = self.set_index(cols, append=append)
return indexed.unstack(columns)
else:
if index is None:
index = self.index
else:
index = self[index]
indexed = Series(self[values].values,
index=MultiIndex.from_arrays([index,
self[columns]]))
return indexed.unstack(columns)
def pivot_simple(index, columns, values):
"""
Produce 'pivot' table based on 3 columns of this DataFrame.
Uses unique values from index / columns and fills with values.
Parameters
----------
index : ndarray
Labels to use to make new frame's index
columns : ndarray
Labels to use to make new frame's columns
values : ndarray
Values to use for populating new frame's values
Notes
-----
Obviously, all 3 of the input arguments must have the same length
Returns
-------
DataFrame
"""
if (len(index) != len(columns)) or (len(columns) != len(values)):
raise AssertionError('Length of index, columns, and values must be the'
' same')
if len(index) == 0:
return DataFrame(index=[])
hindex = MultiIndex.from_arrays([index, columns])
series = Series(values.ravel(), index=hindex)
series = series.sortlevel(0)
return series.unstack()
def _slow_pivot(index, columns, values):
"""
Produce 'pivot' table based on 3 columns of this DataFrame.
Uses unique values from index / columns and fills with values.
Parameters
----------
index : string or object
Column name to use to make new frame's index
columns : string or object
Column name to use to make new frame's columns
values : string or object
Column name to use for populating new frame's values
Could benefit from some Cython here.
"""
tree = {}
for i, (idx, col) in enumerate(zip(index, columns)):
if col not in tree:
tree[col] = {}
branch = tree[col]
branch[idx] = values[i]
return DataFrame(tree)
def unstack(obj, level):
if isinstance(level, (tuple, list)):
return _unstack_multiple(obj, level)
if isinstance(obj, DataFrame):
if isinstance(obj.index, MultiIndex):
return _unstack_frame(obj, level)
else:
return obj.T.stack(dropna=False)
else:
unstacker = _Unstacker(obj.values, obj.index, level=level)
return unstacker.get_result()
def _unstack_frame(obj, level):
from pandas.core.internals import BlockManager, make_block
if obj._is_mixed_type:
unstacker = _Unstacker(np.empty(obj.shape, dtype=bool), # dummy
obj.index, level=level,
value_columns=obj.columns)
new_columns = unstacker.get_new_columns()
new_index = unstacker.get_new_index()
new_axes = [new_columns, new_index]
new_blocks = []
mask_blocks = []
for blk in obj._data.blocks:
blk_items = obj._data.items[blk.mgr_locs.indexer]
bunstacker = _Unstacker(blk.values.T, obj.index, level=level,
value_columns=blk_items)
new_items = bunstacker.get_new_columns()
new_placement = new_columns.get_indexer(new_items)
new_values, mask = bunstacker.get_new_values()
mblk = make_block(mask.T, placement=new_placement)
mask_blocks.append(mblk)
newb = make_block(new_values.T, placement=new_placement)
new_blocks.append(newb)
result = DataFrame(BlockManager(new_blocks, new_axes))
mask_frame = DataFrame(BlockManager(mask_blocks, new_axes))
return result.ix[:, mask_frame.sum(0) > 0]
else:
unstacker = _Unstacker(obj.values, obj.index, level=level,
value_columns=obj.columns)
return unstacker.get_result()
def get_compressed_ids(labels, sizes):
from pandas.core.groupby import get_group_index
ids = get_group_index(labels, sizes, sort=True, xnull=False)
return _compress_group_index(ids, sort=True)
def stack(frame, level=-1, dropna=True):
"""
Convert DataFrame to Series with multi-level Index. Columns become the
second level of the resulting hierarchical index
Returns
-------
stacked : Series
"""
def factorize(index):
if index.is_unique:
return index, np.arange(len(index))
cat = Categorical(index, ordered=True)
return cat.categories, cat.codes
N, K = frame.shape
if isinstance(frame.columns, MultiIndex):
if frame.columns._reference_duplicate_name(level):
msg = ("Ambiguous reference to {0}. The column "
"names are not unique.".format(level))
raise ValueError(msg)
# Will also convert negative level numbers and check if out of bounds.
level_num = frame.columns._get_level_number(level)
if isinstance(frame.columns, MultiIndex):
return _stack_multi_columns(frame, level_num=level_num, dropna=dropna)
elif isinstance(frame.index, MultiIndex):
new_levels = list(frame.index.levels)
new_labels = [lab.repeat(K) for lab in frame.index.labels]
clev, clab = factorize(frame.columns)
new_levels.append(clev)
new_labels.append(np.tile(clab, N).ravel())
new_names = list(frame.index.names)
new_names.append(frame.columns.name)
new_index = MultiIndex(levels=new_levels, labels=new_labels,
names=new_names, verify_integrity=False)
else:
levels, (ilab, clab) = \
zip(*map(factorize, (frame.index, frame.columns)))
labels = ilab.repeat(K), np.tile(clab, N).ravel()
new_index = MultiIndex(levels=levels,
labels=labels,
names=[frame.index.name, frame.columns.name],
verify_integrity=False)
new_values = frame.values.ravel()
if dropna:
mask = notnull(new_values)
new_values = new_values[mask]
new_index = new_index[mask]
return Series(new_values, index=new_index)
def stack_multiple(frame, level, dropna=True):
# If all passed levels match up to column names, no
# ambiguity about what to do
if all(lev in frame.columns.names for lev in level):
result = frame
for lev in level:
result = stack(result, lev, dropna=dropna)
# Otherwise, level numbers may change as each successive level is stacked
elif all(isinstance(lev, int) for lev in level):
# As each stack is done, the level numbers decrease, so we need
# to account for that when level is a sequence of ints
result = frame
# _get_level_number() checks level numbers are in range and converts
# negative numbers to positive
level = [frame.columns._get_level_number(lev) for lev in level]
# Can't iterate directly through level as we might need to change
# values as we go
for index in range(len(level)):
lev = level[index]
result = stack(result, lev, dropna=dropna)
# Decrement all level numbers greater than current, as these
# have now shifted down by one
updated_level = []
for other in level:
if other > lev:
updated_level.append(other - 1)
else:
updated_level.append(other)
level = updated_level
else:
raise ValueError("level should contain all level names or all level numbers, "
"not a mixture of the two.")
return result
def _stack_multi_columns(frame, level_num=-1, dropna=True):
def _convert_level_number(level_num, columns):
"""
Logic for converting the level number to something
we can safely pass to swaplevel:
We generally want to convert the level number into
a level name, except when columns do not have names,
in which case we must leave as a level number
"""
if level_num in columns.names:
return columns.names[level_num]
else:
if columns.names[level_num] is None:
return level_num
else:
return columns.names[level_num]
this = frame.copy()
# this makes life much simpler
if level_num != frame.columns.nlevels - 1:
# roll levels to put selected level at end
roll_columns = this.columns
for i in range(level_num, frame.columns.nlevels - 1):
# Need to check if the ints conflict with level names
lev1 = _convert_level_number(i, roll_columns)
lev2 = _convert_level_number(i + 1, roll_columns)
roll_columns = roll_columns.swaplevel(lev1, lev2)
this.columns = roll_columns
if not this.columns.is_lexsorted():
# Workaround the edge case where 0 is one of the column names,
# which interferes with trying to sort based on the first
# level
level_to_sort = _convert_level_number(0, this.columns)
this = this.sortlevel(level_to_sort, axis=1)
# tuple list excluding level for grouping columns
if len(frame.columns.levels) > 2:
tuples = list(zip(*[
lev.take(lab) for lev, lab in
zip(this.columns.levels[:-1], this.columns.labels[:-1])
]))
unique_groups = [key for key, _ in itertools.groupby(tuples)]
new_names = this.columns.names[:-1]
new_columns = MultiIndex.from_tuples(unique_groups, names=new_names)
else:
new_columns = unique_groups = this.columns.levels[0]
# time to ravel the values
new_data = {}
level_vals = this.columns.levels[-1]
level_labels = sorted(set(this.columns.labels[-1]))
level_vals_used = level_vals[level_labels]
levsize = len(level_labels)
drop_cols = []
for key in unique_groups:
loc = this.columns.get_loc(key)
slice_len = loc.stop - loc.start
# can make more efficient?
if slice_len == 0:
drop_cols.append(key)
continue
elif slice_len != levsize:
chunk = this.ix[:, this.columns[loc]]
chunk.columns = level_vals.take(chunk.columns.labels[-1])
value_slice = chunk.reindex(columns=level_vals_used).values
else:
if frame._is_mixed_type:
value_slice = this.ix[:, this.columns[loc]].values
else:
value_slice = this.values[:, loc]
new_data[key] = value_slice.ravel()
if len(drop_cols) > 0:
new_columns = new_columns.difference(drop_cols)
N = len(this)
if isinstance(this.index, MultiIndex):
new_levels = list(this.index.levels)
new_names = list(this.index.names)
new_labels = [lab.repeat(levsize) for lab in this.index.labels]
else:
new_levels = [this.index]
new_labels = [np.arange(N).repeat(levsize)]
new_names = [this.index.name] # something better?
new_levels.append(frame.columns.levels[level_num])
new_labels.append(np.tile(level_labels, N))
new_names.append(frame.columns.names[level_num])
new_index = MultiIndex(levels=new_levels, labels=new_labels,
names=new_names, verify_integrity=False)
result = DataFrame(new_data, index=new_index, columns=new_columns)
# more efficient way to go about this? can do the whole masking biz but
# will only save a small amount of time...
if dropna:
result = result.dropna(axis=0, how='all')
return result
def melt(frame, id_vars=None, value_vars=None,
var_name=None, value_name='value', col_level=None):
"""
"Unpivots" a DataFrame from wide format to long format, optionally leaving
identifier variables set.
This function is useful to massage a DataFrame into a format where one
or more columns are identifier variables (`id_vars`), while all other
columns, considered measured variables (`value_vars`), are "unpivoted" to
the row axis, leaving just two non-identifier columns, 'variable' and
'value'.
Parameters
----------
frame : DataFrame
id_vars : tuple, list, or ndarray, optional
Column(s) to use as identifier variables.
value_vars : tuple, list, or ndarray, optional
Column(s) to unpivot. If not specified, uses all columns that
are not set as `id_vars`.
var_name : scalar
Name to use for the 'variable' column. If None it uses
``frame.columns.name`` or 'variable'.
value_name : scalar, default 'value'
Name to use for the 'value' column.
col_level : int or string, optional
If columns are a MultiIndex then use this level to melt.
See also
--------
pivot_table
DataFrame.pivot
Examples
--------
>>> import pandas as pd
>>> df = pd.DataFrame({'A': {0: 'a', 1: 'b', 2: 'c'},
... 'B': {0: 1, 1: 3, 2: 5},
... 'C': {0: 2, 1: 4, 2: 6}})
>>> df
A B C
0 a 1 2
1 b 3 4
2 c 5 6
>>> pd.melt(df, id_vars=['A'], value_vars=['B'])
A variable value
0 a B 1
1 b B 3
2 c B 5
>>> pd.melt(df, id_vars=['A'], value_vars=['B', 'C'])
A variable value
0 a B 1
1 b B 3
2 c B 5
3 a C 2
4 b C 4
5 c C 6
The names of 'variable' and 'value' columns can be customized:
>>> pd.melt(df, id_vars=['A'], value_vars=['B'],
... var_name='myVarname', value_name='myValname')
A myVarname myValname
0 a B 1
1 b B 3
2 c B 5
If you have multi-index columns:
>>> df.columns = [list('ABC'), list('DEF')]
>>> df
A B C
D E F
0 a 1 2
1 b 3 4
2 c 5 6
>>> pd.melt(df, col_level=0, id_vars=['A'], value_vars=['B'])
A variable value
0 a B 1
1 b B 3
2 c B 5
>>> pd.melt(df, id_vars=[('A', 'D')], value_vars=[('B', 'E')])
(A, D) variable_0 variable_1 value
0 a B E 1
1 b B E 3
2 c B E 5
"""
# TODO: what about the existing index?
if id_vars is not None:
if not isinstance(id_vars, (tuple, list, np.ndarray)):
id_vars = [id_vars]
else:
id_vars = list(id_vars)
else:
id_vars = []
if value_vars is not None:
if not isinstance(value_vars, (tuple, list, np.ndarray)):
value_vars = [value_vars]
frame = frame.ix[:, id_vars + value_vars]
else:
frame = frame.copy()
if col_level is not None: # allow list or other?
# frame is a copy
frame.columns = frame.columns.get_level_values(col_level)
if var_name is None:
if isinstance(frame.columns, MultiIndex):
if len(frame.columns.names) == len(set(frame.columns.names)):
var_name = frame.columns.names
else:
var_name = ['variable_%s' % i for i in
range(len(frame.columns.names))]
else:
var_name = [frame.columns.name if frame.columns.name is not None
else 'variable']
if isinstance(var_name, compat.string_types):
var_name = [var_name]
N, K = frame.shape
K -= len(id_vars)
mdata = {}
for col in id_vars:
mdata[col] = np.tile(frame.pop(col).values, K)
mcolumns = id_vars + var_name + [value_name]
mdata[value_name] = frame.values.ravel('F')
for i, col in enumerate(var_name):
# asanyarray will keep the columns as an Index
mdata[col] = np.asanyarray(frame.columns.get_level_values(i)).repeat(N)
return DataFrame(mdata, columns=mcolumns)
def lreshape(data, groups, dropna=True, label=None):
"""
Reshape long-format data to wide. Generalized inverse of DataFrame.pivot
Parameters
----------
data : DataFrame
groups : dict
{new_name : list_of_columns}
dropna : boolean, default True
Examples
--------
>>> import pandas as pd
>>> data = pd.DataFrame({'hr1': [514, 573], 'hr2': [545, 526],
... 'team': ['Red Sox', 'Yankees'],
... 'year1': [2007, 2008], 'year2': [2008, 2008]})
>>> data
hr1 hr2 team year1 year2
0 514 545 Red Sox 2007 2008
1 573 526 Yankees 2007 2008
>>> pd.lreshape(data, {'year': ['year1', 'year2'], 'hr': ['hr1', 'hr2']})
team hr year
0 Red Sox 514 2007
1 Yankees 573 2007
2 Red Sox 545 2008
3 Yankees 526 2008
Returns
-------
reshaped : DataFrame
"""
if isinstance(groups, dict):
keys = list(groups.keys())
values = list(groups.values())
else:
keys, values = zip(*groups)
all_cols = list(set.union(*[set(x) for x in values]))
id_cols = list(data.columns.difference(all_cols))
K = len(values[0])
for seq in values:
if len(seq) != K:
raise ValueError('All column lists must be same length')
mdata = {}
pivot_cols = []
for target, names in zip(keys, values):
mdata[target] = com._concat_compat([data[col].values for col in names])
pivot_cols.append(target)
for col in id_cols:
mdata[col] = np.tile(data[col].values, K)
if dropna:
mask = np.ones(len(mdata[pivot_cols[0]]), dtype=bool)
for c in pivot_cols:
mask &= notnull(mdata[c])
if not mask.all():
mdata = dict((k, v[mask]) for k, v in compat.iteritems(mdata))
return DataFrame(mdata, columns=id_cols + pivot_cols)
def wide_to_long(df, stubnames, i, j):
"""
Wide panel to long format. Less flexible but more user-friendly than melt.
Parameters
----------
df : DataFrame
The wide-format DataFrame
stubnames : list
A list of stub names. The wide format variables are assumed to
start with the stub names.
i : str
The name of the id variable.
j : str
The name of the subobservation variable.
stubend : str
Regex to match for the end of the stubs.
Returns
-------
DataFrame
A DataFrame that contains each stub name as a variable as well as
variables for i and j.
Examples
--------
>>> import pandas as pd
>>> import numpy as np
>>> np.random.seed(123)
>>> df = pd.DataFrame({"A1970" : {0 : "a", 1 : "b", 2 : "c"},
... "A1980" : {0 : "d", 1 : "e", 2 : "f"},
... "B1970" : {0 : 2.5, 1 : 1.2, 2 : .7},
... "B1980" : {0 : 3.2, 1 : 1.3, 2 : .1},
... "X" : dict(zip(range(3), np.random.randn(3)))
... })
>>> df["id"] = df.index
>>> df
A1970 A1980 B1970 B1980 X id
0 a d 2.5 3.2 -1.085631 0
1 b e 1.2 1.3 0.997345 1
2 c f 0.7 0.1 0.282978 2
>>> wide_to_long(df, ["A", "B"], i="id", j="year")
X A B
id year
0 1970 -1.085631 a 2.5
1 1970 0.997345 b 1.2
2 1970 0.282978 c 0.7
0 1980 -1.085631 d 3.2
1 1980 0.997345 e 1.3
2 1980 0.282978 f 0.1
Notes
-----
All extra variables are treated as extra id variables. This simply uses
`pandas.melt` under the hood, but is hard-coded to "do the right thing"
in a typicaly case.
"""
def get_var_names(df, regex):
return df.filter(regex=regex).columns.tolist()
def melt_stub(df, stub, i, j):
varnames = get_var_names(df, "^" + stub)
newdf = melt(df, id_vars=i, value_vars=varnames, value_name=stub,
var_name=j)
newdf_j = newdf[j].str.replace(stub, "")
try:
newdf_j = newdf_j.astype(int)
except ValueError:
pass
newdf[j] = newdf_j
return newdf
id_vars = get_var_names(df, "^(?!%s)" % "|".join(stubnames))
if i not in id_vars:
id_vars += [i]
newdf = melt_stub(df, stubnames[0], id_vars, j)
for stub in stubnames[1:]:
new = melt_stub(df, stub, id_vars, j)
newdf = newdf.merge(new, how="outer", on=id_vars + [j], copy=False)
return newdf.set_index([i, j])
def get_dummies(data, prefix=None, prefix_sep='_', dummy_na=False,
columns=None, sparse=False):
"""
Convert categorical variable into dummy/indicator variables
Parameters
----------
data : array-like, Series, or DataFrame
prefix : string, list of strings, or dict of strings, default None
String to append DataFrame column names
Pass a list with length equal to the number of columns
when calling get_dummies on a DataFrame. Alternativly, `prefix`
can be a dictionary mapping column names to prefixes.
prefix_sep : string, default '_'
If appending prefix, separator/delimiter to use. Or pass a
list or dictionary as with `prefix.`
dummy_na : bool, default False
Add a column to indicate NaNs, if False NaNs are ignored.
columns : list-like, default None
Column names in the DataFrame to be encoded.
If `columns` is None then all the columns with
`object` or `category` dtype will be converted.
sparse : bool, default False
Whether the dummy columns should be sparse or not. Returns
SparseDataFrame if `data` is a Series or if all columns are included.
Otherwise returns a DataFrame with some SparseBlocks.
.. versionadded:: 0.16.1
Returns
-------
dummies : DataFrame or SparseDataFrame
Examples
--------
>>> import pandas as pd
>>> s = pd.Series(list('abca'))
>>> get_dummies(s)
a b c
0 1 0 0
1 0 1 0
2 0 0 1
3 1 0 0
>>> s1 = ['a', 'b', np.nan]
>>> get_dummies(s1)
a b
0 1 0
1 0 1
2 0 0
>>> get_dummies(s1, dummy_na=True)
a b NaN
0 1 0 0
1 0 1 0
2 0 0 1
>>> df = DataFrame({'A': ['a', 'b', 'a'], 'B': ['b', 'a', 'c'],
'C': [1, 2, 3]})
>>> get_dummies(df, prefix=['col1', 'col2']):
C col1_a col1_b col2_a col2_b col2_c
0 1 1 0 0 1 0
1 2 0 1 1 0 0
2 3 1 0 0 0 1
See also ``Series.str.get_dummies``.
"""
from pandas.tools.merge import concat
from itertools import cycle
if isinstance(data, DataFrame):
# determine columns being encoded
if columns is None:
columns_to_encode = data.select_dtypes(include=['object',
'category']).columns
else:
columns_to_encode = columns
# validate prefixes and separator to avoid silently dropping cols
def check_len(item, name):
length_msg = ("Length of '{0}' ({1}) did "
"not match the length of the columns "
"being encoded ({2}).")
if com.is_list_like(item):
if not len(item) == len(columns_to_encode):
raise ValueError(length_msg.format(name, len(item),
len(columns_to_encode)))
check_len(prefix, 'prefix')
check_len(prefix_sep, 'prefix_sep')
if isinstance(prefix, compat.string_types):
prefix = cycle([prefix])
if isinstance(prefix, dict):
prefix = [prefix[col] for col in columns_to_encode]
if prefix is None:
prefix = columns_to_encode
# validate separators
if isinstance(prefix_sep, compat.string_types):
prefix_sep = cycle([prefix_sep])
elif isinstance(prefix_sep, dict):
prefix_sep = [prefix_sep[col] for col in columns_to_encode]
if set(columns_to_encode) == set(data.columns):
with_dummies = []
else:
with_dummies = [data.drop(columns_to_encode, axis=1)]
for (col, pre, sep) in zip(columns_to_encode, prefix, prefix_sep):
dummy = _get_dummies_1d(data[col], prefix=pre, prefix_sep=sep,
dummy_na=dummy_na, sparse=sparse)
with_dummies.append(dummy)
result = concat(with_dummies, axis=1)
else:
result = _get_dummies_1d(data, prefix, prefix_sep, dummy_na,
sparse=sparse)
return result
def _get_dummies_1d(data, prefix, prefix_sep='_', dummy_na=False, sparse=False):
# Series avoids inconsistent NaN handling
cat = Categorical.from_array(Series(data), ordered=True)
levels = cat.categories
# if all NaN
if not dummy_na and len(levels) == 0:
if isinstance(data, Series):
index = data.index
else:
index = np.arange(len(data))
if not sparse:
return DataFrame(index=index)
else:
return SparseDataFrame(index=index)
codes = cat.codes.copy()
if dummy_na:
codes[codes == -1] = len(cat.categories)
levels = np.append(cat.categories, np.nan)
number_of_cols = len(levels)
if prefix is not None:
dummy_cols = ['%s%s%s' % (prefix, prefix_sep, v)
for v in levels]
else:
dummy_cols = levels
if isinstance(data, Series):
index = data.index
else:
index = None
if sparse:
sparse_series = {}
N = len(data)
sp_indices = [ [] for _ in range(len(dummy_cols)) ]
for ndx, code in enumerate(codes):
if code == -1:
# Blank entries if not dummy_na and code == -1, #GH4446
continue
sp_indices[code].append(ndx)
for col, ixs in zip(dummy_cols, sp_indices):
sarr = SparseArray(np.ones(len(ixs)), sparse_index=IntIndex(N, ixs),
fill_value=0)
sparse_series[col] = SparseSeries(data=sarr, index=index)
return SparseDataFrame(sparse_series, index=index, columns=dummy_cols)
else:
dummy_mat = np.eye(number_of_cols).take(codes, axis=0)
if not dummy_na:
# reset NaN GH4446
dummy_mat[codes == -1] = 0
return DataFrame(dummy_mat, index=index, columns=dummy_cols)
def make_axis_dummies(frame, axis='minor', transform=None):
"""
Construct 1-0 dummy variables corresponding to designated axis
labels
Parameters
----------
frame : DataFrame
axis : {'major', 'minor'}, default 'minor'
transform : function, default None
Function to apply to axis labels first. For example, to
get "day of week" dummies in a time series regression
you might call::
make_axis_dummies(panel, axis='major',
transform=lambda d: d.weekday())
Returns
-------
dummies : DataFrame
Column names taken from chosen axis
"""
numbers = {
'major': 0,
'minor': 1
}
num = numbers.get(axis, axis)
items = frame.index.levels[num]
labels = frame.index.labels[num]
if transform is not None:
mapped_items = items.map(transform)
cat = Categorical.from_array(mapped_items.take(labels), ordered=True)
labels = cat.codes
items = cat.categories
values = np.eye(len(items), dtype=float)
values = values.take(labels, axis=0)
return DataFrame(values, columns=items, index=frame.index)<|fim▁end|> | self.values = values
self.value_columns = value_columns
|
<|file_name|>release_testing_test.go<|end_file_name|><|fim▁begin|>/*
Copyright The Helm Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at<|fim▁hole|>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"io"
"testing"
"github.com/spf13/cobra"
"k8s.io/helm/pkg/helm"
"k8s.io/helm/pkg/proto/hapi/release"
)
func TestReleaseTesting(t *testing.T) {
tests := []releaseCase{
{
name: "basic test",
args: []string{"example-release"},
flags: []string{},
responses: map[string]release.TestRun_Status{"PASSED: green lights everywhere": release.TestRun_SUCCESS},
err: false,
},
{
name: "test failure",
args: []string{"example-fail"},
flags: []string{},
responses: map[string]release.TestRun_Status{"FAILURE: red lights everywhere": release.TestRun_FAILURE},
err: true,
},
{
name: "test unknown",
args: []string{"example-unknown"},
flags: []string{},
responses: map[string]release.TestRun_Status{"UNKNOWN: yellow lights everywhere": release.TestRun_UNKNOWN},
err: false,
},
{
name: "test error",
args: []string{"example-error"},
flags: []string{},
responses: map[string]release.TestRun_Status{"ERROR: yellow lights everywhere": release.TestRun_FAILURE},
err: true,
},
{
name: "test running",
args: []string{"example-running"},
flags: []string{},
responses: map[string]release.TestRun_Status{"RUNNING: things are happpeningggg": release.TestRun_RUNNING},
err: false,
},
{
name: "multiple tests example",
args: []string{"example-suite"},
flags: []string{},
responses: map[string]release.TestRun_Status{
"RUNNING: things are happpeningggg": release.TestRun_RUNNING,
"PASSED: party time": release.TestRun_SUCCESS,
"RUNNING: things are happening again": release.TestRun_RUNNING,
"FAILURE: good thing u checked :)": release.TestRun_FAILURE,
"RUNNING: things are happpeningggg yet again": release.TestRun_RUNNING,
"PASSED: feel free to party again": release.TestRun_SUCCESS},
err: true,
},
}
runReleaseCases(t, tests, func(c *helm.FakeClient, out io.Writer) *cobra.Command {
return newReleaseTestCmd(c, out)
})
}<|fim▁end|> |
http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>containers.py<|end_file_name|><|fim▁begin|>import six
import warnings
from .. import errors
from ..utils.utils import (
convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds,
format_environment, normalize_links, parse_bytes, parse_devices,
split_command, version_gte, version_lt,
)
from .base import DictType
from .healthcheck import Healthcheck
class LogConfigTypesEnum(object):
_values = (
'json-file',
'syslog',
'journald',
'gelf',
'fluentd',
'none'
)
JSON, SYSLOG, JOURNALD, GELF, FLUENTD, NONE = _values
class LogConfig(DictType):
types = LogConfigTypesEnum
def __init__(self, **kwargs):
log_driver_type = kwargs.get('type', kwargs.get('Type'))
config = kwargs.get('config', kwargs.get('Config')) or {}
if config and not isinstance(config, dict):
raise ValueError("LogConfig.config must be a dictionary")
super(LogConfig, self).__init__({
'Type': log_driver_type,
'Config': config
})
@property
def type(self):
return self['Type']
@type.setter
def type(self, value):
self['Type'] = value
@property
def config(self):
return self['Config']
<|fim▁hole|>
def unset_config(self, key):
if key in self.config:
del self.config[key]
class Ulimit(DictType):
def __init__(self, **kwargs):
name = kwargs.get('name', kwargs.get('Name'))
soft = kwargs.get('soft', kwargs.get('Soft'))
hard = kwargs.get('hard', kwargs.get('Hard'))
if not isinstance(name, six.string_types):
raise ValueError("Ulimit.name must be a string")
if soft and not isinstance(soft, int):
raise ValueError("Ulimit.soft must be an integer")
if hard and not isinstance(hard, int):
raise ValueError("Ulimit.hard must be an integer")
super(Ulimit, self).__init__({
'Name': name,
'Soft': soft,
'Hard': hard
})
@property
def name(self):
return self['Name']
@name.setter
def name(self, value):
self['Name'] = value
@property
def soft(self):
return self.get('Soft')
@soft.setter
def soft(self, value):
self['Soft'] = value
@property
def hard(self):
return self.get('Hard')
@hard.setter
def hard(self, value):
self['Hard'] = value
class HostConfig(dict):
def __init__(self, version, binds=None, port_bindings=None,
lxc_conf=None, publish_all_ports=False, links=None,
privileged=False, dns=None, dns_search=None,
volumes_from=None, network_mode=None, restart_policy=None,
cap_add=None, cap_drop=None, devices=None, extra_hosts=None,
read_only=None, pid_mode=None, ipc_mode=None,
security_opt=None, ulimits=None, log_config=None,
mem_limit=None, memswap_limit=None, mem_reservation=None,
kernel_memory=None, mem_swappiness=None, cgroup_parent=None,
group_add=None, cpu_quota=None, cpu_period=None,
blkio_weight=None, blkio_weight_device=None,
device_read_bps=None, device_write_bps=None,
device_read_iops=None, device_write_iops=None,
oom_kill_disable=False, shm_size=None, sysctls=None,
tmpfs=None, oom_score_adj=None, dns_opt=None, cpu_shares=None,
cpuset_cpus=None, userns_mode=None, pids_limit=None,
isolation=None):
if mem_limit is not None:
self['Memory'] = parse_bytes(mem_limit)
if memswap_limit is not None:
self['MemorySwap'] = parse_bytes(memswap_limit)
if mem_reservation:
if version_lt(version, '1.21'):
raise host_config_version_error('mem_reservation', '1.21')
self['MemoryReservation'] = parse_bytes(mem_reservation)
if kernel_memory:
if version_lt(version, '1.21'):
raise host_config_version_error('kernel_memory', '1.21')
self['KernelMemory'] = parse_bytes(kernel_memory)
if mem_swappiness is not None:
if version_lt(version, '1.20'):
raise host_config_version_error('mem_swappiness', '1.20')
if not isinstance(mem_swappiness, int):
raise host_config_type_error(
'mem_swappiness', mem_swappiness, 'int'
)
self['MemorySwappiness'] = mem_swappiness
if shm_size is not None:
if isinstance(shm_size, six.string_types):
shm_size = parse_bytes(shm_size)
self['ShmSize'] = shm_size
if pid_mode:
if version_lt(version, '1.24') and pid_mode != 'host':
raise host_config_value_error('pid_mode', pid_mode)
self['PidMode'] = pid_mode
if ipc_mode:
self['IpcMode'] = ipc_mode
if privileged:
self['Privileged'] = privileged
if oom_kill_disable:
if version_lt(version, '1.20'):
raise host_config_version_error('oom_kill_disable', '1.19')
self['OomKillDisable'] = oom_kill_disable
if oom_score_adj:
if version_lt(version, '1.22'):
raise host_config_version_error('oom_score_adj', '1.22')
if not isinstance(oom_score_adj, int):
raise host_config_type_error(
'oom_score_adj', oom_score_adj, 'int'
)
self['OomScoreAdj'] = oom_score_adj
if publish_all_ports:
self['PublishAllPorts'] = publish_all_ports
if read_only is not None:
self['ReadonlyRootfs'] = read_only
if dns_search:
self['DnsSearch'] = dns_search
if network_mode:
self['NetworkMode'] = network_mode
elif network_mode is None and version_gte(version, '1.20'):
self['NetworkMode'] = 'default'
if restart_policy:
if not isinstance(restart_policy, dict):
raise host_config_type_error(
'restart_policy', restart_policy, 'dict'
)
self['RestartPolicy'] = restart_policy
if cap_add:
self['CapAdd'] = cap_add
if cap_drop:
self['CapDrop'] = cap_drop
if devices:
self['Devices'] = parse_devices(devices)
if group_add:
if version_lt(version, '1.20'):
raise host_config_version_error('group_add', '1.20')
self['GroupAdd'] = [six.text_type(grp) for grp in group_add]
if dns is not None:
self['Dns'] = dns
if dns_opt is not None:
if version_lt(version, '1.21'):
raise host_config_version_error('dns_opt', '1.21')
self['DnsOptions'] = dns_opt
if security_opt is not None:
if not isinstance(security_opt, list):
raise host_config_type_error(
'security_opt', security_opt, 'list'
)
self['SecurityOpt'] = security_opt
if sysctls:
if not isinstance(sysctls, dict):
raise host_config_type_error('sysctls', sysctls, 'dict')
self['Sysctls'] = {}
for k, v in six.iteritems(sysctls):
self['Sysctls'][k] = six.text_type(v)
if volumes_from is not None:
if isinstance(volumes_from, six.string_types):
volumes_from = volumes_from.split(',')
self['VolumesFrom'] = volumes_from
if binds is not None:
self['Binds'] = convert_volume_binds(binds)
if port_bindings is not None:
self['PortBindings'] = convert_port_bindings(port_bindings)
if extra_hosts is not None:
if isinstance(extra_hosts, dict):
extra_hosts = [
'{0}:{1}'.format(k, v)
for k, v in sorted(six.iteritems(extra_hosts))
]
self['ExtraHosts'] = extra_hosts
if links is not None:
self['Links'] = normalize_links(links)
if isinstance(lxc_conf, dict):
formatted = []
for k, v in six.iteritems(lxc_conf):
formatted.append({'Key': k, 'Value': str(v)})
lxc_conf = formatted
if lxc_conf is not None:
self['LxcConf'] = lxc_conf
if cgroup_parent is not None:
self['CgroupParent'] = cgroup_parent
if ulimits is not None:
if not isinstance(ulimits, list):
raise host_config_type_error('ulimits', ulimits, 'list')
self['Ulimits'] = []
for l in ulimits:
if not isinstance(l, Ulimit):
l = Ulimit(**l)
self['Ulimits'].append(l)
if log_config is not None:
if not isinstance(log_config, LogConfig):
if not isinstance(log_config, dict):
raise host_config_type_error(
'log_config', log_config, 'LogConfig'
)
log_config = LogConfig(**log_config)
self['LogConfig'] = log_config
if cpu_quota:
if not isinstance(cpu_quota, int):
raise host_config_type_error('cpu_quota', cpu_quota, 'int')
if version_lt(version, '1.19'):
raise host_config_version_error('cpu_quota', '1.19')
self['CpuQuota'] = cpu_quota
if cpu_period:
if not isinstance(cpu_period, int):
raise host_config_type_error('cpu_period', cpu_period, 'int')
if version_lt(version, '1.19'):
raise host_config_version_error('cpu_period', '1.19')
self['CpuPeriod'] = cpu_period
if cpu_shares:
if version_lt(version, '1.18'):
raise host_config_version_error('cpu_shares', '1.18')
if not isinstance(cpu_shares, int):
raise host_config_type_error('cpu_shares', cpu_shares, 'int')
self['CpuShares'] = cpu_shares
if cpuset_cpus:
if version_lt(version, '1.18'):
raise host_config_version_error('cpuset_cpus', '1.18')
self['CpuSetCpus'] = cpuset_cpus
if blkio_weight:
if not isinstance(blkio_weight, int):
raise host_config_type_error(
'blkio_weight', blkio_weight, 'int'
)
if version_lt(version, '1.22'):
raise host_config_version_error('blkio_weight', '1.22')
self["BlkioWeight"] = blkio_weight
if blkio_weight_device:
if not isinstance(blkio_weight_device, list):
raise host_config_type_error(
'blkio_weight_device', blkio_weight_device, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('blkio_weight_device', '1.22')
self["BlkioWeightDevice"] = blkio_weight_device
if device_read_bps:
if not isinstance(device_read_bps, list):
raise host_config_type_error(
'device_read_bps', device_read_bps, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('device_read_bps', '1.22')
self["BlkioDeviceReadBps"] = device_read_bps
if device_write_bps:
if not isinstance(device_write_bps, list):
raise host_config_type_error(
'device_write_bps', device_write_bps, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('device_write_bps', '1.22')
self["BlkioDeviceWriteBps"] = device_write_bps
if device_read_iops:
if not isinstance(device_read_iops, list):
raise host_config_type_error(
'device_read_iops', device_read_iops, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('device_read_iops', '1.22')
self["BlkioDeviceReadIOps"] = device_read_iops
if device_write_iops:
if not isinstance(device_write_iops, list):
raise host_config_type_error(
'device_write_iops', device_write_iops, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('device_write_iops', '1.22')
self["BlkioDeviceWriteIOps"] = device_write_iops
if tmpfs:
if version_lt(version, '1.22'):
raise host_config_version_error('tmpfs', '1.22')
self["Tmpfs"] = convert_tmpfs_mounts(tmpfs)
if userns_mode:
if version_lt(version, '1.23'):
raise host_config_version_error('userns_mode', '1.23')
if userns_mode != "host":
raise host_config_value_error("userns_mode", userns_mode)
self['UsernsMode'] = userns_mode
if pids_limit:
if not isinstance(pids_limit, int):
raise host_config_type_error('pids_limit', pids_limit, 'int')
if version_lt(version, '1.23'):
raise host_config_version_error('pids_limit', '1.23')
self["PidsLimit"] = pids_limit
if isolation:
if not isinstance(isolation, six.string_types):
raise host_config_type_error('isolation', isolation, 'string')
if version_lt(version, '1.24'):
raise host_config_version_error('isolation', '1.24')
self['Isolation'] = isolation
def host_config_type_error(param, param_value, expected):
error_msg = 'Invalid type for {0} param: expected {1} but found {2}'
return TypeError(error_msg.format(param, expected, type(param_value)))
def host_config_version_error(param, version, less_than=True):
operator = '<' if less_than else '>'
error_msg = '{0} param is not supported in API versions {1} {2}'
return errors.InvalidVersion(error_msg.format(param, operator, version))
def host_config_value_error(param, param_value):
error_msg = 'Invalid value for {0} param: {1}'
return ValueError(error_msg.format(param, param_value))
class ContainerConfig(dict):
def __init__(
self, version, image, command, hostname=None, user=None, detach=False,
stdin_open=False, tty=False, mem_limit=None, ports=None, dns=None,
environment=None, volumes=None, volumes_from=None,
network_disabled=False, entrypoint=None, cpu_shares=None,
working_dir=None, domainname=None, memswap_limit=None, cpuset=None,
host_config=None, mac_address=None, labels=None, volume_driver=None,
stop_signal=None, networking_config=None, healthcheck=None,
):
if isinstance(command, six.string_types):
command = split_command(command)
if isinstance(entrypoint, six.string_types):
entrypoint = split_command(entrypoint)
if isinstance(environment, dict):
environment = format_environment(environment)
if labels is not None and version_lt(version, '1.18'):
raise errors.InvalidVersion(
'labels were only introduced in API version 1.18'
)
if cpuset is not None or cpu_shares is not None:
if version_gte(version, '1.18'):
warnings.warn(
'The cpuset_cpus and cpu_shares options have been moved to'
' host_config in API version 1.18, and will be removed',
DeprecationWarning
)
if stop_signal is not None and version_lt(version, '1.21'):
raise errors.InvalidVersion(
'stop_signal was only introduced in API version 1.21'
)
if healthcheck is not None and version_lt(version, '1.24'):
raise errors.InvalidVersion(
'Health options were only introduced in API version 1.24'
)
if version_lt(version, '1.19'):
if volume_driver is not None:
raise errors.InvalidVersion(
'Volume drivers were only introduced in API version 1.19'
)
mem_limit = mem_limit if mem_limit is not None else 0
memswap_limit = memswap_limit if memswap_limit is not None else 0
else:
if mem_limit is not None:
raise errors.InvalidVersion(
'mem_limit has been moved to host_config in API version'
' 1.19'
)
if memswap_limit is not None:
raise errors.InvalidVersion(
'memswap_limit has been moved to host_config in API '
'version 1.19'
)
if isinstance(labels, list):
labels = dict((lbl, six.text_type('')) for lbl in labels)
if mem_limit is not None:
mem_limit = parse_bytes(mem_limit)
if memswap_limit is not None:
memswap_limit = parse_bytes(memswap_limit)
if isinstance(ports, list):
exposed_ports = {}
for port_definition in ports:
port = port_definition
proto = 'tcp'
if isinstance(port_definition, tuple):
if len(port_definition) == 2:
proto = port_definition[1]
port = port_definition[0]
exposed_ports['{0}/{1}'.format(port, proto)] = {}
ports = exposed_ports
if isinstance(volumes, six.string_types):
volumes = [volumes, ]
if isinstance(volumes, list):
volumes_dict = {}
for vol in volumes:
volumes_dict[vol] = {}
volumes = volumes_dict
if volumes_from:
if not isinstance(volumes_from, six.string_types):
volumes_from = ','.join(volumes_from)
else:
# Force None, an empty list or dict causes client.start to fail
volumes_from = None
if healthcheck and isinstance(healthcheck, dict):
healthcheck = Healthcheck(**healthcheck)
attach_stdin = False
attach_stdout = False
attach_stderr = False
stdin_once = False
if not detach:
attach_stdout = True
attach_stderr = True
if stdin_open:
attach_stdin = True
stdin_once = True
if version_gte(version, '1.10'):
message = ('{0!r} parameter has no effect on create_container().'
' It has been moved to host_config')
if dns is not None:
raise errors.InvalidVersion(message.format('dns'))
if volumes_from is not None:
raise errors.InvalidVersion(message.format('volumes_from'))
self.update({
'Hostname': hostname,
'Domainname': domainname,
'ExposedPorts': ports,
'User': six.text_type(user) if user else None,
'Tty': tty,
'OpenStdin': stdin_open,
'StdinOnce': stdin_once,
'Memory': mem_limit,
'AttachStdin': attach_stdin,
'AttachStdout': attach_stdout,
'AttachStderr': attach_stderr,
'Env': environment,
'Cmd': command,
'Dns': dns,
'Image': image,
'Volumes': volumes,
'VolumesFrom': volumes_from,
'NetworkDisabled': network_disabled,
'Entrypoint': entrypoint,
'CpuShares': cpu_shares,
'Cpuset': cpuset,
'CpusetCpus': cpuset,
'WorkingDir': working_dir,
'MemorySwap': memswap_limit,
'HostConfig': host_config,
'NetworkingConfig': networking_config,
'MacAddress': mac_address,
'Labels': labels,
'VolumeDriver': volume_driver,
'StopSignal': stop_signal,
'Healthcheck': healthcheck,
})<|fim▁end|> | def set_config_value(self, key, value):
self.config[key] = value |
<|file_name|>github.com.js<|end_file_name|><|fim▁begin|>if (Zepto.ajax.restore) {
Zepto.ajax.restore();
}
sinon.stub(Zepto, "ajax")
.yieldsTo("success", {
responseStatus : 200,
responseDetails : null,
responseData : {
feed: {
link : "http://github.com",
title : "GitHub Public Timeline",
entries : [
{ title : "Croaky signed up",
link : "http://github.com/Croaky/openbeerdatabase",
author : "",
publishedDate : "Thu, 24 Nov 2011 19:00:00 -0600",
content : "\u003cstrong\u003eCroaky\u003c/strong\u003e signed up for GitHub.",
contentSnippet : "Croaky signed up for GitHub.",
categories : []
}
]<|fim▁hole|><|fim▁end|> | }
}
}); |
<|file_name|>publisher.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2013, salesforce.com, inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided
// that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this list of conditions and the
// following disclaimer.
//
// Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
// the following disclaimer in the documentation and/or other materials provided with the distribution.
//
// Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
(function ($$){
var sr, mobile, postType, thumbnailUrl;
myPublisher = {
init : function(signedRequest, isMobile) {
sr = signedRequest;
mobile = isMobile;
},
// Auto resize the iframe to fit the current content.
resize : function() {
$$.client.resize(sr.client);
},
// Simply display incoming events in order
logEvent : function(name) {
var elem = $$.byId("events");
var sep = ($$.isNil(elem.value)) ? "" : ",";
elem.value += sep + name
},
selectPostType : function(e) {
console.log("got click", e);
postType = e;
// Enable the share button
$$.client.publish(sr.client, {name : "publisher.setValidForSubmit", payload : true});
},
clearPostTypes : function() {
var i, elements = $$.byClass('postType');
for (i = 0; i < elements.length; i+=1) {
elements[i].checked=false;
}
},
canvasOptions : function(elem, option) {
var bool = Sfdc.canvas.indexOf(sr.context.application.options, option) == -1;
elem.innerHTML = (bool) ? "✓" : "✗";
elem.style.color = (bool) ? "green" : "red";
},
updateContent : function() {
if (!mobile) {
$$.byId('name').innerHTML = sr.context.user.firstName + " " + sr.context.user.lastName;<|fim▁hole|> $$.byId('location').innerHTML = sr.context.environment.displayLocation;
myPublisher.canvasOptions($$.byId('header-enabled'), "HideHeader");
myPublisher.canvasOptions($$.byId('share-enabled'), "HideShare");
}
},
selectThumbnail: function(e) {
thumbnailUrl = (e === "none") ? null : window.location.origin + e;
console.log("Thumbnail URL " + thumbnailUrl);
},
handlers : function() {
var handlers = {
onSetupPanel : function (payload) {
myPublisher.resize(); // Do I want to do this on iphone?
myPublisher.logEvent("setupPanel");
},
onShowPanel : function(payload) {
myPublisher.logEvent("showPanel");
},
onClearPanelState : function(payload) {
myPublisher.logEvent("clearPanelState");
myPublisher.clearPostTypes();
// Clear all the text fields and reset radio buttons
},
onSuccess : function() {
myPublisher.logEvent("success");
},
onFailure : function (payload) {
myPublisher.logEvent("failure");
myPublisher.clearPostTypes();
if (payload && payload.errors && payload.errors.message) {
alert("Error: " + payload.errors.message);
}
},
onGetPayload : function() {
myPublisher.logEvent("getPayload");
var p = {};
if (postType === 'Text') {
// Example of a Text Post
p.feedItemType = "TextPost";
p.auxText = $$.byId('auxText').value;
}
else if (postType === 'Link') {
// Example of a Link Post
p.feedItemType = "LinkPost";
p.auxText = $$.byId('auxText').value;
p.url = "http://www.salesforce.com";
p.urlName = $$.byId('title').value;
}
else if (postType === 'Canvas') {
// Example of a Canvas Post
p.feedItemType = "CanvasPost";
p.auxText = $$.byId('auxText').value;
p.namespace = sr.context.application.namespace;
p.developerName = sr.context.application.developerName;
p.height = $$.byId('height').value;
p.title = $$.byId('title').value;
p.description = $$.byId('description').value;
p.parameters = $$.byId('parameters').value;
p.thumbnailUrl = thumbnailUrl;
}
$$.client.publish(sr.client, {name : 'publisher.setPayload', payload : p});
}
};
return {
subscriptions : [
{name : 'publisher.setupPanel', onData : handlers.onSetupPanel},
{name : 'publisher.showPanel', onData : handlers.onShowPanel},
{name : 'publisher.clearPanelState', onData : handlers.onClearPanelState},
{name : 'publisher.failure', onData : handlers.onFailure},
{name : 'publisher.success', onData : handlers.onSuccess},
{name : 'publisher.getPayload', onData : handlers.onGetPayload}
]
};
}
};
}(Sfdc.canvas));<|fim▁end|> | |
<|file_name|>hold.rs<|end_file_name|><|fim▁begin|>use std::io::{self, Read, Write};
use crate::BufferRedirect;
/// Hold output until dropped. On drop, the held output is sent to the stdout/stderr.
///
/// Note: This will ignore IO errors when printing held output.
pub struct Hold {
buf_redir: Option<BufferRedirect>,
is_stdout: bool,
}
impl Hold {
/// Hold stderr output.
pub fn stderr() -> io::Result<Hold> {
Ok(Hold {
buf_redir: Some(BufferRedirect::stderr()?),
is_stdout: false,
})
}
/// Hold stdout output.
pub fn stdout() -> io::Result<Hold> {
Ok(Hold {
buf_redir: Some(BufferRedirect::stdout()?),
is_stdout: true,
})
}
}
impl Drop for Hold {
fn drop(&mut self) {
fn read_into<R: Read, W: Write>(mut from: R, mut to: W) {
// TODO: use sendfile?
let mut buf = [0u8; 4096];
loop {
// Ignore errors
match from.read(&mut buf) {
Ok(0) => break,
Ok(size) => {
if to.write_all(&buf[..size]).is_err() {
break;
}
}
Err(_) => break,
}
}
// Just in case...
let _ = to.flush();
}
let from = self.buf_redir.take().unwrap().into_inner();
// Ignore errors.
if self.is_stdout {
let stdout = io::stdout();<|fim▁hole|> let stderr = io::stderr();
read_into(from, stderr.lock());
}
}
}<|fim▁end|> | read_into(from, stdout.lock());
} else { |
<|file_name|>celery.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import<|fim▁hole|>import os
from celery import Celery
from django.conf import settings
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'assembly.settings.local')
app = Celery('assembly')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
app.conf.update(
CELERY_RESULT_BACKEND='djcelery.backends.database:DatabaseBackend',
)<|fim▁end|> | |
<|file_name|>PartContextMenu.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012-2015 S-Core Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Constructor
* PartContextMenu
*
* @see
* @since: 2015.07.15
* @author: [email protected]
*/
// @formatter:off
define([
'webida-lib/util/genetic',
'webida-lib/util/logger/logger-client'
], function (
genetic,
Logger
) {
'use strict';
// @formatter:on
/**
* @typedef {Object} Thenable
*/
var logger = new Logger();
//logger.setConfig('level', Logger.LEVELS.log);
//logger.off();
function PartContextMenu(allItems, part) {
logger.info('new PartContextMenu(allItems, part)');
this.setAllItems(allItems);
this.setPart(part);
}
genetic.inherits(PartContextMenu, Object, {
/**
* Creates Available Menu Items then return Thenable
* @return {Thenable}
* @abstract
*/
getAvailableItems: function () {
throw new Error('getAvailableItems() should be implemented by ' + this.constructor.name);
},
/**
* @param {Object}
*/
setAllItems: function (allItems) {
this.allItems = allItems;
},
/**
* @return {Object}
*/
getAllItems: function () {
return this.allItems;
},
/**
* @param {Part}
*/
setPart: function (part) {<|fim▁hole|> this.part = part;
},
/**
* @return {Part}
*/
getPart: function () {
return this.part;
},
/**
* Convenient method
* @return {PartRegistry}
*/
getPartRegistry: function () {
var workbench = require('webida-lib/plugins/workbench/plugin');
var page = workbench.getCurrentPage();
return page.getPartRegistry();
}
});
return PartContextMenu;
});<|fim▁end|> | |
<|file_name|>O2AInterfaceExample.java<|end_file_name|><|fim▁begin|>/*****************************************************************
<|fim▁hole|>
GNU Lesser General Public License
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation,
version 2.1 of the License.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
*****************************************************************/
package examples.O2AInterface;
import jade.core.Runtime;
import jade.core.Profile;
import jade.core.ProfileImpl;
import jade.wrapper.*;
/**
* This class shows an example of how to run JADE as a library from an external program
* and in particular how to start an agent and interact with it by means of the
* Object-to-Agent (O2A) interface.
*
* @author Giovanni Iavarone - Michele Izzo
*/
public class O2AInterfaceExample {
public static void main(String[] args) throws StaleProxyException, InterruptedException {
// Get a hold to the JADE runtime
Runtime rt = Runtime.instance();
// Launch the Main Container (with the administration GUI on top) listening on port 8888
System.out.println(">>>>>>>>>>>>>>> Launching the platform Main Container...");
Profile pMain = new ProfileImpl(null, 8888, null);
pMain.setParameter(Profile.GUI, "true");
ContainerController mainCtrl = rt.createMainContainer(pMain);
// Create and start an agent of class CounterAgent
System.out.println(">>>>>>>>>>>>>>> Starting up a CounterAgent...");
AgentController agentCtrl = mainCtrl.createNewAgent("CounterAgent", CounterAgent.class.getName(), new Object[0]);
agentCtrl.start();
// Wait a bit
System.out.println(">>>>>>>>>>>>>>> Wait a bit...");
Thread.sleep(10000);
try {
// Retrieve O2A interface CounterManager1 exposed by the agent to make it activate the counter
System.out.println(">>>>>>>>>>>>>>> Activate counter");
CounterManager1 o2a1 = agentCtrl.getO2AInterface(CounterManager1.class);
o2a1.activateCounter();
// Wait a bit
System.out.println(">>>>>>>>>>>>>>> Wait a bit...");
Thread.sleep(30000);
// Retrieve O2A interface CounterManager2 exposed by the agent to make it de-activate the counter
System.out.println(">>>>>>>>>>>>>>> Deactivate counter");
CounterManager2 o2a2 = agentCtrl.getO2AInterface(CounterManager2.class);
o2a2.deactivateCounter();
}
catch (StaleProxyException e) {
e.printStackTrace();
}
}
}<|fim▁end|> | JADE - Java Agent DEvelopment Framework is a framework to develop
multi-agent systems in compliance with the FIPA specifications.
Copyright (C) 2000 CSELT S.p.A.
|
<|file_name|>event_details_converter.py<|end_file_name|><|fim▁begin|>from collections import defaultdict<|fim▁hole|>
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.models.event_details import EventDetails
from backend.common.models.keys import TeamKey
from backend.common.queries.dict_converters.converter_base import ConverterBase
EventDetailsDict = NewType("EventDetailsDict", Dict)
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
ApiMajorVersion.API_V3: 3,
}
@classmethod
def _convert_list(cls, model_list: List[EventDetails], version: ApiMajorVersion):
CONVERTERS = {
3: cls.eventsDetailsConverter_v3,
}
return CONVERTERS[version](model_list)
@classmethod
def eventsDetailsConverter_v3(cls, event_details: List[EventDetails]):
return list(map(cls.eventDetailsConverter_v3, event_details))
@classmethod
def eventDetailsConverter_v3(cls, event_details: EventDetails) -> EventDetailsDict:
normalized_oprs = defaultdict(dict)
if event_details and event_details.matchstats:
for stat_type, stats in event_details.matchstats.items():
if stat_type in {"oprs", "dprs", "ccwms"}:
for team, value in cast(Dict[TeamKey, float], stats).items():
if "frc" not in team: # Normalize output
team = "frc{}".format(team)
normalized_oprs[stat_type][team] = value
rankings = {}
if event_details:
rankings = event_details.renderable_rankings
else:
rankings = {
"extra_stats_info": [],
"rankings": [],
"sort_order_info": None,
}
event_details_dict = {
"alliances": event_details.alliance_selections if event_details else [],
"district_points": event_details.district_points if event_details else {},
"insights": event_details.insights
if event_details
else {"qual": {}, "playoff": {}},
"oprs": normalized_oprs if normalized_oprs else {}, # OPRs, DPRs, CCWMs
"predictions": event_details.predictions if event_details else {},
"rankings": rankings,
}
return EventDetailsDict(event_details_dict)<|fim▁end|> | from typing import cast, Dict, List, NewType |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from flask import render_template
from . import auth
<|fim▁hole|><|fim▁end|> |
@auth.route('/login')
def login():
return render_template('auth/login.html') |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
mod checker;
#[cfg(fbcode_build)]
mod facebook;
mod identity;
mod membership;
#[cfg(not(fbcode_build))]
mod oss;
<|fim▁hole|>pub use membership::{
ArcMembershipChecker, BoxMembershipChecker, MembershipChecker, MembershipCheckerBuilder,
};<|fim▁end|> | pub use checker::{
ArcPermissionChecker, BoxPermissionChecker, PermissionChecker, PermissionCheckerBuilder,
};
pub use identity::{MononokeIdentity, MononokeIdentitySet, MononokeIdentitySetExt}; |
<|file_name|>row.rs<|end_file_name|><|fim▁begin|>use blas::{Copy, Gemv};
use Forward;
use onezero::{One, Zero};
use ops::{set, self};
use traits::Transpose as _0;
use traits::{Matrix, Set, Slice, SliceMut};
use {Product, RowMut, Row, RowVec, Scaled, SubMat, Transposed};
// NOTE Core
impl<'a, T> Set<T> for RowMut<'a, T> where T: Copy {
fn set(&mut self, value: T) {
let RowMut(Row(ref mut y)) = *self;
let ref x = value;
set::strided(x, y)
}
}
// NOTE Core
impl<'a, 'b, T> Set<Row<'a, T>> for RowMut<'b, T> where T: Copy {
fn set(&mut self, rhs: Row<T>) {
unsafe {
assert_eq!(self.ncols(), rhs.ncols());
let RowMut(Row(ref mut y)) = *self;
let Row(ref x) = rhs;
ops::copy_strided(x, y)
}
}
}
// NOTE Secondary
impl<'a, 'b, 'c, T> Set<Scaled<Product<Row<'b, T>, SubMat<'a, T>>>> for RowMut<'c, T> where
T: Gemv + Zero,
{<|fim▁hole|> self.slice_mut(..).t().set(rhs.t())
}
}
// NOTE Secondary
impl<'a, 'b, 'c, T>
Set<Scaled<Product<Row<'b, T>, Transposed<SubMat<'a, T>>>>> for RowMut<'c, T> where
T: Gemv + Zero,
{
fn set(&mut self, rhs: Scaled<Product<Row<T>, Transposed<SubMat<T>>>>) {
self.slice_mut(..).t().set(rhs.t())
}
}
// NOTE Secondary
impl<'a, 'b, 'c, T> Set<Product<Row<'b, T>, SubMat<'a, T>>> for RowMut<'c, T> where
T: Gemv + One + Zero,
{
fn set(&mut self, rhs: Product<Row<T>, SubMat<T>>) {
self.set(Scaled(T::one(), rhs))
}
}
// NOTE Secondary
impl<'a, 'b, 'c, T> Set<Product<Row<'b, T>, Transposed<SubMat<'a, T>>>> for RowMut<'c, T> where
T: Gemv + One + Zero,
{
fn set(&mut self, rhs: Product<Row<T>, Transposed<SubMat<T>>>) {
self.set(Scaled(T::one(), rhs))
}
}
// NOTE Forward
impl<T> Set<T> for RowVec<T> where T: Copy {
fn set(&mut self, value: T) {
self.slice_mut(..).set(value)
}
}
macro_rules! forward {
($lhs:ty { $($rhs:ty { $($bound:ident),+ }),+, }) => {
$(
// NOTE Forward
impl<'a, 'b, 'c, T> Set<$rhs> for $lhs where $(T: $bound),+ {
fn set(&mut self, rhs: $rhs) {
self.slice_mut(..).set(rhs.slice(..))
}
}
)+
}
}
forward!(RowMut<'a, T> {
&'b RowMut<'c, T> { Copy },
&'b RowVec<T> { Copy },
});
forward!(RowVec<T> {
&'a RowMut<'b, T> { Copy },
&'a RowVec<T> { Copy },
Product<Row<'a, T>, SubMat<'b, T>> { Gemv, One, Zero },
Product<Row<'a, T>, Transposed<SubMat<'b, T>>> { Gemv, One, Zero },
Scaled<Product<Row<'a, T>, SubMat<'b, T>>> { Gemv, Zero },
Scaled<Product<Row<'a, T>, Transposed<SubMat<'b, T>>>> { Gemv, Zero },
});<|fim▁end|> | fn set(&mut self, rhs: Scaled<Product<Row<T>, SubMat<T>>>) { |
<|file_name|>container_resize_test.go<|end_file_name|><|fim▁begin|>package client // import "github.com/docker/docker/client"
import (
"bytes"
"context"
"fmt"
"io/ioutil"
"net/http"
"strings"
"testing"
"github.com/docker/docker/api/types"
)
func TestContainerResizeError(t *testing.T) {
client := &Client{
client: newMockClient(errorMock(http.StatusInternalServerError, "Server error")),
}
err := client.ContainerResize(context.Background(), "container_id", types.ResizeOptions{})
if err == nil || err.Error() != "Error response from daemon: Server error" {
t.Fatalf("expected a Server Error, got %v", err)
}
}
func TestContainerExecResizeError(t *testing.T) {
client := &Client{
client: newMockClient(errorMock(http.StatusInternalServerError, "Server error")),
}
err := client.ContainerExecResize(context.Background(), "exec_id", types.ResizeOptions{})
if err == nil || err.Error() != "Error response from daemon: Server error" {
t.Fatalf("expected a Server Error, got %v", err)
}
}
func TestContainerResize(t *testing.T) {
client := &Client{
client: newMockClient(resizeTransport("/containers/container_id/resize")),
}
err := client.ContainerResize(context.Background(), "container_id", types.ResizeOptions{
Height: 500,
Width: 600,
})
if err != nil {
t.Fatal(err)
}
}
func TestContainerExecResize(t *testing.T) {
client := &Client{
client: newMockClient(resizeTransport("/exec/exec_id/resize")),
}
err := client.ContainerExecResize(context.Background(), "exec_id", types.ResizeOptions{
Height: 500,
Width: 600,
})
if err != nil {
t.Fatal(err)
}
}
func resizeTransport(expectedURL string) func(req *http.Request) (*http.Response, error) {
return func(req *http.Request) (*http.Response, error) {
if !strings.HasPrefix(req.URL.Path, expectedURL) {
return nil, fmt.Errorf("Expected URL '%s', got '%s'", expectedURL, req.URL)
}
query := req.URL.Query()
h := query.Get("h")
if h != "500" {
return nil, fmt.Errorf("h not set in URL query properly. Expected '500', got %s", h)
}
w := query.Get("w")
if w != "600" {
return nil, fmt.Errorf("w not set in URL query properly. Expected '600', got %s", w)
}
return &http.Response{<|fim▁hole|> StatusCode: http.StatusOK,
Body: ioutil.NopCloser(bytes.NewReader([]byte(""))),
}, nil
}
}<|fim▁end|> | |
<|file_name|>interrupt.rs<|end_file_name|><|fim▁begin|>use alloc::boxed::Box;
use collections::string::ToString;<|fim▁hole|>
use system::error::Result;
pub struct InterruptScheme;
static IRQ_NAME: [&'static str; 16] = [
"Programmable Interval Timer",
"Keyboard",
"Cascade",
"Serial 2 and 4",
"Serial 1 and 3",
"Parallel 2",
"Floppy",
"Parallel 1",
"Realtime Clock",
"PCI 1",
"PCI 2",
"PCI 3",
"Mouse",
"Coprocessor",
"IDE Primary",
"IDE Secondary",
];
impl KScheme for InterruptScheme {
fn scheme(&self) -> &str {
"interrupt"
}
fn open(&mut self, _: Url, _: usize) -> Result<Box<Resource>> {
let mut string = format!("{:<6}{:<16}{}\n", "INT", "COUNT", "DESCRIPTION");
{
let interrupts = unsafe { &mut *::env().interrupts.get() };
for interrupt in 0..interrupts.len() {
let count = interrupts[interrupt];
if count > 0 {
let description = match interrupt {
i @ 0x20 ... 0x30 => IRQ_NAME[i - 0x20],
0x80 => "System Call",
0x0 => "Divide by zero exception",
0x1 => "Debug exception",
0x2 => "Non-maskable interrupt",
0x3 => "Breakpoint exception",
0x4 => "Overflow exception",
0x5 => "Bound range exceeded exception",
0x6 => "Invalid opcode exception",
0x7 => "Device not available exception",
0x8 => "Double fault",
0xA => "Invalid TSS exception",
0xB => "Segment not present exception",
0xC => "Stack-segment fault",
0xD => "General protection fault",
0xE => "Page fault",
0x10 => "x87 floating-point exception",
0x11 => "Alignment check exception",
0x12 => "Machine check exception",
0x13 => "SIMD floating-point exception",
0x14 => "Virtualization exception",
0x1E => "Security exception",
_ => "Unknown Interrupt",
};
string.push_str(&format!("{:<6X}{:<16}{}\n", interrupt, count, description));
}
}
}
Ok(box VecResource::new("interrupt:".to_string(), string.into_bytes()))
}
}<|fim▁end|> |
use fs::{KScheme, Resource, Url, VecResource}; |
<|file_name|>drive.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0<Paste>
use super::super::VmmAction;
use crate::parsed_request::{checked_id, Error, ParsedRequest};
use crate::request::{Body, StatusCode};
use logger::{IncMetric, METRICS};
use vmm::vmm_config::drive::{BlockDeviceConfig, BlockDeviceUpdateConfig};
pub(crate) fn parse_put_drive(
body: &Body,
id_from_path: Option<&&str>,
) -> Result<ParsedRequest, Error> {
METRICS.put_api_requests.drive_count.inc();
let id = if let Some(id) = id_from_path {
checked_id(id)?
} else {
METRICS.put_api_requests.drive_fails.inc();
return Err(Error::EmptyID);
};
let device_cfg = serde_json::from_slice::<BlockDeviceConfig>(body.raw()).map_err(|e| {
METRICS.put_api_requests.drive_fails.inc();
Error::SerdeJson(e)
})?;
if id != device_cfg.drive_id {
METRICS.put_api_requests.drive_fails.inc();
Err(Error::Generic(
StatusCode::BadRequest,
"The id from the path does not match the id from the body!".to_string(),
))
} else {
Ok(ParsedRequest::new_sync(VmmAction::InsertBlockDevice(
device_cfg,
)))
}
}
pub(crate) fn parse_patch_drive(
body: &Body,
id_from_path: Option<&&str>,
) -> Result<ParsedRequest, Error> {
METRICS.patch_api_requests.drive_count.inc();
let id = if let Some(id) = id_from_path {
checked_id(id)?
} else {
METRICS.patch_api_requests.drive_fails.inc();
return Err(Error::EmptyID);
};
let block_device_update_cfg: BlockDeviceUpdateConfig =
serde_json::from_slice::<BlockDeviceUpdateConfig>(body.raw()).map_err(|e| {
METRICS.patch_api_requests.drive_fails.inc();
Error::SerdeJson(e)
})?;
if id != block_device_update_cfg.drive_id {
METRICS.patch_api_requests.drive_fails.inc();
return Err(Error::Generic(
StatusCode::BadRequest,
String::from("The id from the path does not match the id from the body!"),
));
}
// Validate request - we need to have at least one parameter set:
// - path_on_host
// - rate_limiter
if block_device_update_cfg.path_on_host.is_none()
&& block_device_update_cfg.rate_limiter.is_none()
{
METRICS.patch_api_requests.drive_fails.inc();
return Err(Error::Generic(
StatusCode::BadRequest,
String::from(
"Please specify at least one property to patch: path_on_host, rate_limiter.",
),
));
}
Ok(ParsedRequest::new_sync(VmmAction::UpdateBlockDevice(
block_device_update_cfg,
)))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::parsed_request::tests::vmm_action_from_request;
#[test]
fn test_parse_patch_drive_request() {
assert!(parse_patch_drive(&Body::new("invalid_payload"), None).is_err());
assert!(parse_patch_drive(&Body::new("invalid_payload"), Some(&"id")).is_err());
// PATCH with invalid fields.
let body = r#"{
"drive_id": "bar",<|fim▁hole|> "is_read_only": false
}"#;
assert!(parse_patch_drive(&Body::new(body), Some(&"2")).is_err());
// PATCH with invalid types on fields. Adding a drive_id as number instead of string.
let body = r#"{
"drive_id": 1000,
"path_on_host": "dummy"
}"#;
let res = parse_patch_drive(&Body::new(body), Some(&"1000"));
assert!(res.is_err());
// PATCH with invalid types on fields. Adding a path_on_host as bool instead of string.
let body = r#"{
"drive_id": 1000,
"path_on_host": true
}"#;
let res = parse_patch_drive(&Body::new(body), Some(&"1000"));
assert!(res.is_err());
// PATCH with missing path_on_host field.
let body = r#"{
"drive_id": "dummy_id"
}"#;
let res = parse_patch_drive(&Body::new(body), Some(&"dummy_id"));
assert!(res.is_err());
// PATCH with missing drive_id field.
let body = r#"{
"path_on_host": true
}"#;
let res = parse_patch_drive(&Body::new(body), Some(&"1000"));
assert!(res.is_err());
// PATCH that tries to update something else other than path_on_host.
let body = r#"{
"drive_id": "dummy_id",
"path_on_host": "dummy_host",
"is_read_only": false
}"#;
let res = parse_patch_drive(&Body::new(body), Some(&"1234"));
assert!(res.is_err());
// PATCH with payload that is not a json.
let body = r#"{
"fields": "dummy_field"
}"#;
assert!(parse_patch_drive(&Body::new(body), Some(&"1234")).is_err());
let body = r#"{
"drive_id": "foo",
"path_on_host": "dummy"
}"#;
#[allow(clippy::match_wild_err_arm)]
match vmm_action_from_request(parse_patch_drive(&Body::new(body), Some(&"foo")).unwrap()) {
VmmAction::UpdateBlockDevice(cfg) => {
assert_eq!(cfg.drive_id, "foo".to_string());
assert_eq!(cfg.path_on_host.unwrap(), "dummy".to_string());
}
_ => panic!("Test failed: Invalid parameters"),
};
let body = r#"{
"drive_id": "foo",
"path_on_host": "dummy"
}"#;
// Must fail since the drive id differs from id_from_path (foo vs bar).
assert!(parse_patch_drive(&Body::new(body), Some(&"bar")).is_err());
let body = r#"{
"drive_id": "foo",
"rate_limiter": {
"bandwidth": {
"size": 5000,
"refill_time": 100
},
"ops": {
"size": 500,
"refill_time": 100
}
}
}"#;
// Validate that updating just the ratelimiter works.
assert!(parse_patch_drive(&Body::new(body), Some(&"foo")).is_ok());
let body = r#"{
"drive_id": "foo",
"path_on_host": "/there",
"rate_limiter": {
"bandwidth": {
"size": 5000,
"refill_time": 100
},
"ops": {
"size": 500,
"refill_time": 100
}
}
}"#;
// Validate that updating both path and rate limiter succeds.
assert!(parse_patch_drive(&Body::new(body), Some(&"foo")).is_ok());
let body = r#"{
"drive_id": "foo",
"path_on_host": "/there",
"rate_limiter": {
"ops": {
"size": 100
}
}
}"#;
// Validate that parse_patch_drive fails for invalid rate limiter cfg.
assert!(parse_patch_drive(&Body::new(body), Some(&"foo")).is_err());
}
#[test]
fn test_parse_put_drive_request() {
assert!(parse_put_drive(&Body::new("invalid_payload"), None).is_err());
assert!(parse_put_drive(&Body::new("invalid_payload"), Some(&"id")).is_err());
// PUT with invalid fields.
let body = r#"{
"drive_id": "bar",
"is_read_only": false
}"#;
assert!(parse_put_drive(&Body::new(body), Some(&"2")).is_err());
// PUT with missing all optional fields.
let body = r#"{
"drive_id": "1000",
"path_on_host": "dummy",
"is_root_device": true,
"is_read_only": true
}"#;
assert!(parse_put_drive(&Body::new(body), Some(&"1000")).is_ok());
// PUT with invalid types on fields. Adding a drive_id as number instead of string.
assert!(parse_put_drive(&Body::new(body), Some(&"foo")).is_err());
// PUT with the complete configuration.
let body = r#"{
"drive_id": "1000",
"path_on_host": "dummy",
"is_root_device": true,
"partuuid": "string",
"is_read_only": true,
"cache_type": "Unsafe",
"io_engine": "Sync",
"rate_limiter": {
"bandwidth": {
"size": 0,
"one_time_burst": 0,
"refill_time": 0
},
"ops": {
"size": 0,
"one_time_burst": 0,
"refill_time": 0
}
}
}"#;
assert!(parse_put_drive(&Body::new(body), Some(&"1000")).is_ok());
}
}<|fim▁end|> | |
<|file_name|>stdio.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! Non-blocking access to stdin, stdout, and stderr.
This module provides bindings to the local event loop's TTY interface, using it
to offer synchronous but non-blocking versions of stdio. These handles can be
inspected for information about terminal dimensions or for related information
about the stream or terminal to which it is attached.
# Example
```rust
# #![allow(unused_must_use)]
use std::io;
let mut out = io::stdout();
out.write(b"Hello, world!");
```
*/
use failure::local_stderr;
use fmt;
use io::{Reader, Writer, IoResult, IoError, OtherIoError,
standard_error, EndOfFile, LineBufferedWriter, BufferedReader};
use iter::Iterator;
use kinds::Send;
use libc;
use option::{Option, Some, None};
use boxed::Box;
use result::{Ok, Err};
use rt;
use rt::local::Local;
use rt::task::Task;
use rt::rtio::{DontClose, IoFactory, LocalIo, RtioFileStream, RtioTTY};
use slice::ImmutableSlice;
use str::StrSlice;
use uint;
// And so begins the tale of acquiring a uv handle to a stdio stream on all
// platforms in all situations. Our story begins by splitting the world into two
// categories, windows and unix. Then one day the creators of unix said let
// there be redirection! And henceforth there was redirection away from the
// console for standard I/O streams.
//
// After this day, the world split into four factions:
//
// 1. Unix with stdout on a terminal.
// 2. Unix with stdout redirected.
// 3. Windows with stdout on a terminal.
// 4. Windows with stdout redirected.
//
// Many years passed, and then one day the nation of libuv decided to unify this
// world. After months of toiling, uv created three ideas: TTY, Pipe, File.
// These three ideas propagated throughout the lands and the four great factions
// decided to settle among them.
//
// The groups of 1, 2, and 3 all worked very hard towards the idea of TTY. Upon
// doing so, they even enhanced themselves further then their Pipe/File
// brethren, becoming the dominant powers.
//
// The group of 4, however, decided to work independently. They abandoned the
// common TTY belief throughout, and even abandoned the fledgling Pipe belief.
// The members of the 4th faction decided to only align themselves with File.
//
// tl;dr; TTY works on everything but when windows stdout is redirected, in that
// case pipe also doesn't work, but magically file does!
enum StdSource {
TTY(Box<RtioTTY + Send>),
File(Box<RtioFileStream + Send>),
}
fn src<T>(fd: libc::c_int, readable: bool, f: |StdSource| -> T) -> T {
LocalIo::maybe_raise(|io| {
Ok(match io.tty_open(fd, readable) {
Ok(tty) => f(TTY(tty)),
Err(_) => f(File(io.fs_from_raw_fd(fd, DontClose))),
})
}).map_err(IoError::from_rtio_error).unwrap()
}
local_data_key!(local_stdout: Box<Writer + Send>)
/// Creates a new non-blocking handle to the stdin of the current process.
///
/// The returned handled is buffered by default with a `BufferedReader`. If
/// buffered access is not desired, the `stdin_raw` function is provided to
/// provided unbuffered access to stdin.
///
/// Care should be taken when creating multiple handles to the stdin of a
/// process. Because this is a buffered reader by default, it's possible for
/// pending input to be unconsumed in one reader and unavailable to other
/// readers. It is recommended that only one handle at a time is created for the
/// stdin of a process.
///
/// See `stdout()` for more notes about this function.
pub fn stdin() -> BufferedReader<StdReader> {
// The default buffer capacity is 64k, but apparently windows doesn't like
// 64k reads on stdin. See #13304 for details, but the idea is that on
// windows we use a slightly smaller buffer that's been seen to be
// acceptable.
if cfg!(windows) {
BufferedReader::with_capacity(8 * 1024, stdin_raw())
} else {
BufferedReader::new(stdin_raw())
}
}
/// Creates a new non-blocking handle to the stdin of the current process.
///
/// Unlike `stdin()`, the returned reader is *not* a buffered reader.
///
/// See `stdout()` for more notes about this function.
pub fn stdin_raw() -> StdReader {
src(libc::STDIN_FILENO, true, |src| StdReader { inner: src })
}
/// Creates a line-buffered handle to the stdout of the current process.
///
/// Note that this is a fairly expensive operation in that at least one memory
/// allocation is performed. Additionally, this must be called from a runtime
/// task context because the stream returned will be a non-blocking object using
/// the local scheduler to perform the I/O.
///
/// Care should be taken when creating multiple handles to an output stream for
/// a single process. While usage is still safe, the output may be surprising if
/// no synchronization is performed to ensure a sane output.
pub fn stdout() -> LineBufferedWriter<StdWriter> {
LineBufferedWriter::new(stdout_raw())
}
/// Creates an unbuffered handle to the stdout of the current process
///
/// See notes in `stdout()` for more information.
pub fn stdout_raw() -> StdWriter {
src(libc::STDOUT_FILENO, false, |src| StdWriter { inner: src })
}
/// Creates a line-buffered handle to the stderr of the current process.
///
/// See `stdout()` for notes about this function.
pub fn stderr() -> LineBufferedWriter<StdWriter> {
LineBufferedWriter::new(stderr_raw())
}
/// Creates an unbuffered handle to the stderr of the current process
///
/// See notes in `stdout()` for more information.
pub fn stderr_raw() -> StdWriter {
src(libc::STDERR_FILENO, false, |src| StdWriter { inner: src })
}
/// Resets the task-local stdout handle to the specified writer
///
/// This will replace the current task's stdout handle, returning the old
/// handle. All future calls to `print` and friends will emit their output to
/// this specified handle.
///
/// Note that this does not need to be called for all new tasks; the default
/// output handle is to the process's stdout stream.
pub fn set_stdout(stdout: Box<Writer + Send>) -> Option<Box<Writer + Send>> {
local_stdout.replace(Some(stdout)).and_then(|mut s| {
let _ = s.flush();
Some(s)
})
}
/// Resets the task-local stderr handle to the specified writer
///
/// This will replace the current task's stderr handle, returning the old
/// handle. Currently, the stderr handle is used for printing failure messages
/// during task failure.
///
/// Note that this does not need to be called for all new tasks; the default
/// output handle is to the process's stderr stream.
pub fn set_stderr(stderr: Box<Writer + Send>) -> Option<Box<Writer + Send>> {
local_stderr.replace(Some(stderr)).and_then(|mut s| {
let _ = s.flush();
Some(s)
})
}
// Helper to access the local task's stdout handle
//
// Note that this is not a safe function to expose because you can create an
// aliased pointer very easily:
//
// with_task_stdout(|io1| {
// with_task_stdout(|io2| {
// // io1 aliases io2
// })
// })
fn with_task_stdout(f: |&mut Writer| -> IoResult<()>) {
let result = if Local::exists(None::<Task>) {
let mut my_stdout = local_stdout.replace(None).unwrap_or_else(|| {
box stdout() as Box<Writer + Send>
});
let result = f(&mut *my_stdout);
local_stdout.replace(Some(my_stdout));
result
} else {
let mut io = rt::Stdout;
f(&mut io as &mut Writer)
};
match result {
Ok(()) => {}
Err(e) => fail!("failed printing to stdout: {}", e),
}
}
/// Flushes the local task's stdout handle.
///
/// By default, this stream is a line-buffering stream, so flushing may be
/// necessary to ensure that all output is printed to the screen (if there are
/// no newlines printed).<|fim▁hole|>/// messages are always terminated in a newline (no need to flush).
pub fn flush() {
with_task_stdout(|io| io.flush())
}
/// Prints a string to the stdout of the current process. No newline is emitted
/// after the string is printed.
pub fn print(s: &str) {
with_task_stdout(|io| io.write(s.as_bytes()))
}
/// Prints a string to the stdout of the current process. A literal
/// `\n` character is printed to the console after the string.
pub fn println(s: &str) {
with_task_stdout(|io| {
io.write(s.as_bytes()).and_then(|()| io.write([b'\n']))
})
}
/// Similar to `print`, but takes a `fmt::Arguments` structure to be compatible
/// with the `format_args!` macro.
pub fn print_args(fmt: &fmt::Arguments) {
with_task_stdout(|io| write!(io, "{}", fmt))
}
/// Similar to `println`, but takes a `fmt::Arguments` structure to be
/// compatible with the `format_args!` macro.
pub fn println_args(fmt: &fmt::Arguments) {
with_task_stdout(|io| writeln!(io, "{}", fmt))
}
/// Representation of a reader of a standard input stream
pub struct StdReader {
inner: StdSource
}
impl StdReader {
/// Returns whether this stream is attached to a TTY instance or not.
pub fn isatty(&self) -> bool {
match self.inner {
TTY(..) => true,
File(..) => false,
}
}
}
impl Reader for StdReader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
let ret = match self.inner {
TTY(ref mut tty) => {
// Flush the task-local stdout so that weird issues like a
// print!'d prompt not being shown until after the user hits
// enter.
flush();
tty.read(buf)
},
File(ref mut file) => file.read(buf).map(|i| i as uint),
}.map_err(IoError::from_rtio_error);
match ret {
// When reading a piped stdin, libuv will return 0-length reads when
// stdin reaches EOF. For pretty much all other streams it will
// return an actual EOF error, but apparently for stdin it's a
// little different. Hence, here we convert a 0 length read to an
// end-of-file indicator so the caller knows to stop reading.
Ok(0) => { Err(standard_error(EndOfFile)) }
ret @ Ok(..) | ret @ Err(..) => ret,
}
}
}
/// Representation of a writer to a standard output stream
pub struct StdWriter {
inner: StdSource
}
impl StdWriter {
/// Gets the size of this output window, if possible. This is typically used
/// when the writer is attached to something like a terminal, this is used
/// to fetch the dimensions of the terminal.
///
/// If successful, returns `Ok((width, height))`.
///
/// # Error
///
/// This function will return an error if the output stream is not actually
/// connected to a TTY instance, or if querying the TTY instance fails.
pub fn winsize(&mut self) -> IoResult<(int, int)> {
match self.inner {
TTY(ref mut tty) => {
tty.get_winsize().map_err(IoError::from_rtio_error)
}
File(..) => {
Err(IoError {
kind: OtherIoError,
desc: "stream is not a tty",
detail: None,
})
}
}
}
/// Controls whether this output stream is a "raw stream" or simply a normal
/// stream.
///
/// # Error
///
/// This function will return an error if the output stream is not actually
/// connected to a TTY instance, or if querying the TTY instance fails.
pub fn set_raw(&mut self, raw: bool) -> IoResult<()> {
match self.inner {
TTY(ref mut tty) => {
tty.set_raw(raw).map_err(IoError::from_rtio_error)
}
File(..) => {
Err(IoError {
kind: OtherIoError,
desc: "stream is not a tty",
detail: None,
})
}
}
}
/// Returns whether this stream is attached to a TTY instance or not.
pub fn isatty(&self) -> bool {
match self.inner {
TTY(..) => true,
File(..) => false,
}
}
}
impl Writer for StdWriter {
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
// As with stdin on windows, stdout often can't handle writes of large
// sizes. For an example, see #14940. For this reason, chunk the output
// buffer on windows, but on unix we can just write the whole buffer all
// at once.
let max_size = if cfg!(windows) {64 * 1024} else {uint::MAX};
for chunk in buf.chunks(max_size) {
try!(match self.inner {
TTY(ref mut tty) => tty.write(chunk),
File(ref mut file) => file.write(chunk),
}.map_err(IoError::from_rtio_error))
}
Ok(())
}
}
#[cfg(test)]
mod tests {
iotest!(fn smoke() {
// Just make sure we can acquire handles
stdin();
stdout();
stderr();
})
iotest!(fn capture_stdout() {
use io::{ChanReader, ChanWriter};
let (tx, rx) = channel();
let (mut r, w) = (ChanReader::new(rx), ChanWriter::new(tx));
spawn(proc() {
set_stdout(box w);
println!("hello!");
});
assert_eq!(r.read_to_string().unwrap(), "hello!\n".to_string());
})
iotest!(fn capture_stderr() {
use realstd::comm::channel;
use realstd::io::{Writer, ChanReader, ChanWriter, Reader};
let (tx, rx) = channel();
let (mut r, w) = (ChanReader::new(rx), ChanWriter::new(tx));
spawn(proc() {
::realstd::io::stdio::set_stderr(box w);
fail!("my special message");
});
let s = r.read_to_string().unwrap();
assert!(s.as_slice().contains("my special message"));
})
}<|fim▁end|> | ///
/// Note that logging macros do not use this stream. Using the logging macros
/// will emit output to stderr, and while they are line buffered the log |
<|file_name|>convolutional.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import
import theano
import theano.tensor as T
from theano.tensor.signal import downsample
from .. import activations, initializations
from ..utils.theano_utils import shared_zeros
from ..layers.core import Layer
class Convolution1D(Layer):
def __init__(self, nb_filter, stack_size, filter_length,
init='uniform', activation='linear', weights=None,
border_mode='valid', subsample_length=1,
W_regularizer=None, b_regularizer=None, activity_regularizer=None, W_constraint=None, b_constraint=None):
nb_row = 1
nb_col = filter_length
self.nb_filter = nb_filter
self.stack_size = stack_size
self.filter_length = filter_length
self.subsample_length = subsample_length
self.init = initializations.get(init)
self.activation = activations.get(activation)
self.subsample = (1, subsample_length)
self.border_mode = border_mode
self.input = T.tensor4()
self.W_shape = (nb_filter, stack_size, nb_row, nb_col)
self.W = self.init(self.W_shape)
self.b = shared_zeros((nb_filter,))
self.params = [self.W, self.b]
self.regularizers = []
if W_regularizer:<|fim▁hole|> if b_regularizer:
b_regularizer.set_param(self.b)
self.regularizers.append(b_regularizer)
if activity_regularizer:
activity_regularizer.set_layer(self)
self.regularizers.append(activity_regularizer)
self.constraints = [W_constraint, b_constraint]
if weights is not None:
self.set_weights(weights)
def get_output(self, train):
X = self.get_input(train)
conv_out = theano.tensor.nnet.conv.conv2d(X, self.W,
border_mode=self.border_mode, subsample=self.subsample)
output = self.activation(conv_out + self.b.dimshuffle('x', 0, 'x', 'x'))
return output
def get_config(self):
return {"name":self.__class__.__name__,
"nb_filter":self.nb_filter,
"stack_size":self.stack_size,
"filter_length":self.filter_length,
"init":self.init.__name__,
"activation":self.activation.__name__,
"border_mode":self.border_mode,
"subsample_length":self.subsample_length}
class MaxPooling1D(Layer):
def __init__(self, pool_length=2, ignore_border=True):
self.pool_length = pool_length
self.poolsize = (1, pool_length)
self.ignore_border = ignore_border
self.input = T.tensor4()
self.params = []
def get_output(self, train):
X = self.get_input(train)
output = downsample.max_pool_2d(X, self.poolsize, ignore_border=self.ignore_border)
return output
def get_config(self):
return {"name":self.__class__.__name__,
"pool_length":self.pool_length,
"ignore_border":self.ignore_border}
class Convolution2D(Layer):
def __init__(self, nb_filter, stack_size, nb_row, nb_col,
init='glorot_uniform', activation='linear', weights=None,
border_mode='valid', subsample=(1, 1),
W_regularizer=None, b_regularizer=None, activity_regularizer=None, W_constraint=None, b_constraint=None):
super(Convolution2D,self).__init__()
self.init = initializations.get(init)
self.activation = activations.get(activation)
self.subsample = subsample
self.border_mode = border_mode
self.nb_filter = nb_filter
self.stack_size = stack_size
self.nb_row = nb_row
self.nb_col = nb_col
self.input = T.tensor4()
self.W_shape = (nb_filter, stack_size, nb_row, nb_col)
self.W = self.init(self.W_shape)
self.b = shared_zeros((nb_filter,))
self.params = [self.W, self.b]
self.regularizers = []
if W_regularizer:
W_regularizer.set_param(self.W)
self.regularizers.append(W_regularizer)
if b_regularizer:
b_regularizer.set_param(self.b)
self.regularizers.append(b_regularizer)
if activity_regularizer:
activity_regularizer.set_layer(self)
self.regularizers.append(activity_regularizer)
self.constraints = [W_constraint, b_constraint]
if weights is not None:
self.set_weights(weights)
def get_output(self, train):
X = self.get_input(train)
conv_out = theano.tensor.nnet.conv.conv2d(X, self.W,
border_mode=self.border_mode, subsample=self.subsample)
output = self.activation(conv_out + self.b.dimshuffle('x', 0, 'x', 'x'))
return output
def get_config(self):
return {"name":self.__class__.__name__,
"nb_filter":self.nb_filter,
"stack_size":self.stack_size,
"nb_row":self.nb_row,
"nb_col":self.nb_col,
"init":self.init.__name__,
"activation":self.activation.__name__,
"border_mode":self.border_mode,
"subsample":self.subsample}
class MaxPooling2D(Layer):
def __init__(self, poolsize=(2, 2), ignore_border=True):
super(MaxPooling2D,self).__init__()
self.input = T.tensor4()
self.poolsize = poolsize
self.ignore_border = ignore_border
def get_output(self, train):
X = self.get_input(train)
output = downsample.max_pool_2d(X, self.poolsize, ignore_border=self.ignore_border)
return output
def get_config(self):
return {"name":self.__class__.__name__,
"poolsize":self.poolsize,
"ignore_border":self.ignore_border}
# class ZeroPadding2D(Layer): TODO
# class Convolution3D: TODO
# class MaxPooling3D: TODO<|fim▁end|> | W_regularizer.set_param(self.W)
self.regularizers.append(W_regularizer) |
<|file_name|>privatethreads.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from misago.threads.views.privatethreads import PrivateThreadsView
urlpatterns = patterns('',
url(r'^private-threads/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/sort-(?P<sort>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/sort-(?P<sort>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/show-(?P<show>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/show-(?P<show>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/sort-(?P<sort>[\w-]+)/show-(?P<show>[\w-]+)/$', PrivateThreadsView.as_view(), name='private_threads'),
url(r'^private-threads/sort-(?P<sort>[\w-]+)/show-(?P<show>[\w-]+)/(?P<page>\d+)/$', PrivateThreadsView.as_view(), name='private_threads'),
)
# thread view
from misago.threads.views.privatethreads import ThreadView
urlpatterns += patterns('',
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/$', ThreadView.as_view(), name='private_thread'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/(?P<page>\d+)/$', ThreadView.as_view(), name='private_thread'),
)
# goto views
from misago.threads.views.privatethreads import (GotoLastView, GotoNewView,
GotoPostView)
urlpatterns += patterns('',
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/last/$', GotoLastView.as_view(), name='private_thread_last'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/new/$', GotoNewView.as_view(), name='private_thread_new'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/post-(?P<post_id>\d+)/$', GotoPostView.as_view(), name='private_thread_post'),
)
# reported posts views
from misago.threads.views.privatethreads import ReportedPostsListView
urlpatterns += patterns('',
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/reported-posts/$', ReportedPostsListView.as_view(), name='private_thread_reported'),
)
# participants views
from misago.threads.views.privatethreads import (ThreadParticipantsView,
EditThreadParticipantsView,
AddThreadParticipantsView,<|fim▁hole|> url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/participants/$', ThreadParticipantsView.as_view(), name='private_thread_participants'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/edit-participants/$', EditThreadParticipantsView.as_view(), name='private_thread_edit_participants'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/remove-participant/(?P<user_id>\d+)/$', RemoveThreadParticipantView.as_view(), name='private_thread_remove_participant'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/add-participants/$', AddThreadParticipantsView.as_view(), name='private_thread_add_participants'),
url(r'^private-thread/(?P<thread_slug>[\w\d-]+)-(?P<thread_id>\d+)/leave/$', LeaveThreadView.as_view(), name='private_thread_leave'),
)
# post views
from misago.threads.views.privatethreads import (QuotePostView, HidePostView,
UnhidePostView,
DeletePostView,
ReportPostView)
urlpatterns += patterns('',
url(r'^private-post/(?P<post_id>\d+)/quote/$', QuotePostView.as_view(), name='quote_private_post'),
url(r'^private-post/(?P<post_id>\d+)/unhide/$', UnhidePostView.as_view(), name='unhide_private_post'),
url(r'^private-post/(?P<post_id>\d+)/hide/$', HidePostView.as_view(), name='hide_private_post'),
url(r'^private-post/(?P<post_id>\d+)/delete/$', DeletePostView.as_view(), name='delete_private_post'),
url(r'^private-post/(?P<post_id>\d+)/report/$', ReportPostView.as_view(), name='report_private_post'),
)
# events view
from misago.threads.views.privatethreads import EventsView
urlpatterns += patterns('',
url(r'^edit-private-event/(?P<event_id>\d+)/$', EventsView.as_view(), name='edit_private_event'),
)
# posting views
from misago.threads.views.privatethreads import PostingView
urlpatterns += patterns('',
url(r'^start-private-thread/$', PostingView.as_view(), name='start_private_thread'),
url(r'^reply-private-thread/(?P<thread_id>\d+)/$', PostingView.as_view(), name='reply_private_thread'),
url(r'^edit-private_post/(?P<thread_id>\d+)/(?P<post_id>\d+)/edit/$', PostingView.as_view(), name='edit_private_post'),
)<|fim▁end|> | RemoveThreadParticipantView,
LeaveThreadView)
urlpatterns += patterns('', |
<|file_name|>manual.rs<|end_file_name|><|fim▁begin|>extern crate ralloc;
use std::ptr;
fn main() {
let ptr1 = ralloc::alloc(30, 3);
let ptr2 = ralloc::alloc(500, 20);
assert_eq!(0, ptr1 as usize % 3);
assert_eq!(0, ptr2 as usize % 20);
unsafe {
ptr::write_bytes(ptr1, 0x22, 30);
for i in 0..500 {
*ptr2.offset(i) = i as u8;
}
assert_eq!(*ptr1, 0x22);
assert_eq!(*ptr1.offset(5), 0x22);
assert_eq!(*ptr2, 0);
assert_eq!(*ptr2.offset(15), 15);
let ptr1 = ralloc::realloc(ptr1, 30, 300, 3);
for i in 0..300 {
*ptr1.offset(i) = i as u8;
}
assert_eq!(*ptr1, 0);
assert_eq!(*ptr1.offset(200), 200);
<|fim▁hole|> }
}<|fim▁end|> | ralloc::free(ptr1, 30);
ralloc::free(ptr2, 500); |
<|file_name|>NotFound.tsx<|end_file_name|><|fim▁begin|>import React from 'react';
function NotFound() {
return <div>Not found!</div>;
}
<|fim▁hole|><|fim▁end|> | export default NotFound; |
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class FontIsasMisc(Package):
"""X.org isas-misc font."""
homepage = "http://cgit.freedesktop.org/xorg/font/isas-misc"
url = "https://www.x.org/archive/individual/font/font-isas-misc-1.0.3.tar.gz"
version('1.0.3', 'ecc3b6fbe8f5721ddf5c7fc66f73e76f')
depends_on('font-util')
depends_on('fontconfig', type='build')
depends_on('mkfontdir', type='build')
depends_on('bdftopcf', type='build')
depends_on('[email protected]:', type='build')
depends_on('util-macros', type='build')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make()
make('install')
# `make install` copies the files to the font-util installation.
# Create a fake directory to convince Spack that we actually<|fim▁hole|><|fim▁end|> | # installed something.
mkdir(prefix.lib) |
<|file_name|>inner.rs<|end_file_name|><|fim▁begin|>use crate::types::*;
use ndarray::*;
/// Inner Product
///
/// Differenct from `Dot` trait, this take complex conjugate of `self` elements
///
pub trait InnerProduct {
type Elem: Scalar;
/// Inner product `(self.conjugate, rhs)
fn inner<S>(&self, rhs: &ArrayBase<S, Ix1>) -> Self::Elem
where
S: Data<Elem = Self::Elem>;
}
impl<A, S> InnerProduct for ArrayBase<S, Ix1>
where
A: Scalar,
S: Data<Elem = A>,<|fim▁hole|> assert_eq!(self.len(), rhs.len());
Zip::from(self)
.and(rhs)
.fold_while(A::zero(), |acc, s, r| {
FoldWhile::Continue(acc + s.conj() * *r)
})
.into_inner()
}
}<|fim▁end|> | {
type Elem = A;
fn inner<St: Data<Elem = A>>(&self, rhs: &ArrayBase<St, Ix1>) -> A { |
<|file_name|>cleanup_streams.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand
from stream_analysis.utils import cleanup
<|fim▁hole|> """
help = "Removes streaming data we no longer need."
def handle(self, *args, **options):
cleanup()<|fim▁end|> | class Command(BaseCommand):
"""
Removes streaming data we no longer need. |
<|file_name|>asmwriter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# convert LLVM GenAsmWriter.inc for Capstone disassembler.
# by Nguyen Anh Quynh, 2019
import sys
if len(sys.argv) == 1:
print("Syntax: %s <GenAsmWriter.inc> <Output-GenAsmWriter.inc> <Output-GenRegisterName.inc> <arch>" %sys.argv[0])
sys.exit(1)
arch = sys.argv[4]
f = open(sys.argv[1])
lines = f.readlines()
f.close()
f1 = open(sys.argv[2], 'w+')
f2 = open(sys.argv[3], 'w+')
f1.write("/* Capstone Disassembly Engine, http://www.capstone-engine.org */\n")
f1.write("/* By Nguyen Anh Quynh <[email protected]>, 2013-2019 */\n")
f1.write("\n")
f2.write("/* Capstone Disassembly Engine, http://www.capstone-engine.org */\n")
f2.write("/* By Nguyen Anh Quynh <[email protected]>, 2013-2019 */\n")
f2.write("\n")
need_endif = False
in_getRegisterName = False
in_printAliasInstr = False
fragment_no = None
skip_printing = False
skip_line = 0
skip_count = 0
def replace_getOp(line):
line2 = line
if 'MI->getOperand(0)' in line:
line2 = line.replace('MI->getOperand(0)', 'MCInst_getOperand(MI, 0)')
elif 'MI->getOperand(1)' in line:
line2 = line.replace('MI->getOperand(1)', 'MCInst_getOperand(MI, 1)')
elif 'MI->getOperand(2)' in line:
line2 = line.replace('MI->getOperand(2)', 'MCInst_getOperand(MI, 2)')
elif 'MI->getOperand(3)' in line:
line2 = line.replace('MI->getOperand(3)', 'MCInst_getOperand(MI, 3)')
elif 'MI->getOperand(4)' in line:
line2 = line.replace('MI->getOperand(4)', 'MCInst_getOperand(MI, 4)')
elif 'MI->getOperand(5)' in line:
line2 = line.replace('MI->getOperand(5)', 'MCInst_getOperand(MI, 5)')
elif 'MI->getOperand(6)' in line:
line2 = line.replace('MI->getOperand(6)', 'MCInst_getOperand(MI, 6)')
elif 'MI->getOperand(7)' in line:
line2 = line.replace('MI->getOperand(7)', 'MCInst_getOperand(MI, 7)')
elif 'MI->getOperand(8)' in line:
line2 = line.replace('MI->getOperand(8)', 'MCInst_getOperand(MI, 8)')
return line2
def replace_getReg(line):
line2 = line
if 'MI->getOperand(0).getReg()' in line:
line2 = line.replace('MI->getOperand(0).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 0))')
elif 'MI->getOperand(1).getReg()' in line:
line2 = line.replace('MI->getOperand(1).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 1))')
elif 'MI->getOperand(2).getReg()' in line:
line2 = line.replace('MI->getOperand(2).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 2))')
elif 'MI->getOperand(3).getReg()' in line:
line2 = line.replace('MI->getOperand(3).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 3))')
elif 'MI->getOperand(4).getReg()' in line:
line2 = line.replace('MI->getOperand(4).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 4))')
elif 'MI->getOperand(5).getReg()' in line:
line2 = line.replace('MI->getOperand(5).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 5))')
elif 'MI->getOperand(6).getReg()' in line:
line2 = line.replace('MI->getOperand(6).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 6))')
elif 'MI->getOperand(7).getReg()' in line:
line2 = line.replace('MI->getOperand(7).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 7))')
elif 'MI->getOperand(8).getReg()' in line:
line2 = line.replace('MI->getOperand(8).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 8))')
return line2
# extract param between text()
# MRI.getRegClass(AArch64::GPR32spRegClassID).contains(MI->getOperand(1).getReg()))
def extract_paren(line, text):
i = line.index(text)
return line[line.index('(', i)+1 : line.index(')', i)]
# extract text between <>
# printSVERegOp<'q'>
def extract_brackets(line):
if '<' in line:
return line[line.index('<')+1 : line.index('>')]
else:
return ''
# delete text between <>, including <>
# printSVERegOp<'q'>
def del_brackets(line):
if '<' in line:
return line[:line.index('<')] + line[line.index('>') + 1:]
else:
return line
def print_line(line):
line = line.replace('::', '_')
line = line.replace('nullptr', 'NULL')
if not skip_printing:
if in_getRegisterName:
f2.write(line + "\n")
else:
f1.write(line + "\n")
for line in lines:
line = line.rstrip()
#print("@", line)
# skip Alias
if arch.upper() == 'X86':
if 'PRINT_ALIAS_INSTR' in line:
# done<|fim▁hole|> if skip_line:
skip_count += 1
if skip_count <= skip_line:
# skip this line
continue
else:
# skip enough number of lines, reset counters
skip_line = 0
skip_count = 0
if "::printInstruction" in line:
if arch.upper() in ('AARCH64', 'ARM64'):
#print_line("static void printInstruction(MCInst *MI, SStream *O, MCRegisterInfo *MRI)\n{")
print_line("static void printInstruction(MCInst *MI, SStream *O)\n{")
else:
print_line("static void printInstruction(MCInst *MI, SStream *O)\n{")
elif 'const char *AArch64InstPrinter::' in line:
continue
elif 'getRegisterName(' in line:
if 'unsigned AltIdx' in line:
print_line("static const char *getRegisterName(unsigned RegNo, unsigned AltIdx)\n{")
else:
print_line("static const char *getRegisterName(unsigned RegNo)\n{")
elif 'getRegisterName' in line:
in_getRegisterName = True
print_line(line)
elif '::printAliasInstr' in line:
if arch.upper() in ('AARCH64', 'PPC'):
print_line("static char *printAliasInstr(MCInst *MI, SStream *OS, MCRegisterInfo *MRI)\n{")
print_line(' #define GETREGCLASS_CONTAIN(_class, _reg) MCRegisterClass_contains(MCRegisterInfo_getRegClass(MRI, _class), MCOperand_getReg(MCInst_getOperand(MI, _reg)))')
else:
print_line("static bool printAliasInstr(MCInst *MI, SStream *OS)\n{")
print_line(" unsigned int I = 0, OpIdx, PrintMethodIdx;")
print_line(" char *tmpString;")
in_printAliasInstr = True
elif 'STI.getFeatureBits()[' in line:
if arch.upper() == 'ARM':
line2 = line.replace('STI.getFeatureBits()[', 'ARM_getFeatureBits(MI->csh->mode, ')
elif arch.upper() == 'AARCH64':
line2 = line.replace('STI.getFeatureBits()[', 'AArch64_getFeatureBits(')
line2 = line2.replace(']', ')')
print_line(line2)
elif ', STI, ' in line:
line2 = line.replace(', STI, ', ', ')
if 'printSVELogicalImm<' in line:
if 'int16' in line:
line2 = line2.replace('printSVELogicalImm', 'printSVELogicalImm16')
line2 = line2.replace('<int16_t>', '')
elif 'int32' in line:
line2 = line2.replace('printSVELogicalImm', 'printSVELogicalImm32')
line2 = line2.replace('<int32_t>', '')
else:
line2 = line2.replace('printSVELogicalImm', 'printSVELogicalImm64')
line2 = line2.replace('<int64_t>', '')
if 'MI->getOperand(' in line:
line2 = replace_getOp(line2)
# C++ template
if 'printPrefetchOp' in line2:
param = extract_brackets(line2)
if param == '':
param = 'false'
line2 = del_brackets(line2)
line2 = line2.replace(', O);', ', O, %s);' %param)
line2 = line2.replace(', OS);', ', OS, %s);' %param)
elif '<false>' in line2:
line2 = line2.replace('<false>', '')
line2 = line2.replace(', O);', ', O, false);')
line2 = line2.replace('STI, ', '')
elif '<true>' in line:
line2 = line2.replace('<true>', '')
line2 = line2.replace(', O);', ', O, true);')
line2 = line2.replace('STI, ', '')
elif 'printAdrLabelOperand' in line:
# C++ template
if '<0>' in line:
line2 = line2.replace('<0>', '')
line2 = line2.replace(', O);', ', O, 0);')
elif '<1>' in line:
line2 = line2.replace('<1>', '')
line2 = line2.replace(', O);', ', O, 1);')
elif '<2>' in line:
line2 = line2.replace('<2>', '')
line2 = line2.replace(', O);', ', O, 2);')
elif 'printImm8OptLsl' in line2:
param = extract_brackets(line2)
line2 = del_brackets(line2)
if '8' in param or '16' in param or '32' in param:
line2 = line2.replace('printImm8OptLsl', 'printImm8OptLsl32')
elif '64' in param:
line2 = line2.replace('printImm8OptLsl', 'printImm8OptLsl64')
elif 'printLogicalImm' in line2:
param = extract_brackets(line2)
line2 = del_brackets(line2)
if '8' in param or '16' in param or '32' in param:
line2 = line2.replace('printLogicalImm', 'printLogicalImm32')
elif '64' in param:
line2 = line2.replace('printLogicalImm', 'printLogicalImm64')
elif 'printSVERegOp' in line2 or 'printGPRSeqPairsClassOperand' in line2 or 'printTypedVectorList' in line2 or 'printPostIncOperand' in line2 or 'printImmScale' in line2 or 'printRegWithShiftExtend' in line2 or 'printUImm12Offset' in line2 or 'printExactFPImm' in line2 or 'printMemExtend' in line2 or 'printZPRasFPR' in line2:
param = extract_brackets(line2)
if param == '':
param = '0'
line2 = del_brackets(line2)
line2 = line2.replace(', O);', ', O, %s);' %param)
line2 = line2.replace(', OS);', ', OS, %s);' %param)
elif 'printComplexRotationOp' in line:
# printComplexRotationOp<90, 0>(MI, 5, STI, O);
bracket_content = line2[line2.index('<') + 1 : line2.index('>')]
line2 = line2.replace('<' + bracket_content + '>', '')
line2 = line2.replace(' O);', ' O, %s);' %bracket_content)
print_line(line2)
elif "static const char AsmStrs[]" in line:
print_line("#ifndef CAPSTONE_DIET")
print_line(" static const char AsmStrs[] = {")
need_endif = True
elif "static const char AsmStrsNoRegAltName[]" in line:
print_line("#ifndef CAPSTONE_DIET")
print_line(" static const char AsmStrsNoRegAltName[] = {")
need_endif = True
elif line == ' O << "\\t";':
print_line(" unsigned int opcode = MCInst_getOpcode(MI);")
print_line(' // printf("opcode = %u\\n", opcode);');
elif 'MI->getOpcode()' in line:
if 'switch' in line:
line2 = line.replace('MI->getOpcode()', 'MCInst_getOpcode(MI)')
else:
line2 = line.replace('MI->getOpcode()', 'opcode')
print_line(line2)
elif 'O << ' in line:
if '"' in line:
line2 = line.lower()
line2 = line2.replace('o << ', 'SStream_concat0(O, ');
else:
line2 = line.replace('O << ', 'SStream_concat0(O, ');
line2 = line2.replace("'", '"')
line2 = line2.replace(';', ');')
if '" : "' in line2: # "segment : offset" in X86
line2 = line2.replace('" : "', '":"')
# ARM
print_line(line2)
if '", #0"' in line2:
print_line(' op_addImm(MI, 0);')
if '", #1"' in line2:
print_line(' op_addImm(MI, 1);')
# PowerPC
if '", 268"' in line2:
print_line(' op_addImm(MI, 268);')
elif '", 256"' in line2:
print_line(' op_addImm(MI, 256);')
elif '", 0, "' in line2 or '", 0"' in line2:
print_line(' op_addImm(MI, 0);')
elif '", -1"' in line2:
print_line(' op_addImm(MI, -1);')
if '[' in line2:
if not '[]' in line2:
print_line(' set_mem_access(MI, true);')
if ']' in line2:
if not '[]' in line2:
print_line(' set_mem_access(MI, false);')
if '".f64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64);')
elif '".f32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32);')
elif '".f16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16);')
elif '".s64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S64);')
elif '".s32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S32);')
elif '".s16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S16);')
elif '".s8\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S8);')
elif '".u64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U64);')
elif '".u32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U32);')
elif '".u16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U16);')
elif '".u8\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U8);')
elif '".i64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_I64);')
elif '".i32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_I32);')
elif '".i16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_I16);')
elif '".i8\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_I8);')
elif '".f16.f64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16F64);')
elif '".f64.f16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64F16);')
elif '".f16.f32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16F32);')
elif '".f32.f16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32F16);')
elif '".f64.f32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64F32);')
elif '".f32.f64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32F64);')
elif '".s32.f32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S32F32);')
elif '".f32.s32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32S32);')
elif '".u32.f32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F32);')
elif '".f32.u32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32U32);')
elif '".p8\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_P8);')
elif '".f64.s16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64S16);')
elif '".s16.f64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S16F64);')
elif '".f32.s16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32S16);')
elif '".s16.f32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S16F32);')
elif '".f64.s32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64S32);')
elif '".s32.f64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S32F64);')
elif '".f64.u16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64U16);')
elif '".u16.f64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F64);')
elif '".f32.u16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32U16);')
elif '".u16.f32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F32);')
elif '".f64.u32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64U32);')
elif '".u32.f64\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F64);')
elif '".f16.u32\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16U32);')
elif '".u32.f16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F16);')
elif '".f16.u16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16U16);')
elif '".u16.f16\\t"' in line2:
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F16);')
elif '"\\tlr"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_LR);')
elif '"\\tapsr_nzcv, fpscr"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_APSR_NZCV);')
print_line(' ARM_addReg(MI, ARM_REG_FPSCR);')
elif '"\\tpc, lr"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_PC);')
print_line(' ARM_addReg(MI, ARM_REG_LR);')
elif '"\\tfpscr, "' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPSCR);')
elif '"\\tfpexc, "' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPEXC);')
elif '"\\tfpinst, "' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPINST);')
elif '"\\tfpinst2, "' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPINST2);')
elif '"\\tfpsid, "' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPSID);')
elif '"\\tsp, "' in line2:
print_line(' ARM_addReg(MI, ARM_REG_SP);')
elif '"\\tsp!, "' in line2:
print_line(' ARM_addReg(MI, ARM_REG_SP);')
elif '", apsr"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_APSR);')
elif '", spsr"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_SPSR);')
elif '", fpscr"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPSCR);')
elif '", fpscr"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPSCR);')
elif '", fpexc"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPEXC);')
elif '", fpinst"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPINST);')
elif '", fpinst2"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPINST2);')
elif '", fpsid"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_FPSID);')
elif '", mvfr0"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_MVFR0);')
elif '", mvfr1"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_MVFR1);')
elif '", mvfr2"' in line2:
print_line(' ARM_addReg(MI, ARM_REG_MVFR2);')
elif '.8\\t' in line2:
print_line(' ARM_addVectorDataSize(MI, 8);')
elif '.16\\t' in line2:
print_line(' ARM_addVectorDataSize(MI, 16);')
elif '.32\\t' in line2:
print_line(' ARM_addVectorDataSize(MI, 32);')
elif '.64\\t' in line2:
print_line(' ARM_addVectorDataSize(MI, 64);')
elif '" ^"' in line2:
print_line(' ARM_addUserMode(MI);')
if '.16b' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_16B);')
elif '.8b' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_8B);')
elif '.4b' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4B);')
elif '.b' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1B);')
elif '.8h' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_8H);')
elif '.4h' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4H);')
elif '.2h' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2H);')
elif '.h' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1H);')
elif '.4s' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4S);')
elif '.2s' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2S);')
elif '.s' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1S);')
elif '.2d' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2D);')
elif '.1d' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1D);')
elif '.1q' in line2:
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1Q);')
if '#0.0' in line2:
print_line(' arm64_op_addFP(MI, 0);')
elif '#0' in line2:
print_line(' arm64_op_addImm(MI, 0);')
elif '#8' in line2:
print_line(' arm64_op_addImm(MI, 8);')
elif '#16' in line2:
print_line(' arm64_op_addImm(MI, 16);')
elif '#32' in line2:
print_line(' arm64_op_addImm(MI, 32);')
# X86
if '", %rax"' in line2 or '", rax"' in line2:
print_line(' op_addReg(MI, X86_REG_RAX);')
elif '", %eax"' in line2 or '", eax"' in line2:
print_line(' op_addReg(MI, X86_REG_EAX);')
elif '", %ax"' in line2 or '", ax"' in line2:
print_line(' op_addReg(MI, X86_REG_AX);')
elif '", %al"' in line2 or '", al"' in line2:
print_line(' op_addReg(MI, X86_REG_AL);')
elif '", %dx"' in line2 or '", dx"' in line2:
print_line(' op_addReg(MI, X86_REG_DX);')
elif '", %st(0)"' in line2 or '", st(0)"' in line2:
print_line(' op_addReg(MI, X86_REG_ST0);')
elif '", 1"' in line2:
print_line(' op_addImm(MI, 1);')
elif '", cl"' in line2:
print_line(' op_addReg(MI, X86_REG_CL);')
elif '"{1to2}, "' in line2:
print_line(' op_addAvxBroadcast(MI, X86_AVX_BCAST_2);')
elif '"{1to4}, "' in line2:
print_line(' op_addAvxBroadcast(MI, X86_AVX_BCAST_4);')
elif '"{1to8}, "' in line2:
print_line(' op_addAvxBroadcast(MI, X86_AVX_BCAST_8);')
elif '"{1to16}, "' in line2:
print_line(' op_addAvxBroadcast(MI, X86_AVX_BCAST_16);')
elif '{z}{sae}' in line2:
print_line(' op_addAvxSae(MI);')
print_line(' op_addAvxZeroOpmask(MI);')
elif ('{z}' in line2):
print_line(' op_addAvxZeroOpmask(MI);')
elif '{sae}' in line2:
print_line(' op_addAvxSae(MI);')
elif 'llvm_unreachable("Invalid command number.");' in line:
line2 = line.replace('llvm_unreachable("Invalid command number.");', '// unreachable')
print_line(line2)
elif ('assert(' in line) or ('assert (' in line):
pass
elif 'Invalid alt name index' in line:
pass
elif '::' in line and 'case ' in line:
#print_line(line2)
print_line(line)
elif 'MI->getNumOperands()' in line:
line2 = line.replace('MI->getNumOperands()', 'MCInst_getNumOperands(MI)')
print_line(line2)
elif 'const MCOperand &MCOp' in line:
line2 = line.replace('const MCOperand &MCOp', 'MCOperand *MCOp')
print_line(line2)
elif 'MI->getOperand(0).isImm()' in line:
line2 = line.replace('MI->getOperand(0).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 0))')
print_line(line2)
elif 'MI->getOperand(1).isImm()' in line:
line2 = line.replace('MI->getOperand(1).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 1))')
print_line(line2)
elif 'MI->getOperand(2).isImm()' in line:
line2 = line.replace('MI->getOperand(2).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 2))')
print_line(line2)
elif 'MI->getOperand(3).isImm()' in line:
line2 = line.replace('MI->getOperand(3).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 3))')
print_line(line2)
elif 'MI->getOperand(4).isImm()' in line:
line2 = line.replace('MI->getOperand(4).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 4))')
print_line(line2)
elif 'MI->getOperand(5).isImm()' in line:
line2 = line.replace('MI->getOperand(5).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 5))')
print_line(line2)
elif 'MI->getOperand(6).isImm()' in line:
line2 = line.replace('MI->getOperand(6).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 6))')
print_line(line2)
elif 'MI->getOperand(7).isImm()' in line:
line2 = line.replace('MI->getOperand(7).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 7))')
print_line(line2)
elif 'MI->getOperand(8).isImm()' in line:
line2 = line.replace('MI->getOperand(8).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 8))')
print_line(line2)
elif 'MI->getOperand(0).getImm()' in line:
line2 = line.replace('MI->getOperand(0).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 0))')
print_line(line2)
elif 'MI->getOperand(1).getImm()' in line:
line2 = line.replace('MI->getOperand(1).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 1))')
print_line(line2)
elif 'MI->getOperand(2).getImm()' in line:
line2 = line.replace('MI->getOperand(2).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 2))')
print_line(line2)
elif 'MI->getOperand(3).getImm()' in line:
line2 = line.replace('MI->getOperand(3).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 3))')
print_line(line2)
elif 'MI->getOperand(4).getImm()' in line:
line2 = line.replace('MI->getOperand(4).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 4))')
print_line(line2)
elif 'MI->getOperand(5).getImm()' in line:
line2 = line.replace('MI->getOperand(5).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 5))')
print_line(line2)
elif 'MI->getOperand(6).getImm()' in line:
line2 = line.replace('MI->getOperand(6).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 6))')
print_line(line2)
elif 'MI->getOperand(7).getImm()' in line:
line2 = line.replace('MI->getOperand(7).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 7))')
print_line(line2)
elif 'MI->getOperand(8).getImm()' in line:
line2 = line.replace('MI->getOperand(8).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 8))')
print_line(line2)
elif 'MRI.getRegClass(' in line:
classid = extract_paren(line, 'getRegClass(')
operand = extract_paren(line, 'getOperand')
line2 = line.replace('MI->getNumOperands()', 'MCInst_getNumOperands(MI)')
line2 = ' GETREGCLASS_CONTAIN(%s, %s)' %(classid, operand)
if line.endswith('())) {'):
line2 += ') {'
elif line.endswith(' {'):
line2 += ' {'
elif line.endswith(' &&'):
line2 += ' &&'
print_line(line2)
elif 'MI->getOperand(' in line and 'isReg' in line:
operand = extract_paren(line, 'getOperand')
line2 = ' MCOperand_isReg(MCInst_getOperand(MI, %s))' %(operand)
# MI->getOperand(1).isReg() &&
if line.endswith(' {'):
line2 += ' {'
elif line.endswith(' &&'):
line2 += ' &&'
print_line(line2)
elif 'MI->getOperand(' in line and 'getReg' in line:
line2 = replace_getReg(line)
# one more time
line2 = replace_getReg(line2)
print_line(line2)
elif ' return false;' in line and in_printAliasInstr:
print_line(' return NULL;')
elif 'MCOp.isImm()' in line:
line2 = line.replace('MCOp.isImm()', 'MCOperand_isImm(MCOp)')
print_line(line2)
elif 'MCOp.getImm()' in line:
line2 = line.replace('MCOp.getImm()', 'MCOperand_getImm(MCOp)')
if 'int64_t Val =' in line:
line2 = line2.replace('int64_t Val =', 'Val =')
print_line(line2)
elif 'isSVEMaskOfIdenticalElements<' in line:
if 'int8' in line:
line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements8')
line2 = line2.replace('<int8_t>', '')
elif 'int16' in line:
line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements16')
line2 = line2.replace('<int16_t>', '')
elif 'int32' in line:
line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements32')
line2 = line2.replace('<int32_t>', '')
else:
line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements64')
line2 = line2.replace('<int64_t>', '')
print_line(line2)
elif 'switch (PredicateIndex) {' in line:
print_line(' int64_t Val;')
print_line(line)
elif 'unsigned I = 0;' in line and in_printAliasInstr:
print_line("""
tmpString = cs_strdup(AsmString);
while (AsmString[I] != ' ' && AsmString[I] != '\\t' &&
AsmString[I] != '$' && AsmString[I] != '\\0')
++I;
tmpString[I] = 0;
SStream_concat0(OS, tmpString);
if (AsmString[I] != '\\0') {
if (AsmString[I] == ' ' || AsmString[I] == '\\t') {
SStream_concat0(OS, " ");
++I;
}
do {
if (AsmString[I] == '$') {
++I;
if (AsmString[I] == (char)0xff) {
++I;
OpIdx = AsmString[I++] - 1;
PrintMethodIdx = AsmString[I++] - 1;
printCustomAliasOperand(MI, OpIdx, PrintMethodIdx, OS);
} else
printOperand(MI, (unsigned)(AsmString[I++]) - 1, OS);
} else {
SStream_concat1(OS, AsmString[I++]);
}
} while (AsmString[I] != '\\0');
}
return tmpString;
}
""")
in_printAliasInstr = False
# skip next few lines
skip_printing = True
elif '::printCustomAliasOperand' in line:
# print again
skip_printing = False
print_line('static void printCustomAliasOperand(')
elif 'const MCSubtargetInfo &STI' in line:
pass
elif 'const MCInst *MI' in line:
line2 = line.replace('const MCInst *MI', 'MCInst *MI')
print_line(line2)
elif 'llvm_unreachable("' in line:
if 'default: ' in line:
print_line(' default:')
elif 'llvm_unreachable("Unknown MCOperandPredicate kind")' in line:
print_line(' return false; // never reach')
else:
pass
elif 'raw_ostream &' in line:
line2 = line.replace('raw_ostream &', 'SStream *')
if line2.endswith(' {'):
line2 = line2.replace(' {', '\n{')
print_line(line2)
elif 'printPredicateOperand(' in line and 'STI, ' in line:
line2 = line.replace('STI, ', '')
print_line(line2)
elif '// Fragment ' in line:
# // Fragment 0 encoded into 6 bits for 51 unique commands.
tmp = line.strip().split(' ')
fragment_no = tmp[2]
print_line(line)
elif ('switch ((' in line or 'if ((' in line) and 'Bits' in line:
# switch ((Bits >> 14) & 63) {
bits = line.strip()
bits = bits.replace('switch ', '')
bits = bits.replace('if ', '')
bits = bits.replace('{', '')
bits = bits.strip()
print_line(' // printf("Fragment %s: %%"PRIu64"\\n", %s);' %(fragment_no, bits))
print_line(line)
elif not skip_printing:
print_line(line)
if line == ' };':
if need_endif and not in_getRegisterName:
# endif only for AsmStrs when we are not inside getRegisterName()
print_line("#endif")
need_endif = False
elif 'return AsmStrs+RegAsmOffset[RegNo-1];' in line:
if in_getRegisterName:
# return NULL for register name on Diet mode
print_line("#else")
print_line(" return NULL;")
print_line("#endif")
print_line("}")
need_endif = False
in_getRegisterName = False
# skip 1 line
skip_line = 1
elif line == ' }':
# ARM64
if in_getRegisterName:
# return NULL for register name on Diet mode
print_line("#else")
print_line(" return NULL;")
print_line("#endif")
print_line("}")
need_endif = False
in_getRegisterName = False
# skip 1 line
skip_line = 1
elif 'default:' in line:
# ARM64
if in_getRegisterName:
# get the size of RegAsmOffsetvreg[]
print_line(" return (const char *)(sizeof(RegAsmOffsetvreg)/sizeof(RegAsmOffsetvreg[0]));")
f1.close()
f2.close()<|fim▁end|> | break
|
<|file_name|>PaletteFile.py<|end_file_name|><|fim▁begin|>#
# Python Imaging Library
# $Id$
#
# stuff to read simple, teragon-style palette files
#
# History:
# 97-08-23 fl Created
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1997.
#
# See the README file for information on usage and redistribution.
#
from PIL._binary import o8
##
# File handler for Teragon-style palette files.
class PaletteFile(object):
rawmode = "RGB"
def __init__(self, fp):
self.palette = [(i, i, i) for i in range(256)]
while True:
s = fp.readline()
<|fim▁hole|> continue
if len(s) > 100:
raise SyntaxError("bad palette file")
v = [int(x) for x in s.split()]
try:
[i, r, g, b] = v
except ValueError:
[i, r] = v
g = b = r
if 0 <= i <= 255:
self.palette[i] = o8(r) + o8(g) + o8(b)
self.palette = b"".join(self.palette)
def getpalette(self):
return self.palette, self.rawmode<|fim▁end|> | if not s:
break
if s[0:1] == b"#": |
<|file_name|>resource_aws_apigatewayv2_route_test.go<|end_file_name|><|fim▁begin|>package aws
import (
"fmt"
"strings"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/apigatewayv2"
"github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccAWSAPIGatewayV2Route_basic(t *testing.T) {
var apiId string
var v apigatewayv2.GetRouteOutput
resourceName := "aws_apigatewayv2_route.test"
rName := acctest.RandomWithPrefix("tf-acc-test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSAPIGatewayV2RouteDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSAPIGatewayV2RouteConfig_basic(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "false"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeNone),
resource.TestCheckResourceAttr(resourceName, "authorizer_id", ""),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", ""),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$default"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
{
ResourceName: resourceName,
ImportStateIdFunc: testAccAWSAPIGatewayV2RouteImportStateIdFunc(resourceName),
ImportState: true,
ImportStateVerify: true,
},
},
})
}
func TestAccAWSAPIGatewayV2Route_disappears(t *testing.T) {
var apiId string
var v apigatewayv2.GetRouteOutput
resourceName := "aws_apigatewayv2_route.test"
rName := acctest.RandomWithPrefix("tf-acc-test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSAPIGatewayV2RouteDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSAPIGatewayV2RouteConfig_basic(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
testAccCheckAWSAPIGatewayV2RouteDisappears(&apiId, &v),
),
ExpectNonEmptyPlan: true,
},
},
})
}
func TestAccAWSAPIGatewayV2Route_Authorizer(t *testing.T) {
var apiId string
var v apigatewayv2.GetRouteOutput
resourceName := "aws_apigatewayv2_route.test"
authorizerResourceName := "aws_apigatewayv2_authorizer.test"
rName := acctest.RandomWithPrefix("tf-acc-test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSAPIGatewayV2RouteDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSAPIGatewayV2RouteConfig_authorizer(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "false"),
resource.TestCheckResourceAttr(resourceName, "authorization_scopes.#", "0"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeCustom),
resource.TestCheckResourceAttrPair(resourceName, "authorizer_id", authorizerResourceName, "id"),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", ""),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$connect"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
{
ResourceName: resourceName,
ImportStateIdFunc: testAccAWSAPIGatewayV2RouteImportStateIdFunc(resourceName),
ImportState: true,
ImportStateVerify: true,
},
{
Config: testAccAWSAPIGatewayV2RouteConfig_authorizerUpdated(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "false"),
resource.TestCheckResourceAttr(resourceName, "authorization_scopes.#", "0"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeAwsIam),
resource.TestCheckResourceAttr(resourceName, "authorizer_id", ""),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", ""),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$connect"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
},
})
}
func TestAccAWSAPIGatewayV2Route_JwtAuthorization(t *testing.T) {
var apiId string
var v apigatewayv2.GetRouteOutput
resourceName := "aws_apigatewayv2_route.test"
authorizerResourceName := "aws_apigatewayv2_authorizer.test"
rName := acctest.RandomWithPrefix("tf-acc-test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSAPIGatewayV2RouteDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSAPIGatewayV2RouteConfig_jwtAuthorization(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "false"),
resource.TestCheckResourceAttr(resourceName, "authorization_scopes.#", "2"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeJwt),
resource.TestCheckResourceAttrPair(resourceName, "authorizer_id", authorizerResourceName, "id"),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", ""),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$connect"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
{
ResourceName: resourceName,
ImportStateIdFunc: testAccAWSAPIGatewayV2RouteImportStateIdFunc(resourceName),
ImportState: true,
ImportStateVerify: true,
},
{
Config: testAccAWSAPIGatewayV2RouteConfig_jwtAuthorizationUpdated(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "false"),
resource.TestCheckResourceAttr(resourceName, "authorization_scopes.#", "1"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeJwt),
resource.TestCheckResourceAttrPair(resourceName, "authorizer_id", authorizerResourceName, "id"),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", ""),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$connect"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
},
})
}
func TestAccAWSAPIGatewayV2Route_Model(t *testing.T) {
var apiId string
var v apigatewayv2.GetRouteOutput
resourceName := "aws_apigatewayv2_route.test"
modelResourceName := "aws_apigatewayv2_model.test"
// Model name must be alphanumeric.
rName := strings.ReplaceAll(acctest.RandomWithPrefix("tf-acc-test"), "-", "")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSAPIGatewayV2RouteDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSAPIGatewayV2RouteConfig_model(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "false"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeNone),
resource.TestCheckResourceAttr(resourceName, "authorizer_id", ""),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", "action"),
resource.TestCheckResourceAttr(resourceName, "operation_name", ""),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "1"),
resource.TestCheckResourceAttrPair(resourceName, "request_models.test", modelResourceName, "name"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$default"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
{
ResourceName: resourceName,
ImportStateIdFunc: testAccAWSAPIGatewayV2RouteImportStateIdFunc(resourceName),
ImportState: true,
ImportStateVerify: true,
},
},
})
}
func TestAccAWSAPIGatewayV2Route_SimpleAttributes(t *testing.T) {
var apiId string
var v apigatewayv2.GetRouteOutput
resourceName := "aws_apigatewayv2_route.test"
rName := acctest.RandomWithPrefix("tf-acc-test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSAPIGatewayV2RouteDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSAPIGatewayV2RouteConfig_simpleAttributes(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "true"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeNone),
resource.TestCheckResourceAttr(resourceName, "authorizer_id", ""),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", "GET"),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$default"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", "$default"),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
{
Config: testAccAWSAPIGatewayV2RouteConfig_basic(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "false"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeNone),
resource.TestCheckResourceAttr(resourceName, "authorizer_id", ""),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", ""),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "request_parameters.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$default"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
{
Config: testAccAWSAPIGatewayV2RouteConfig_simpleAttributes(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "true"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeNone),
resource.TestCheckResourceAttr(resourceName, "authorizer_id", ""),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", "GET"),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$default"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", "$default"),
resource.TestCheckResourceAttr(resourceName, "target", ""),
),
},
{
ResourceName: resourceName,
ImportStateIdFunc: testAccAWSAPIGatewayV2RouteImportStateIdFunc(resourceName),
ImportState: true,
ImportStateVerify: true,
},
},
})
}
func TestAccAWSAPIGatewayV2Route_Target(t *testing.T) {
var apiId string
var v apigatewayv2.GetRouteOutput
resourceName := "aws_apigatewayv2_route.test"
integrationResourceName := "aws_apigatewayv2_integration.test"
rName := acctest.RandomWithPrefix("tf-acc-test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSAPIGatewayV2RouteDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSAPIGatewayV2RouteConfig_target(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayV2RouteExists(resourceName, &apiId, &v),
resource.TestCheckResourceAttr(resourceName, "api_key_required", "false"),
resource.TestCheckResourceAttr(resourceName, "authorization_type", apigatewayv2.AuthorizationTypeNone),
resource.TestCheckResourceAttr(resourceName, "authorizer_id", ""),
resource.TestCheckResourceAttr(resourceName, "model_selection_expression", ""),
resource.TestCheckResourceAttr(resourceName, "operation_name", ""),
resource.TestCheckResourceAttr(resourceName, "request_models.%", "0"),
resource.TestCheckResourceAttr(resourceName, "route_key", "$default"),
resource.TestCheckResourceAttr(resourceName, "route_response_selection_expression", ""),
testAccCheckAWSAPIGatewayV2RouteTarget(resourceName, integrationResourceName),
),
},
{
ResourceName: resourceName,
ImportStateIdFunc: testAccAWSAPIGatewayV2RouteImportStateIdFunc(resourceName),
ImportState: true,
ImportStateVerify: true,
},
},
})
}
func testAccCheckAWSAPIGatewayV2RouteDestroy(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).apigatewayv2conn
for _, rs := range s.RootModule().Resources {
if rs.Type != "aws_apigatewayv2_route" {
continue
}
_, err := conn.GetRoute(&apigatewayv2.GetRouteInput{
ApiId: aws.String(rs.Primary.Attributes["api_id"]),
RouteId: aws.String(rs.Primary.ID),
})
if isAWSErr(err, apigatewayv2.ErrCodeNotFoundException, "") {
continue
}
if err != nil {
return err
}
return fmt.Errorf("API Gateway v2 route %s still exists", rs.Primary.ID)
}
return nil
}
func testAccCheckAWSAPIGatewayV2RouteDisappears(apiId *string, v *apigatewayv2.GetRouteOutput) resource.TestCheckFunc {
return func(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).apigatewayv2conn
_, err := conn.DeleteRoute(&apigatewayv2.DeleteRouteInput{
ApiId: apiId,
RouteId: v.RouteId,
})
return err
}
}
func testAccCheckAWSAPIGatewayV2RouteExists(n string, vApiId *string, v *apigatewayv2.GetRouteOutput) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("Not found: %s", n)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No API Gateway v2 route ID is set")
}
conn := testAccProvider.Meta().(*AWSClient).apigatewayv2conn
apiId := aws.String(rs.Primary.Attributes["api_id"])
resp, err := conn.GetRoute(&apigatewayv2.GetRouteInput{
ApiId: apiId,
RouteId: aws.String(rs.Primary.ID),
})
if err != nil {
return err
}
*vApiId = *apiId
*v = *resp
return nil
}
}
func testAccAWSAPIGatewayV2RouteImportStateIdFunc(resourceName string) resource.ImportStateIdFunc {
return func(s *terraform.State) (string, error) {
rs, ok := s.RootModule().Resources[resourceName]
if !ok {
return "", fmt.Errorf("Not Found: %s", resourceName)
}
return fmt.Sprintf("%s/%s", rs.Primary.Attributes["api_id"], rs.Primary.ID), nil
}
}
func testAccCheckAWSAPIGatewayV2RouteTarget(resourceName, integrationResourceName string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[integrationResourceName]
if !ok {
return fmt.Errorf("Not Found: %s", integrationResourceName)
}
return resource.TestCheckResourceAttr(resourceName, "target", fmt.Sprintf("integrations/%s", rs.Primary.ID))(s)
}
}
func testAccAWSAPIGatewayV2RouteConfig_apiWebSocket(rName string) string {
return fmt.Sprintf(`
resource "aws_apigatewayv2_api" "test" {
name = %[1]q
protocol_type = "WEBSOCKET"
route_selection_expression = "$request.body.action"
}
`, rName)
}
func testAccAWSAPIGatewayV2RouteConfig_basic(rName string) string {
return testAccAWSAPIGatewayV2RouteConfig_apiWebSocket(rName) + fmt.Sprintf(`
resource "aws_apigatewayv2_route" "test" {
api_id = "${aws_apigatewayv2_api.test.id}"
route_key = "$default"
}
`)
}
func testAccAWSAPIGatewayV2RouteConfig_authorizer(rName string) string {
return testAccAWSAPIGatewayV2AuthorizerConfig_basic(rName) + fmt.Sprintf(`
resource "aws_apigatewayv2_route" "test" {
api_id = "${aws_apigatewayv2_api.test.id}"
route_key = "$connect"
authorization_type = "CUSTOM"
authorizer_id = "${aws_apigatewayv2_authorizer.test.id}"
}
`)
}
func testAccAWSAPIGatewayV2RouteConfig_authorizerUpdated(rName string) string {
return testAccAWSAPIGatewayV2AuthorizerConfig_basic(rName) + fmt.Sprintf(`
resource "aws_apigatewayv2_route" "test" {
api_id = "${aws_apigatewayv2_api.test.id}"
route_key = "$connect"
authorization_type = "AWS_IAM"
}
`)
}
func testAccAWSAPIGatewayV2RouteConfig_jwtAuthorization(rName string) string {
return testAccAWSAPIGatewayV2AuthorizerConfig_jwt(rName) + fmt.Sprintf(`
resource "aws_apigatewayv2_route" "test" {
api_id = "${aws_apigatewayv2_api.test.id}"
route_key = "$connect"
authorization_type = "JWT"
authorizer_id = "${aws_apigatewayv2_authorizer.test.id}"
authorization_scopes = ["user.id", "user.email"]
}
`)
}
func testAccAWSAPIGatewayV2RouteConfig_jwtAuthorizationUpdated(rName string) string {
return testAccAWSAPIGatewayV2AuthorizerConfig_jwt(rName) + fmt.Sprintf(`
resource "aws_apigatewayv2_route" "test" {
api_id = "${aws_apigatewayv2_api.test.id}"
route_key = "$connect"
authorization_type = "JWT"
authorizer_id = "${aws_apigatewayv2_authorizer.test.id}"
authorization_scopes = ["user.email"]
}
`)
}
func testAccAWSAPIGatewayV2RouteConfig_model(rName string) string {
schema := `
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "ExampleModel",
"type": "object",
"properties": {
"id": {
"type": "string"
}
}<|fim▁hole|>`
return testAccAWSAPIGatewayV2ModelConfig_basic(rName, schema) + fmt.Sprintf(`
resource "aws_apigatewayv2_route" "test" {
api_id = "${aws_apigatewayv2_api.test.id}"
route_key = "$default"
model_selection_expression = "action"
request_models = {
"test" = "${aws_apigatewayv2_model.test.name}"
}
}
`)
}
// Simple attributes - No authorization, models or targets.
func testAccAWSAPIGatewayV2RouteConfig_simpleAttributes(rName string) string {
return testAccAWSAPIGatewayV2RouteConfig_apiWebSocket(rName) + fmt.Sprintf(`
resource "aws_apigatewayv2_route" "test" {
api_id = "${aws_apigatewayv2_api.test.id}"
route_key = "$default"
api_key_required = true
operation_name = "GET"
route_response_selection_expression = "$default"
}
`)
}
func testAccAWSAPIGatewayV2RouteConfig_target(rName string) string {
return testAccAWSAPIGatewayV2IntegrationConfig_basic(rName) + fmt.Sprintf(`
resource "aws_apigatewayv2_route" "test" {
api_id = "${aws_apigatewayv2_api.test.id}"
route_key = "$default"
target = "integrations/${aws_apigatewayv2_integration.test.id}"
}
`)
}<|fim▁end|> | } |
<|file_name|>validators.js<|end_file_name|><|fim▁begin|>/**
* Created by suman on 09/05/16.
*/
var core = require('chanakya'),
http = require('http'),
Q = require('q');
core.validator('isPhoneno', function (message) {
var deferred = Q.defer();
http.get('http://apilayer.net/api/validate?access_key=eba101687da317945a45f798464256da&number=' + message + '&country_code=&format=1', function (res) {
res.setEncoding('utf8');
res.on('data', function (d) {
d = JSON.parse(d);
deferred.resolve(d.valid);
});
}).on('error', function (e) {
deferred.reject(new Error(e));
});
return deferred.promise;
});
core.validator('isOTP', function (message) {
return Q.fcall(function () {
return message == 1234;
});
});
core.validator('isStatement', function (message) {
// var deferred = Q.defer();
//
// http.get('http://demo1036853.mockable.io/wit', function (res) {
// res.setEncoding('utf8');
// res.on('data', function (d) {
// d = JSON.parse(d);
// deferred.resolve(d.intent);
// });
// }).on('error', function (e) {
// deferred.reject(new Error(e));
// });
// return deferred.promise;
return Q.fcall(function () {
return (message == 'hi' || message == 'Hi');
});
});
core.validator('isOffer', function (message) {
return Q.fcall(function () {
return (message == 'offer' || message == 'Offer');<|fim▁hole|>});<|fim▁end|> | }); |
<|file_name|>InvertedIndexUtils.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.lsm.invertedindex.util;
import java.util.List;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
import org.apache.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
import org.apache.hyracks.storage.am.btree.tuples.BTreeTypeAwareTupleWriterFactory;
import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.IPageManager;
import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMPageWriteCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
import org.apache.hyracks.storage.am.lsm.common.frames.LSMComponentFilterFrameFactory;
import org.apache.hyracks.storage.am.lsm.common.impls.BTreeFactory;
import org.apache.hyracks.storage.am.lsm.common.impls.ComponentFilterHelper;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFilterManager;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilderFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListTupleReference;
import org.apache.hyracks.storage.am.lsm.invertedindex.fulltext.IFullTextConfigEvaluatorFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndexDiskComponentFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndexFileManager;
import org.apache.hyracks.storage.am.lsm.invertedindex.impls.PartitionedLSMInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.inmemory.InMemoryInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.inmemory.PartitionedInMemoryInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.InvertedListBuilderFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.PartitionedOnDiskInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.PartitionedOnDiskInvertedIndexFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.fixedsize.FixedSizeElementInvertedListBuilder;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.fixedsize.FixedSizeInvertedListSearchResultFrameTupleAccessor;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.fixedsize.FixedSizeInvertedListTupleReference;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.variablesize.VariableSizeInvertedListSearchResultFrameTupleAccessor;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.variablesize.VariableSizeInvertedListTupleReference;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.util.trace.ITracer;
public class InvertedIndexUtils {
public static final String EXPECT_ALL_FIX_GET_VAR_SIZE =
"expecting all type traits to be fixed-size while getting at least one variable-length one";
public static final String EXPECT_VAR_GET_ALL_FIX_SIZE =
"expecting at least one variable-size type trait while all are fixed-size";
public static InMemoryInvertedIndex createInMemoryBTreeInvertedindex(IBufferCache memBufferCache,
IPageManager virtualFreePageManager, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory, FileReference btreeFileRef)
throws HyracksDataException {
return new InMemoryInvertedIndex(memBufferCache, virtualFreePageManager, invListTypeTraits, invListCmpFactories,
tokenTypeTraits, tokenCmpFactories, tokenizerFactory, fullTextConfigEvaluatorFactory, btreeFileRef);
}
public static InMemoryInvertedIndex createPartitionedInMemoryBTreeInvertedindex(IBufferCache memBufferCache,
IPageManager virtualFreePageManager, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory, FileReference btreeFileRef)<|fim▁hole|> invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
fullTextConfigEvaluatorFactory, btreeFileRef);
}
public static OnDiskInvertedIndex createOnDiskInvertedIndex(IIOManager ioManager, IBufferCache bufferCache,
ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile,
IPageManagerFactory pageManagerFactory) throws HyracksDataException {
IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
FileReference btreeFile = getBTreeFile(ioManager, invListsFile);
return new OnDiskInvertedIndex(bufferCache, builder, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
tokenCmpFactories, btreeFile, invListsFile, pageManagerFactory);
}
public static PartitionedOnDiskInvertedIndex createPartitionedOnDiskInvertedIndex(IIOManager ioManager,
IBufferCache bufferCache, ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile,
IPageManagerFactory pageManagerFactory) throws HyracksDataException {
IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
FileReference btreeFile = getBTreeFile(ioManager, invListsFile);
return new PartitionedOnDiskInvertedIndex(bufferCache, builder, invListTypeTraits, invListCmpFactories,
tokenTypeTraits, tokenCmpFactories, btreeFile, invListsFile, pageManagerFactory);
}
public static FileReference getBTreeFile(IIOManager ioManager, FileReference invListsFile)
throws HyracksDataException {
return ioManager.resolveAbsolutePath(invListsFile.getFile().getPath() + "_btree");
}
public static BTreeFactory createDeletedKeysBTreeFactory(IIOManager ioManager, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, IBufferCache diskBufferCache,
IPageManagerFactory freePageManagerFactory) throws HyracksDataException {
BTreeTypeAwareTupleWriterFactory tupleWriterFactory =
new BTreeTypeAwareTupleWriterFactory(invListTypeTraits, false);
ITreeIndexFrameFactory leafFrameFactory =
BTreeUtils.getLeafFrameFactory(tupleWriterFactory, BTreeLeafFrameType.REGULAR_NSM);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
return new BTreeFactory(ioManager, diskBufferCache, freePageManagerFactory, interiorFrameFactory,
leafFrameFactory, invListCmpFactories, invListCmpFactories.length);
}
public static LSMInvertedIndex createLSMInvertedIndex(IIOManager ioManager,
List<IVirtualBufferCache> virtualBufferCaches, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory, IBufferCache diskBufferCache,
String absoluteOnDiskDir, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
ILSMIOOperationCallbackFactory ioOpCallbackFactory, ILSMPageWriteCallbackFactory pageWriteCallbackFactory,
int[] invertedIndexFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories,
int[] filterFields, int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps,
boolean durable, IMetadataPageManagerFactory pageManagerFactory, ITracer tracer)
throws HyracksDataException {
BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(ioManager, invListTypeTraits,
invListCmpFactories, diskBufferCache, pageManagerFactory);
int[] bloomFilterKeyFields = new int[invListCmpFactories.length];
for (int i = 0; i < invListCmpFactories.length; i++) {
bloomFilterKeyFields[i] = i;
}
BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, bloomFilterKeyFields);
FileReference onDiskDirFileRef = ioManager.resolveAbsolutePath(absoluteOnDiskDir);
LSMInvertedIndexFileManager fileManager =
new LSMInvertedIndexFileManager(ioManager, onDiskDirFileRef, deletedKeysBTreeFactory);
IInvertedListBuilderFactory invListBuilderFactory =
new InvertedListBuilderFactory(tokenTypeTraits, invListTypeTraits);
OnDiskInvertedIndexFactory invIndexFactory =
new OnDiskInvertedIndexFactory(ioManager, diskBufferCache, invListBuilderFactory, invListTypeTraits,
invListCmpFactories, tokenTypeTraits, tokenCmpFactories, fileManager, pageManagerFactory);
ComponentFilterHelper filterHelper = null;
LSMComponentFilterFrameFactory filterFrameFactory = null;
LSMComponentFilterManager filterManager = null;
if (filterCmpFactories != null) {
TypeAwareTupleWriterFactory filterTupleWriterFactory = new TypeAwareTupleWriterFactory(filterTypeTraits);
filterHelper = new ComponentFilterHelper(filterTupleWriterFactory, filterCmpFactories);
filterFrameFactory = new LSMComponentFilterFrameFactory(filterTupleWriterFactory);
filterManager = new LSMComponentFilterManager(filterFrameFactory);
}
ILSMDiskComponentFactory componentFactory = new LSMInvertedIndexDiskComponentFactory(invIndexFactory,
deletedKeysBTreeFactory, bloomFilterFactory, filterHelper);
return new LSMInvertedIndex(ioManager, virtualBufferCaches, componentFactory, filterHelper, filterFrameFactory,
filterManager, bloomFilterFalsePositiveRate, diskBufferCache, fileManager, invListTypeTraits,
invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
fullTextConfigEvaluatorFactory, mergePolicy, opTracker, ioScheduler, ioOpCallbackFactory,
pageWriteCallbackFactory, invertedIndexFields, filterFields, filterFieldsForNonBulkLoadOps,
invertedIndexFieldsForNonBulkLoadOps, durable, tracer);
}
public static PartitionedLSMInvertedIndex createPartitionedLSMInvertedIndex(IIOManager ioManager,
List<IVirtualBufferCache> virtualBufferCaches, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory, IBufferCache diskBufferCache,
String absoluteOnDiskDir, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
ILSMIOOperationCallbackFactory ioOpCallbackFactory, ILSMPageWriteCallbackFactory pageWriteCallbackFactory,
int[] invertedIndexFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories,
int[] filterFields, int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps,
boolean durable, IPageManagerFactory pageManagerFactory, ITracer tracer) throws HyracksDataException {
BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(ioManager, invListTypeTraits,
invListCmpFactories, diskBufferCache, pageManagerFactory);
int[] bloomFilterKeyFields = new int[invListCmpFactories.length];
for (int i = 0; i < invListCmpFactories.length; i++) {
bloomFilterKeyFields[i] = i;
}
BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, bloomFilterKeyFields);
FileReference onDiskDirFileRef = ioManager.resolveAbsolutePath(absoluteOnDiskDir);
LSMInvertedIndexFileManager fileManager =
new LSMInvertedIndexFileManager(ioManager, onDiskDirFileRef, deletedKeysBTreeFactory);
IInvertedListBuilderFactory invListBuilderFactory =
new InvertedListBuilderFactory(tokenTypeTraits, invListTypeTraits);
PartitionedOnDiskInvertedIndexFactory invIndexFactory = new PartitionedOnDiskInvertedIndexFactory(ioManager,
diskBufferCache, invListBuilderFactory, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
tokenCmpFactories, fileManager, pageManagerFactory);
ComponentFilterHelper filterHelper = null;
LSMComponentFilterFrameFactory filterFrameFactory = null;
LSMComponentFilterManager filterManager = null;
if (filterCmpFactories != null) {
TypeAwareTupleWriterFactory filterTupleWriterFactory = new TypeAwareTupleWriterFactory(filterTypeTraits);
filterHelper = new ComponentFilterHelper(filterTupleWriterFactory, filterCmpFactories);
filterFrameFactory = new LSMComponentFilterFrameFactory(filterTupleWriterFactory);
filterManager = new LSMComponentFilterManager(filterFrameFactory);
}
ILSMDiskComponentFactory componentFactory = new LSMInvertedIndexDiskComponentFactory(invIndexFactory,
deletedKeysBTreeFactory, bloomFilterFactory, filterHelper);
return new PartitionedLSMInvertedIndex(ioManager, virtualBufferCaches, componentFactory, filterHelper,
filterFrameFactory, filterManager, bloomFilterFalsePositiveRate, diskBufferCache, fileManager,
invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
fullTextConfigEvaluatorFactory, mergePolicy, opTracker, ioScheduler, ioOpCallbackFactory,
pageWriteCallbackFactory, invertedIndexFields, filterFields, filterFieldsForNonBulkLoadOps,
invertedIndexFieldsForNonBulkLoadOps, durable, tracer);
}
public static boolean checkTypeTraitsAllFixed(ITypeTraits[] typeTraits) {
for (int i = 0; i < typeTraits.length; i++) {
if (!typeTraits[i].isFixedLength()) {
return false;
}
}
return true;
}
public static void verifyAllFixedSizeTypeTrait(ITypeTraits[] typeTraits) throws HyracksDataException {
if (InvertedIndexUtils.checkTypeTraitsAllFixed(typeTraits) == false) {
throw HyracksDataException.create(ErrorCode.INVALID_INVERTED_LIST_TYPE_TRAITS,
InvertedIndexUtils.EXPECT_ALL_FIX_GET_VAR_SIZE);
}
}
public static void verifyHasVarSizeTypeTrait(ITypeTraits[] typeTraits) throws HyracksDataException {
if (InvertedIndexUtils.checkTypeTraitsAllFixed(typeTraits) == true) {
throw HyracksDataException.create(ErrorCode.INVALID_INVERTED_LIST_TYPE_TRAITS,
InvertedIndexUtils.EXPECT_VAR_GET_ALL_FIX_SIZE);
}
}
public static IInvertedListTupleReference createInvertedListTupleReference(ITypeTraits[] typeTraits)
throws HyracksDataException {
if (checkTypeTraitsAllFixed(typeTraits)) {
return new FixedSizeInvertedListTupleReference(typeTraits);
} else {
return new VariableSizeInvertedListTupleReference(typeTraits);
}
}
public static IFrameTupleAccessor createInvertedListFrameTupleAccessor(int frameSize, ITypeTraits[] typeTraits)
throws HyracksDataException {
if (checkTypeTraitsAllFixed(typeTraits)) {
return new FixedSizeInvertedListSearchResultFrameTupleAccessor(frameSize, typeTraits);
} else {
return new VariableSizeInvertedListSearchResultFrameTupleAccessor(frameSize, typeTraits);
}
}
public static void setInvertedListFrameEndOffset(byte[] bytes, int pos) {
int off = bytes.length - 4;
bytes[off++] = (byte) (pos >> 24);
bytes[off++] = (byte) (pos >> 16);
bytes[off++] = (byte) (pos >> 8);
bytes[off] = (byte) (pos);
}
public static int getInvertedListFrameEndOffset(byte[] bytes) {
int p = bytes.length - 4;
int offsetFrameEnd = 0;
for (int i = 0; i < 4; i++) {
offsetFrameEnd = (offsetFrameEnd << 8) + (bytes[p++] & 0xFF);
}
return offsetFrameEnd;
}
}<|fim▁end|> | throws HyracksDataException {
return new PartitionedInMemoryInvertedIndex(memBufferCache, virtualFreePageManager, invListTypeTraits, |
<|file_name|>Python_To_Arduino_Communication.py<|end_file_name|><|fim▁begin|><|fim▁hole|>ser = serial.Serial('COM9', 9600)
ser.write(b'5~')
ser.close()<|fim▁end|> | #!/usr/bin/python
import serial |
<|file_name|>launcher.rs<|end_file_name|><|fim▁begin|>use services::*;
use super::prelude::*;
use models::launcher::*;
use utils::convertor::VecConvertor;
use params::*;
pub fn init_router(router: &mut Router) {
router.get("/api/launcher/check_update/", do_check_update);
router.get("/api/launcher/home/", do_kind_home);
router.get("/api/launcher/secondary/", do_kind_secondary);
router.get("/api/launcher/app/category/", do_get_apk_category);
router.get("/api/launcher/app/", do_get_apk);
}
fn do_get_apk_category(req: &mut Request) -> IronResult<Response> {
#[derive(RustcEncodable)]
struct A {
id: i32,
title: Option<String>,
version_code: i32,
version_name: Option<String>,
package_name: Option<String>,
img_url: Option<String>,
}
#[derive(RustcEncodable,Default)]
struct Result {
status: i32,
apps: Vec<A>,
}
let mut result = Result::default();
result.status = 1;
let category_id = req.param::<i32>("category_id").unwrap_or(0);
if category_id > 0 {
result.apps = launcher::get_apk_list_by_category(category_id).convert(|app| {
A {
id: app.id,
title: app.title,
version_code: app.version_code,
version_name: app.version_name,
package_name: app.package_name,
img_url: app.recommend_screen_url,
}
});
result.status = 0;
}
response::ok_json(&json::encode(&result).unwrap())
}
fn do_get_apk(req: &mut Request) -> IronResult<Response> {
#[derive(RustcEncodable,Default)]
struct Result {
status: i32,
app: Option<LauncherApk>,
}
let mut result = Result::default();
result.status = 1;
let id = req.param::<i32>("id").unwrap_or(0);
if id > 0 {
result.app = launcher::get_apk_by_id(id);
result.status = 0;
}
response::ok_json(&json::encode(&result).unwrap())
}
fn do_check_update(req: &mut Request) -> IronResult<Response> {
#[derive(RustcEncodable,Default)]
struct Result {
status: i32,
version_code: i32,
version_name: Option<String>,
apk_url: Option<String>,
update_desc: Option<String>,
}
let mut result = Result::default();
result.status = 1;
let version_code = req.param::<i32>("version_code").unwrap_or(0);
let imei = req.param::<String>("imei");
if version_code > 0 {
if let Some(imei) = imei {
if let Some(device) = common::get_device_by_imei(imei) {
if let Some(launcher) = launcher::check_update(device.channel_id, version_code) {
result.status = 0;
result.version_code = launcher.version_code;<|fim▁hole|> }
}
}
}
response::ok_json(&json::encode(&result).unwrap())
}
fn do_kind_home(req: &mut Request) -> IronResult<Response> {
#[derive(RustcEncodable)]
struct I {
img_url: Option<String>,
title: Option<String>,
}
#[derive(RustcEncodable)]
struct K {
type_id: i32,
title: Option<String>,
items: Vec<I>,
}
#[derive(RustcEncodable,Default)]
struct Result {
status: i32,
items: Vec<K>,
}
let mut result = Result::default();
result.status = 1;
if let Some(imei) = req.param::<String>("imei") {
if let Some(device) = common::get_device_by_imei(imei) {
let vec = launcher::get_by_channel(device.channel_id).convert(|kind| {
K {
type_id: kind.id,
title: kind.title,
items: kind.items.convert(|item| {
I {
img_url: item.img_url,
title: item.title,
}
}),
}
});
result.status = 0;
result.items = vec;
}
}
response::ok_json(&json::encode(&result).unwrap())
}
fn do_kind_secondary(req: &mut Request) -> IronResult<Response> {
#[derive(RustcEncodable)]
struct R {
id: i32,
title: Option<String>,
package_name: Option<String>,
img_url: Option<String>,
}
#[derive(RustcEncodable)]
struct C {
id: i32,
title: Option<String>,
img_url: Option<String>,
}
#[derive(RustcEncodable,Default)]
struct Result {
status: i32,
recommend: Vec<R>,
category: Vec<C>,
}
let mut result = Result::default();
result.status = 1;
let kind_id = req.param::<i32>("type_id").unwrap_or(0);
if kind_id > 0 {
result.recommend = launcher::get_apk_recommend_list_by_kind(kind_id).convert(|app| {
R {
id: app.id,
title: app.title,
package_name: app.package_name,
img_url: app.recommend_screen_url,
}
});
result.category = launcher::get_category_list_by_kind(kind_id).convert(|c| {
C {
id: c.id,
title: c.title,
img_url: c.img_url,
}
});
result.status = 0;
}
response::ok_json(&json::encode(&result).unwrap())
}<|fim▁end|> | result.version_name = launcher.version_name;
result.apk_url = launcher.apk_url;
result.update_desc = launcher.update_desc; |
<|file_name|>BindingTest.cc<|end_file_name|><|fim▁begin|>// Copyright 2010 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#include "Diadem/Test/WindowTestBase.h"
#include "Diadem/Binding.h"
#include "Diadem/Value.h"
class TestBindings : public WindowTestBase {};
TEST_F(TestBindings, testRemoveObserver) {
class Ob : public Diadem::ValueObserver {
public:
Ob() : observe_count_(0) {}
void ObserveImp(const char*, const Diadem::Value&)
{ ++observe_count_; }
unsigned int observe_count_;
};
Diadem::ChangeMessenger messenger;
Ob ob1, ob2;
Diadem::Value v;
<|fim▁hole|> EXPECT_EQ(0, ob2.observe_count_);
messenger.AddObserver("", &ob1);
messenger.AddObserver("a", &ob2);
messenger.NotifyChange("", v);
EXPECT_EQ(1, ob1.observe_count_);
EXPECT_EQ(0, ob2.observe_count_);
messenger.NotifyChange("a", v);
EXPECT_EQ(2, ob1.observe_count_);
EXPECT_EQ(1, ob2.observe_count_);
messenger.RemoveObserver(&ob1);
messenger.NotifyChange("", v);
EXPECT_EQ(2, ob1.observe_count_);
EXPECT_EQ(1, ob2.observe_count_);
messenger.NotifyChange("a", v);
EXPECT_EQ(2, ob1.observe_count_);
EXPECT_EQ(2, ob2.observe_count_);
messenger.RemoveObserver(&ob2);
messenger.NotifyChange("a", v);
EXPECT_EQ(2, ob1.observe_count_);
EXPECT_EQ(2, ob2.observe_count_);
}
TEST_F(TestBindings, testValueToEnable) {
ReadWindowData(
"<window text='testValueToEnable'>"
"<label text='Label' name='label'>"
"<bind source='check' prop='enabled'/>"
"</label>"
"<check title='Check' name='check'/>"
"</window>");
ASSERT_EQ(2, windowRoot_->ChildrenCount());
Diadem::Entity *label = windowRoot_->FindByName("label");
Diadem::Entity *checkbox = windowRoot_->FindByName("check");
ASSERT_EQ(1, label->ChildrenCount());
Diadem::Binding *binding = dynamic_cast<Diadem::Binding*>(label->ChildAt(0));
ASSERT_FALSE(binding == NULL);
const Diadem::EntityController &controller = binding->GetController();
EXPECT_EQ(label, controller.GetEntity());
EXPECT_STREQ("enabled", controller.GetPropertyName());
EXPECT_EQ(0, checkbox->GetProperty(Diadem::kPropValue).Coerce<int32_t>());
EXPECT_FALSE(label->GetProperty(Diadem::kPropEnabled).Coerce<bool>());
checkbox->SetProperty(Diadem::kPropValue, 1);
EXPECT_TRUE(label->GetProperty(Diadem::kPropEnabled).Coerce<bool>());
}<|fim▁end|> | EXPECT_EQ(0, ob1.observe_count_); |
<|file_name|>key_names.go<|end_file_name|><|fim▁begin|><|fim▁hole|>Copyright 2020, Cossack Labs Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package filesystem
import (
keystore2 "github.com/cossacklabs/acra/keystore"
)
func getSymmetricKeyName(id string) string {
return id + `_sym`
}
func getTokenSymmetricKeyName(id []byte, ownerType keystore2.KeyOwnerType) string {
var name string
switch ownerType {
case keystore2.KeyOwnerTypeClient:
name = getClientIDSymmetricKeyName(id)
case keystore2.KeyOwnerTypeZone:
name = getZoneIDSymmetricKeyName(id)
default:
name = string(id)
}
return name + ".token"
}
func getClientIDSymmetricKeyName(id []byte) string {
return getSymmetricKeyName(GetServerDecryptionKeyFilename(id))
}
func getZoneIDSymmetricKeyName(id []byte) string {
return getSymmetricKeyName(GetZoneKeyFilename(id))
}<|fim▁end|> | /* |
<|file_name|>10.26.cc<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | //同一个键的值连续存储,使用find产生的迭代器连续删除n次,n是该键对应的值的个数 |
<|file_name|>conf_generator.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
import subprocess
import os
import commands
CLIENT_CN = sys.argv[1]
SEP = '==========> '
OVPN_ROOT='/etc/openvpn/'
ERSA=OVPN_ROOT+'easy-rsa-release-2.x/easy-rsa/2.0/keys/'
IP_PLACEHOLDER = 'placeholder_ip'
TA_PLACEHOLDER = 'placeholder_ta'
CA_PLACEHOLDER = 'placeholder_ca'
CL_KEY_PLACEHOLDER = 'placeholder_ck'
CL_CR_PLACEHOLDER = 'placeholder_crt'
CONFIG_TEMPLATE = OVPN_ROOT+'client/template.ovpn'
TA_FILE = OVPN_ROOT+'keys/ta.key'
CA = OVPN_ROOT+'keys/ca.crt'
RES_FILE = OVPN_ROOT+'client/'+CLIENT_CN+".ovpn"
CLIENT_KEY_FILE = ERSA+CLIENT_CN+".key"
CLIENT_CER_FILE = ERSA+CLIENT_CN+".crt"
if not os.geteuid() == 0:
sys.exit('Script must be run as root')
cmd = 'cd easy-rsa-release-2.x/easy-rsa/2.0/; source ./vars; ./pkitool ' + CLIENT_CN
print SEP+cmd
if subprocess.call(cmd, shell=True):
print SEP+"CN in use"
sys.exit()
print SEP+"Data has been generated: "+CLIENT_CN
#contents = open(CONFIG_TEMPLATE, "r+").read()
conf = open(RES_FILE, 'w+')
# tls key
taKey = open(TA_FILE, 'r').read()[:-1]
# CA isnt encrypted
serverCa = open(CA, 'r').read()[:-1]
# client key
clientKey = open(CLIENT_KEY_FILE, 'r').read()[:-1]
# client cert
clientCert = open(CLIENT_CER_FILE, 'r').read()[:-1]
# server ip
serverIp=commands.getoutput("/sbin/ifconfig").split("\n")[1].split()[1][0:]
resultConfig = open(CONFIG_TEMPLATE, "r+").read().replace(TA_PLACEHOLDER, taKey)
resultConfig = resultConfig.replace(CA_PLACEHOLDER, serverCa)
resultConfig = resultConfig.replace(CL_KEY_PLACEHOLDER, clientKey)
resultConfig = resultConfig.replace(CL_CR_PLACEHOLDER, clientCert)
resultConfig = resultConfig.replace(IP_PLACEHOLDER, serverIp)
conf.write(resultConfig)<|fim▁hole|>print SEP+"Client: "+RES_FILE<|fim▁end|> | conf.close()
open(OVPN_ROOT+'ccd/'+CLIENT_CN, 'w').close()
|
<|file_name|>CmsEntity.java<|end_file_name|><|fim▁begin|>/*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (C) Alkacon Software (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.acacia.shared;
import org.opencms.acacia.shared.CmsEntityChangeEvent.ChangeType;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import com.google.common.collect.Lists;
import com.google.gwt.event.logical.shared.HasValueChangeHandlers;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.EventHandler;
import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.event.shared.SimpleEventBus;
/**
* Serializable entity implementation.<p>
*/
public class CmsEntity implements HasValueChangeHandlers<CmsEntity>, Serializable {
/**
* Handles child entity changes.<p>
*/
protected class EntityChangeHandler implements ValueChangeHandler<CmsEntity> {
/**
* @see com.google.gwt.event.logical.shared.ValueChangeHandler#onValueChange(com.google.gwt.event.logical.shared.ValueChangeEvent)
*/
public void onValueChange(ValueChangeEvent<CmsEntity> event) {
ChangeType type = ((CmsEntityChangeEvent)event).getChangeType();
fireChange(type);
}
}
/** The serial version id. */
private static final long serialVersionUID = -6933931178070025267L;
/** The entity attribute values. */
private Map<String, List<CmsEntity>> m_entityAttributes;
/** The entity id. */
private String m_id;
/** The simple attribute values. */
private Map<String, List<String>> m_simpleAttributes;
/** The type name. */
private String m_typeName;
/** The event bus. */
private transient SimpleEventBus m_eventBus;
/** The child entites change handler. */
private transient EntityChangeHandler m_childChangeHandler = new EntityChangeHandler();
/** The handler registrations. */
private transient Map<String, HandlerRegistration> m_changeHandlerRegistry;
/**
* Constructor.<p>
*
* @param id the entity id/URI
* @param typeName the entity type name
*/
public CmsEntity(String id, String typeName) {
this();
m_id = id;
m_typeName = typeName;
}
/**
* Constructor. For serialization only.<p>
*/
protected CmsEntity() {
m_simpleAttributes = new HashMap<String, List<String>>();
m_entityAttributes = new HashMap<String, List<CmsEntity>>();
m_changeHandlerRegistry = new HashMap<String, HandlerRegistration>();
}
/**
* Returns the value of a simple attribute for the given path or <code>null</code>, if the value does not exist.<p>
*
* @param entity the entity to get the value from
* @param pathElements the path elements
*
* @return the value
*/
public static String getValueForPath(CmsEntity entity, String[] pathElements) {
String result = null;
if ((pathElements != null) && (pathElements.length >= 1)) {
String attributeName = pathElements[0];
int index = CmsContentDefinition.extractIndex(attributeName);
if (index > 0) {
index--;
}
attributeName = entity.getTypeName() + "/" + CmsContentDefinition.removeIndex(attributeName);
CmsEntityAttribute attribute = entity.getAttribute(attributeName);
if (!((attribute == null) || (attribute.isComplexValue() && (pathElements.length == 1)))) {
if (attribute.isSimpleValue()) {
if ((pathElements.length == 1) && (attribute.getValueCount() > 0)) {
List<String> values = attribute.getSimpleValues();
result = values.get(index);
}
} else if (attribute.getValueCount() > (index)) {
String[] childPathElements = new String[pathElements.length - 1];
for (int i = 1; i < pathElements.length; i++) {
childPathElements[i - 1] = pathElements[i];
}
List<CmsEntity> values = attribute.getComplexValues();
result = getValueForPath(values.get(index), childPathElements);
}
}
}
return result;
}
/**
* Gets the list of values reachable from the given base object with the given path.<p>
*
* @param baseObject the base object (a CmsEntity or a string)
* @param pathComponents the path components
* @return the list of values for the given path (either of type String or CmsEntity)
*/
public static List<Object> getValuesForPath(Object baseObject, String[] pathComponents) {
List<Object> currentList = Lists.newArrayList();
currentList.add(baseObject);
for (String pathComponent : pathComponents) {
List<Object> newList = Lists.newArrayList();
for (Object element : currentList) {
newList.addAll(getValuesForPathComponent(element, pathComponent));
}
currentList = newList;
}
return currentList;
}
/**
* Gets the values reachable from a given object (an entity or a string) with a single XPath component.<p>
*
* If entityOrString is a string, and pathComponent is "VALUE", a list containing only entityOrString is returned.
* Otherwise, entityOrString is assumed to be an entity, and the pathComponent is interpreted as a field of the entity
* (possibly with an index).
*
* @param entityOrString the entity or string from which to get the values for the given path component
* @param pathComponent the path component
* @return the list of reachable values
*/
public static List<Object> getValuesForPathComponent(Object entityOrString, String pathComponent) {
List<Object> result = Lists.newArrayList();
if (pathComponent.equals("VALUE")) {
result.add(entityOrString);
} else {
if (entityOrString instanceof CmsEntity) {
CmsEntity entity = (CmsEntity)entityOrString;
boolean hasIndex = CmsContentDefinition.hasIndex(pathComponent);
int index = CmsContentDefinition.extractIndex(pathComponent);
if (index > 0) {
index--;
}
String attributeName = entity.getTypeName() + "/" + CmsContentDefinition.removeIndex(pathComponent);
CmsEntityAttribute attribute = entity.getAttribute(attributeName);
if (attribute != null) {
if (hasIndex) {
if (index < attribute.getValueCount()) {
if (attribute.isSimpleValue()) {
result.add(attribute.getSimpleValues().get(index));
} else {
result.add(attribute.getComplexValues().get(index));
}
}
} else {
if (attribute.isSimpleValue()) {
result.addAll(attribute.getSimpleValues());
} else {
result.addAll(attribute.getComplexValues());
}
}
}
}
}
return result;
}
/**
* Adds the given attribute value.<p>
*
* @param attributeName the attribute name
* @param value the attribute value
*/
public void addAttributeValue(String attributeName, CmsEntity value) {
if (m_simpleAttributes.containsKey(attributeName)) {
throw new RuntimeException("Attribute already exists with a simple type value.");
}
if (m_entityAttributes.containsKey(attributeName)) {
m_entityAttributes.get(attributeName).add(value);
} else {
List<CmsEntity> values = new ArrayList<CmsEntity>();
values.add(value);
m_entityAttributes.put(attributeName, values);
}
registerChangeHandler(value);
fireChange(ChangeType.add);
}
/**
* Adds the given attribute value.<p>
*
* @param attributeName the attribute name
* @param value the attribute value
*/
public void addAttributeValue(String attributeName, String value) {
if (m_entityAttributes.containsKey(attributeName)) {
throw new RuntimeException("Attribute already exists with a entity type value.");
}
if (m_simpleAttributes.containsKey(attributeName)) {
m_simpleAttributes.get(attributeName).add(value);
} else {
List<String> values = new ArrayList<String>();
values.add(value);
m_simpleAttributes.put(attributeName, values);
}
fireChange(ChangeType.add);
}
/**
* @see com.google.gwt.event.logical.shared.HasValueChangeHandlers#addValueChangeHandler(com.google.gwt.event.logical.shared.ValueChangeHandler)
*/
public HandlerRegistration addValueChangeHandler(ValueChangeHandler<CmsEntity> handler) {
return addHandler(handler, ValueChangeEvent.getType());
}
/**
* Clones the given entity keeping all entity ids.<p>
*
* @return returns the cloned instance
*/
public CmsEntity cloneEntity() {
CmsEntity clone = new CmsEntity(getId(), getTypeName());
for (CmsEntityAttribute attribute : getAttributes()) {
if (attribute.isSimpleValue()) {
List<String> values = attribute.getSimpleValues();
for (String value : values) {
clone.addAttributeValue(attribute.getAttributeName(), value);
}
} else {
List<CmsEntity> values = attribute.getComplexValues();
for (CmsEntity value : values) {
clone.addAttributeValue(attribute.getAttributeName(), value.cloneEntity());
}
}
}
return clone;
}
/**
* Creates a deep copy of this entity.<p>
*
* @param entityId the id of the new entity, if <code>null</code> a generic id will be used
*
* @return the entity copy
*/
public CmsEntity createDeepCopy(String entityId) {
CmsEntity result = new CmsEntity(entityId, getTypeName());
for (CmsEntityAttribute attribute : getAttributes()) {
if (attribute.isSimpleValue()) {
List<String> values = attribute.getSimpleValues();
for (String value : values) {
result.addAttributeValue(attribute.getAttributeName(), value);
}
} else {
List<CmsEntity> values = attribute.getComplexValues();
for (CmsEntity value : values) {
result.addAttributeValue(attribute.getAttributeName(), value.createDeepCopy(null));
}
}
}
return result;
}
/**
* Ensures that the change event is also fired on child entity change.<p>
*/
public void ensureChangeHandlers() {
if (!m_changeHandlerRegistry.isEmpty()) {
for (HandlerRegistration reg : m_changeHandlerRegistry.values()) {
reg.removeHandler();
}
m_changeHandlerRegistry.clear();
}
for (List<CmsEntity> attr : m_entityAttributes.values()) {
for (CmsEntity child : attr) {
registerChangeHandler(child);
child.ensureChangeHandlers();
}
}
}
/**
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
boolean result = false;
if (obj instanceof CmsEntity) {
CmsEntity test = (CmsEntity)obj;
if (m_simpleAttributes.keySet().equals(test.m_simpleAttributes.keySet())
&& m_entityAttributes.keySet().equals(test.m_entityAttributes.keySet())) {
result = true;
for (String attributeName : m_simpleAttributes.keySet()) {
if (!m_simpleAttributes.get(attributeName).equals(test.m_simpleAttributes.get(attributeName))) {
result = false;
break;
}
}
if (result) {
for (String attributeName : m_entityAttributes.keySet()) {
if (!m_entityAttributes.get(attributeName).equals(test.m_entityAttributes.get(attributeName))) {
result = false;
break;
}
}
}
}
}
return result;
}
/**
* @see com.google.gwt.event.shared.HasHandlers#fireEvent(com.google.gwt.event.shared.GwtEvent)
*/
public void fireEvent(GwtEvent<?> event) {
ensureHandlers().fireEventFromSource(event, this);
}
/**
* Returns an attribute.<p>
*
* @param attributeName the attribute name
*
* @return the attribute value
*/
public CmsEntityAttribute getAttribute(String attributeName) {
if (m_simpleAttributes.containsKey(attributeName)) {
return CmsEntityAttribute.createSimpleAttribute(attributeName, m_simpleAttributes.get(attributeName));
}
if (m_entityAttributes.containsKey(attributeName)) {
return CmsEntityAttribute.createEntityAttribute(attributeName, m_entityAttributes.get(attributeName));
}
return null;
}
/**
* Returns all entity attributes.<p>
*
* @return the entity attributes
*/
public List<CmsEntityAttribute> getAttributes() {
List<CmsEntityAttribute> result = new ArrayList<CmsEntityAttribute>();
for (String name : m_simpleAttributes.keySet()) {
result.add(getAttribute(name));
}
for (String name : m_entityAttributes.keySet()) {
result.add(getAttribute(name));
}
return result;
}
/**
* Returns this or a child entity with the given id.<p>
* Will return <code>null</code> if no entity with the given id is present.<p>
*
* @param entityId the entity id
*
* @return the entity
*/
public CmsEntity getEntityById(String entityId) {
CmsEntity result = null;
if (m_id.equals(entityId)) {
result = this;
} else {
for (List<CmsEntity> children : m_entityAttributes.values()) {
for (CmsEntity child : children) {
result = child.getEntityById(entityId);
if (result != null) {
break;
}
}
if (result != null) {
break;
}
}
}
return result;
}
/**
* Returns the entity id.<p>
*
* @return the id
*/
public String getId() {
return m_id;
}
/**
* Returns the entity type name.<p>
*
* @return the entity type name
*/
public String getTypeName() {
return m_typeName;
}
/**
* Returns if the entity has the given attribute.<p>
*
* @param attributeName the attribute name
*
* @return <code>true</code> if the entity has the given attribute
*/
public boolean hasAttribute(String attributeName) {
return m_simpleAttributes.containsKey(attributeName) || m_entityAttributes.containsKey(attributeName);
}
/**
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return super.hashCode();
}
/**
* Inserts a new attribute value at the given index.<p>
*
* @param attributeName the attribute name
* @param value the attribute value
* @param index the value index
*/
public void insertAttributeValue(String attributeName, CmsEntity value, int index) {
if (m_entityAttributes.containsKey(attributeName)) {
m_entityAttributes.get(attributeName).add(index, value);
} else {
setAttributeValue(attributeName, value);
}
registerChangeHandler(value);
fireChange(ChangeType.add);
}
/**
* Inserts a new attribute value at the given index.<p>
*
* @param attributeName the attribute name
* @param value the attribute value
* @param index the value index
*/
public void insertAttributeValue(String attributeName, String value, int index) {
if (m_simpleAttributes.containsKey(attributeName)) {
m_simpleAttributes.get(attributeName).add(index, value);
} else {
setAttributeValue(attributeName, value);
}
fireChange(ChangeType.add);
}
/**
* Removes the given attribute.<p>
*
* @param attributeName the attribute name
*/
public void removeAttribute(String attributeName) {
removeAttributeSilent(attributeName);
fireChange(ChangeType.remove);
}
<|fim▁hole|> * Removes the attribute without triggering any change events.<p>
*
* @param attributeName the attribute name
*/
public void removeAttributeSilent(String attributeName) {
CmsEntityAttribute attr = getAttribute(attributeName);
if (attr != null) {
if (attr.isSimpleValue()) {
m_simpleAttributes.remove(attributeName);
} else {
for (CmsEntity child : attr.getComplexValues()) {
removeChildChangeHandler(child);
}
m_entityAttributes.remove(attributeName);
}
}
}
/**
* Removes a specific attribute value.<p>
*
* @param attributeName the attribute name
* @param index the value index
*/
public void removeAttributeValue(String attributeName, int index) {
if (m_simpleAttributes.containsKey(attributeName)) {
List<String> values = m_simpleAttributes.get(attributeName);
if ((values.size() == 1) && (index == 0)) {
removeAttributeSilent(attributeName);
} else {
values.remove(index);
}
} else if (m_entityAttributes.containsKey(attributeName)) {
List<CmsEntity> values = m_entityAttributes.get(attributeName);
if ((values.size() == 1) && (index == 0)) {
removeAttributeSilent(attributeName);
} else {
CmsEntity child = values.remove(index);
removeChildChangeHandler(child);
}
}
fireChange(ChangeType.remove);
}
/**
* Sets the given attribute value. Will remove all previous attribute values.<p>
*
* @param attributeName the attribute name
* @param value the attribute value
*/
public void setAttributeValue(String attributeName, CmsEntity value) {
// make sure there is no attribute value set
removeAttributeSilent(attributeName);
addAttributeValue(attributeName, value);
}
/**
* Sets the given attribute value at the given index.<p>
*
* @param attributeName the attribute name
* @param value the attribute value
* @param index the value index
*/
public void setAttributeValue(String attributeName, CmsEntity value, int index) {
if (m_simpleAttributes.containsKey(attributeName)) {
throw new RuntimeException("Attribute already exists with a simple type value.");
}
if (!m_entityAttributes.containsKey(attributeName)) {
if (index != 0) {
throw new IndexOutOfBoundsException();
} else {
addAttributeValue(attributeName, value);
}
} else {
if (m_entityAttributes.get(attributeName).size() > index) {
CmsEntity child = m_entityAttributes.get(attributeName).remove(index);
removeChildChangeHandler(child);
}
m_entityAttributes.get(attributeName).add(index, value);
fireChange(ChangeType.change);
}
}
/**
* Sets the given attribute value. Will remove all previous attribute values.<p>
*
* @param attributeName the attribute name
* @param value the attribute value
*/
public void setAttributeValue(String attributeName, String value) {
m_entityAttributes.remove(attributeName);
List<String> values = new ArrayList<String>();
values.add(value);
m_simpleAttributes.put(attributeName, values);
fireChange(ChangeType.change);
}
/**
* Sets the given attribute value at the given index.<p>
*
* @param attributeName the attribute name
* @param value the attribute value
* @param index the value index
*/
public void setAttributeValue(String attributeName, String value, int index) {
if (m_entityAttributes.containsKey(attributeName)) {
throw new RuntimeException("Attribute already exists with a simple type value.");
}
if (!m_simpleAttributes.containsKey(attributeName)) {
if (index != 0) {
throw new IndexOutOfBoundsException();
} else {
addAttributeValue(attributeName, value);
}
} else {
if (m_simpleAttributes.get(attributeName).size() > index) {
m_simpleAttributes.get(attributeName).remove(index);
}
m_simpleAttributes.get(attributeName).add(index, value);
fireChange(ChangeType.change);
}
}
/**
* Returns the JSON string representation of this entity.<p>
*
* @return the JSON string representation of this entity
*/
public String toJSON() {
StringBuffer result = new StringBuffer();
result.append("{\n");
for (Entry<String, List<String>> simpleEntry : m_simpleAttributes.entrySet()) {
result.append("\"").append(simpleEntry.getKey()).append("\"").append(": [\n");
boolean firstValue = true;
for (String value : simpleEntry.getValue()) {
if (firstValue) {
firstValue = false;
} else {
result.append(",\n");
}
result.append("\"").append(value).append("\"");
}
result.append("],\n");
}
for (Entry<String, List<CmsEntity>> entityEntry : m_entityAttributes.entrySet()) {
result.append("\"").append(entityEntry.getKey()).append("\"").append(": [\n");
boolean firstValue = true;
for (CmsEntity value : entityEntry.getValue()) {
if (firstValue) {
firstValue = false;
} else {
result.append(",\n");
}
result.append(value.toJSON());
}
result.append("],\n");
}
result.append("\"id\": \"").append(m_id).append("\"");
result.append("}");
return result.toString();
}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return toJSON();
}
/**
* Adds this handler to the widget.
*
* @param <H> the type of handler to add
* @param type the event type
* @param handler the handler
* @return {@link HandlerRegistration} used to remove the handler
*/
protected final <H extends EventHandler> HandlerRegistration addHandler(final H handler, GwtEvent.Type<H> type) {
return ensureHandlers().addHandlerToSource(type, this, handler);
}
/**
* Fires the change event for this entity.<p>
*
* @param type the change type
*/
void fireChange(ChangeType type) {
CmsEntityChangeEvent event = new CmsEntityChangeEvent(this, type);
fireEvent(event);
}
/**
* Lazy initializing the handler manager.<p>
*
* @return the handler manager
*/
private SimpleEventBus ensureHandlers() {
if (m_eventBus == null) {
m_eventBus = new SimpleEventBus();
}
return m_eventBus;
}
/**
* Adds the value change handler to the given entity.<p>
*
* @param child the child entity
*/
private void registerChangeHandler(CmsEntity child) {
HandlerRegistration reg = child.addValueChangeHandler(m_childChangeHandler);
m_changeHandlerRegistry.put(child.getId(), reg);
}
/**
* Removes the child entity change handler.<p>
*
* @param child the child entity
*/
private void removeChildChangeHandler(CmsEntity child) {
HandlerRegistration reg = m_changeHandlerRegistry.remove(child.getId());
if (reg != null) {
reg.removeHandler();
}
}
}<|fim▁end|> |
/**
|
<|file_name|>show_in_website_for_template_item.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import frappe
import frappe.website.render
<|fim▁hole|>def execute():
for item_code in frappe.db.sql_list("""select distinct variant_of from `tabItem`
where variant_of is not null and variant_of !='' and show_in_website=1"""):
item = frappe.get_doc("Item", item_code)
item.db_set("show_in_website", 1, update_modified=False)
item.get_route()
item.db_set("page_name", item.page_name, update_modified=False)
frappe.website.render.clear_cache()<|fim▁end|> | |
<|file_name|>configparser.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
#
# AuthorDetector
# Copyright (C) 2013 Larroque Stephen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from auxlib import *
from collections import OrderedDict
json = import_module('ujson')
if json is None:
json = import_module('json')
if json is None:
raise RuntimeError('Unable to find a json implementation')
## ConfigParser
#
# Configuration parser, will parse and load in memory the configuration and commandline switches
class ConfigParser(object):
# Configuration file path
configfile = 'config.json'
# Configuration parameters tree (will be referenced by almost all other objects across the whole application)
config = []
## Constructor
def __init__(self, *args, **kwargs):
return object.__init__(self, *args, **kwargs)
## Initialize the ConfigParser object by checking that the configuration file exists
# @param configfile Path to the configuration file (must exists or else the application will crash!)
def init(self, configfile=None, *args, **kwargs):
if configfile:
try:
with open(configfile): pass # check that the file exists
self.configfile = configfile
except IOError, e:
print "Can't open the specified configuration file %s, error: %s" % (configfile, str(e))
return
## Load a configuration file into the local dict
# @param pargs Recognized (processed) commandline arguments (this will overwrite parameters from the configuration file in case of conflicts)
# @param extras Unrecognized (unprocessed) commandline arguments (will also overwrite parameters from the configuration file)
# @param comments If set to true, Javascript-like comments will be filtered from the configuration file
def load(self, pargs=None, extras=None, comments=True, *args, **kwargs):
# Loading the configuration file
with file(self.configfile) as f:
# If there are comments in the config, filter them before converting the json to a Python object
if comments:
self.config = json.loads(self._removecomments(f.read()))
# Else we can directly load the json
else:
self.config = json.loads(f.read())
# Overwriting with recognized commandline switches
if pargs:
for key, value in pargs.iteritems():
# only add the argument in config if the argument has a value (not False nor None) and this key is not already defined in the config (so an argument can only overwrite config if defined)
if not (self.config.has_key(key) and not value):
self.config[key] = value
# Overwriting with extras commandline switches
if extras:
i = 0
while i < len(extras):
key = extras[i]
# Check if the argument accepts a value
if '--' in key and i+1 < len(extras) and not '--' in extras[i+1]: # if the argument begins with --, and there is an argument after this one, and the next argument is in fact a value (does not begin with --), we store it with the value
self.config[key.lstrip('-')] = extras[i+1]
i += 1 # skip the next argument (which we used as a value)
# Else this argument has no value, we just set it to True
else:
self.config[key.lstrip('-')] = True
i += 1
## Reload the configuration file
def reload(self, *args, **kwargs):
self.load(comments=True, *args, **kwargs)
## Save the current configuration (with commandline arguments processed) into a file
# @param file Path to where the configuration file should be saved
def save(self, file, *args, **kwargs):
with open(file, 'wb') as f: # open in binary mode to avoid line returns translation (else the reading will be flawed!). We have to do it both at saving and at reading.
f.write( json.dumps(self.config, sort_keys=True, indent=4) ) # write the config as a json serialized string, but beautified to be more human readable
return True
# Get a value from the config dict (this is a proxy method)
def get(self, *args, **kwargs):
if isinstance(self.config, (dict, OrderedDict)):
return self.config.get(*args, **kwargs)
else:
# Safe list getter, with exception handling and default value supplied
try:
return self.config[args[0]]
except IndexError:
if len(args > 1):
return args[1]
else: # by default if no default value was specified, we return None (just like for dictionaries)
return None
# Set a value in the config dict (this is a proxy method)
def set(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
# Set a value in the config dict (this is a proxy method)
def update(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
<|fim▁hole|> # @param s string to filter
# @return string filtered string without comments
def _removecomments(self, s):
inCommentSingle = False
inCommentMulti = False
inString = False
t = []
l = len(s)
i = 0
fromIndex = 0
while i < l:
c = s[i]
if not inCommentMulti and not inCommentSingle:
if c == '"':
slashes = 0
for j in xrange(i - 1, 0, -1):
if s[j] != '\\':
break
slashes += 1
if slashes % 2 == 0:
inString = not inString
elif not inString:
if c == '#':
inCommentSingle = True
t.append(s[fromIndex:i])
elif c == '/' and i + 1 < l:
cn = s[i + 1]
if cn == '/':
inCommentSingle = True
t.append(s[fromIndex:i])
i += 1
elif cn == '*':
inCommentMulti = True
t.append(s[fromIndex:i])
i += 1
elif inCommentSingle and (c == '\n' or c == '\r'):
inCommentSingle = False
fromIndex = i
elif inCommentMulti and c == '*' and i + 1 < l and s[i + 1] == '/':
inCommentMulti = False
i += 1
fromIndex = i + 1
i += 1
if not inCommentSingle and not inCommentMulti:
t.append(s[fromIndex:len(s)])
return "".join(t)<|fim▁end|> | ## Filter efficiently Javascript-like inline and multiline comments from a JSON file
# Author: WizKid https://gist.github.com/WizKid/1170297 |
<|file_name|>workerTest.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015-2021 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from toil.common import Config
from toil.job import CheckpointJobDescription, JobDescription
from toil.jobStores.fileJobStore import FileJobStore
from toil.test import ToilTest, travis_test
from toil.worker import nextChainable
class WorkerTests(ToilTest):
"""Test miscellaneous units of the worker."""
def setUp(self):
super(WorkerTests, self).setUp()
path = self._getTestJobStorePath()
self.jobStore = FileJobStore(path)
self.config = Config()
self.config.jobStore = 'file:%s' % path
self.jobStore.initialize(self.config)
self.jobNumber = 0
@travis_test
def testNextChainable(self):
"""Make sure chainable/non-chainable jobs are identified correctly."""
def createTestJobDesc(memory, cores, disk, preemptable, checkpoint):
"""
Create a JobDescription with no command (representing a Job that
has already run) and return the JobDescription.
"""
name = 'job%d' % self.jobNumber
self.jobNumber += 1
descClass = CheckpointJobDescription if checkpoint else JobDescription
jobDesc = descClass(requirements={'memory': memory, 'cores': cores, 'disk': disk, 'preemptable': preemptable}, jobName=name)
# Assign an ID
self.jobStore.assignID(jobDesc)
# Save and return the JobDescription
return self.jobStore.create(jobDesc)
for successorType in ['addChild', 'addFollowOn']:
# Try with the branch point at both child and follow-on stages
# Identical non-checkpoint jobs should be chainable.
jobDesc1 = createTestJobDesc(1, 2, 3, True, False)
jobDesc2 = createTestJobDesc(1, 2, 3, True, False)
getattr(jobDesc1, successorType)(jobDesc2.jobStoreID)
chainable = nextChainable(jobDesc1, self.jobStore, self.config)
self.assertNotEqual(chainable, None)
self.assertEqual(jobDesc2.jobStoreID, chainable.jobStoreID)
# Identical checkpoint jobs should not be chainable.
jobDesc1 = createTestJobDesc(1, 2, 3, True, False)
jobDesc2 = createTestJobDesc(1, 2, 3, True, True)
getattr(jobDesc1, successorType)(jobDesc2.jobStoreID)
self.assertEqual(None, nextChainable(jobDesc1, self.jobStore, self.config))
# If there is no child we should get nothing to chain.
jobDesc1 = createTestJobDesc(1, 2, 3, True, False)
self.assertEqual(None, nextChainable(jobDesc1, self.jobStore, self.config))
# If there are 2 or more children we should get nothing to chain.<|fim▁hole|> getattr(jobDesc1, successorType)(jobDesc2.jobStoreID)
getattr(jobDesc1, successorType)(jobDesc3.jobStoreID)
self.assertEqual(None, nextChainable(jobDesc1, self.jobStore, self.config))
# If there is an increase in resource requirements we should get nothing to chain.
reqs = {'memory': 1, 'cores': 2, 'disk': 3, 'preemptable': True, 'checkpoint': False}
for increased_attribute in ('memory', 'cores', 'disk'):
jobDesc1 = createTestJobDesc(**reqs)
reqs[increased_attribute] += 1
jobDesc2 = createTestJobDesc(**reqs)
getattr(jobDesc1, successorType)(jobDesc2.jobStoreID)
self.assertEqual(None, nextChainable(jobDesc1, self.jobStore, self.config))
# A change in preemptability from True to False should be disallowed.
jobDesc1 = createTestJobDesc(1, 2, 3, True, False)
jobDesc2 = createTestJobDesc(1, 2, 3, False, True)
getattr(jobDesc1, successorType)(jobDesc2.jobStoreID)
self.assertEqual(None, nextChainable(jobDesc1, self.jobStore, self.config))<|fim▁end|> | jobDesc1 = createTestJobDesc(1, 2, 3, True, False)
jobDesc2 = createTestJobDesc(1, 2, 3, True, False)
jobDesc3 = createTestJobDesc(1, 2, 3, True, False) |
<|file_name|>exames.server.controller.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
errorHandler = require('./errors.server.controller'),
Exame = mongoose.model('Exame'),
_ = require('lodash');
/**
* Create a Exame
*/
exports.create = function(req, res) {
var exame = new Exame(req.body);
exame.user = req.user;
exame.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
exports.addPergunta= function(exameId,perguntaId){
Exame.findById(exameId).exec(function(err,exame){
if(err){
console.log('erro finding exam first');
return;
}
else{
if(!exame){
// console.log('exam not found'+exameId);
return;
}
var exame1=exame.toObject();
exame1._perguntas.push(perguntaId);
exame = _.extend(exame , exame1);
exame.save(function(err) {
if (err) {
//console.log('erro ao salvar');
return;
} else {
//console.log('sucesso');
}
});
}
});
};
exports.listar = function(req, res) {
Exame.find().select('id ano').exec(function (err,exames) {
// body...
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
else{
res.jsonp(exames);
}
});
};
/**
* Show the current Exame
*/
exports.read = function(req, res) {
Exame.findById(req.params.exameId).populate({path:'_perguntas',model:'Pergunta'}).populate('disciplina').exec(function(err,exame){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
else{
if(!exame){
return res.status(404).send({message:'Exame nao encontrado'});
}
Exame.populate(exame._perguntas,{
path:'_ajuda',
model:'Ajuda'},
function(err,docs){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
exame._ajuda=docs;
});
Exame.populate(exame._perguntas,{
path:'_alternativas',
model:'Alternativa'},
function(err,docs){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
// console.log(docs.toObject());
// exame._perguntas=docs;
res.jsonp(exame); //exame=docs;
});
//res.jsonp(exame);
}
});
};
/**
* Exame middleware
*/
// exports.exameByID = function(req, res, next, id) {
// Exame.findById(id).populate('_perguntas').exec(function(err, exame) {
// //Exame.findById(id).deepPopulate('_perguntas.alternativas').exec(function(err, exame) {
// if (err) return next(err);
// if (! exame) return next(new Error('Failed to load Exame ' + id));
// req.exame = exame ;
// next();
// });<|fim▁hole|>/**
* Update a Exame
*/
exports.update = function(req, res) {
var exame = req.exame ;
exame = _.extend(exame , req.body);
exame.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
/**
* Delete an Exame
*/
exports.delete = function(req, res) {
var exame = req.exame ;
exame.remove(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
/**
* List of Exames
*/
exports.list = function(req, res) {
Exame.find().select('id ano disciplina').populate('disciplina','name').sort({ano:-1}).exec(function(err, exames) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exames);
}
});
};
/**
* Exame authorization middleware
*/
exports.hasAuthorization = function(req, res, next) {
if (req.exame.user.id !== req.user.id) {
return res.status(403).send('User is not authorized');
}
next();
};<|fim▁end|> | // };
|
<|file_name|>gen_user_followers.py<|end_file_name|><|fim▁begin|>from datetime import *
from Tweetstream import *
from UserAnalyser import *
from TimeAnalyser import *
import math
import sys
import pickle
#Frequency over the common
def load_list(filein):
d = dict()
for l in filein:
l = eval(l)
d[l[0]] = l[1]
return d
if __name__ == "__main__":
follow = load_list(open(sys.argv[5], 'r'))
keywords = open(sys.argv[2], 'r').readline().strip("\n").split(",")
userstream = Tweetstream(jsonfilee=sys.argv[3], jsonformat=False, keywords=keywords)
topicstream = Tweetstream(jsonfilee=sys.argv[1], jsonformat=False, keywords=keywords)
ua = UserAnalyser (sys.argv[4], keywords = keywords)
ua.load_usersVectors()
ua.load_idf()
ua.load_usersScore()
rank = dict()
# normalizar pelo numero de kw no topic vector
c = 0<|fim▁hole|> for t in userstream:
rank[t['id']] = 0
n = 0
if t['user_id'] in follow:
c += 1
for fuser in follow[t['user_id']]:
if fuser in ua.usersScore:
rank[t['id']] += ua.usersScore[fuser]
n += 1
if n > 0: rank[t['id']] /= n
print c
#prinit score, nwindow
pickle.dump(rank, open(sys.argv[4]+"_rank_USER_followers.pick", 'w'), pickle.HIGHEST_PROTOCOL)<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from functools import partial
import testtools
from tempest import config
from tempest.exceptions import InvalidServiceTag
from tempest.lib.common.utils import data_utils as lib_data_utils
from tempest.lib import decorators
CONF = config.CONF
class DataUtils(object):
def __getattr__(self, attr):
if attr == 'rand_name':
# NOTE(flwang): This is a proxy to generate a random name that
# includes a random number and a prefix 'tempest'
attr_obj = partial(lib_data_utils.rand_name,
prefix='tempest')
else:
attr_obj = getattr(lib_data_utils, attr)
self.__dict__[attr] = attr_obj
return attr_obj
data_utils = DataUtils()
def get_service_list():
service_list = {
'compute': CONF.service_available.nova,
'image': CONF.service_available.glance,
'volume': CONF.service_available.cinder,
# NOTE(masayukig): We have two network services which are neutron and
# nova-network. And we have no way to know whether nova-network is
# available or not. After the pending removal of nova-network from
# nova, we can treat the network/neutron case in the same manner as
# the other services.
'network': True,
# NOTE(masayukig): Tempest tests always require the identity service.
# So we should set this True here.
'identity': True,
'object_storage': CONF.service_available.swift,
}
return service_list
def services(*args):
"""A decorator used to set an attr for each service used in a test case
This decorator applies a testtools attr for each service that gets
exercised by a test case.
"""
def decorator(f):
known_services = get_service_list()
for service in args:
if service not in known_services:
raise InvalidServiceTag('%s is not a valid service' % service)
decorators.attr(type=list(args))(f)
@functools.wraps(f)
def wrapper(*func_args, **func_kwargs):
service_list = get_service_list()
for service in args:
if not service_list[service]:
msg = 'Skipped because the %s service is not available' % (
service)
raise testtools.TestCase.skipException(msg)
return f(*func_args, **func_kwargs)
return wrapper
return decorator<|fim▁hole|>
def requires_ext(**kwargs):
"""A decorator to skip tests if an extension is not enabled
@param extension
@param service
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*func_args, **func_kwargs):
if not is_extension_enabled(kwargs['extension'],
kwargs['service']):
msg = "Skipped because %s extension: %s is not enabled" % (
kwargs['service'], kwargs['extension'])
raise testtools.TestCase.skipException(msg)
return func(*func_args, **func_kwargs)
return wrapper
return decorator
def is_extension_enabled(extension_name, service):
"""A function that will check the list of enabled extensions from config
"""
config_dict = {
'compute': CONF.compute_feature_enabled.api_extensions,
'volume': CONF.volume_feature_enabled.api_extensions,
'network': CONF.network_feature_enabled.api_extensions,
'object': CONF.object_storage_feature_enabled.discoverable_apis,
'identity': CONF.identity_feature_enabled.api_extensions
}
if not config_dict[service]:
return False
if config_dict[service][0] == 'all':
return True
if extension_name in config_dict[service]:
return True
return False<|fim▁end|> | |
<|file_name|>Solution2.java<|end_file_name|><|fim▁begin|>package contests;
/**
* Created by sherxon on 2/27/17.
*/
public class Solution2 {
public static void main(String[] args) {
}
static public String shortestPalindrome(String s) {
if (s.length() <= 1) return s;
char[] a = s.toCharArray();
StringBuilder sb = new StringBuilder();
int i = 0;
int j = a.length - 1;
while (i < j) {
if (a[j] == a[i]) {
j--;
i++;
} else {<|fim▁hole|> System.out.println(s.substring(0, i));
return sb.toString() + s.substring(i);
}
}<|fim▁end|> | i = 0;
}
} |
<|file_name|>up_action.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Defines an action for moving the workspace to the parent directory.
"""
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from os.path import dirname
from enthought.traits.api import Bool, Instance
from enthought.pyface.api import ImageResource
from enthought.pyface.action.api import Action
from enthought.envisage.ui.workbench.api import WorkbenchWindow
from puddle.resource.resource_view import RESOURCE_VIEW
from common import IMAGE_LOCATION
#------------------------------------------------------------------------------
# "UpAction" class:
#------------------------------------------------------------------------------
class UpAction(Action):
""" Defines an action for moving the workspace to the parent directory.
"""
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
# A longer description of the action:
description = "Move workspace to the parent directory"
# The action"s name (displayed on menus/tool bar tools etc):
name = "&Up"
# A short description of the action used for tooltip text etc:
tooltip = "Open parent directory"
# Keyboard accelerator:
accelerator = "Alt+Up"
<|fim▁hole|> image = ImageResource("up", search_path=[IMAGE_LOCATION])
#--------------------------------------------------------------------------
# "UpAction" interface:
#--------------------------------------------------------------------------
window = Instance(WorkbenchWindow)
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
def perform(self, event):
""" Perform the action.
"""
# Note that we always offer the service via its name, but look it up
# via the actual protocol.
from puddle.resource.i_workspace import IWorkspace
workspace = self.window.application.get_service(IWorkspace)
workspace.path = dirname(workspace.absolute_path)
view = self.window.get_view_by_id(RESOURCE_VIEW)
if view is not None:
workspace = self.window.application.get_service(IWorkspace)
view.tree_viewer.refresh(workspace)
# EOF -------------------------------------------------------------------------<|fim▁end|> | # The action's image (displayed on tool bar tools etc): |
<|file_name|>ssh.py<|end_file_name|><|fim▁begin|>import os
import subprocess
SSH_OPTIONS = ['-o', 'StrictHostKeyChecking=no', '-o', 'PreferredAuthentications=publickey', '-o', 'PubkeyAuthentication=yes']
def rsync_get_file(uri_from, uri_to, user, host, port, key):
cmd = [
'rsync',
'-e',
'ssh -i {} -p {} {}'.format(key, port, ' '.join(SSH_OPTIONS)),
'{}@{}:{}'.format(user, host, uri_from),
uri_to,
]
_call(cmd)
def rsync_post_file(uri_from, uri_to, user, host, port, key):
_ensure_dir(uri_to, key, port, user, host)
cmd = [
'rsync',
'-e',
'ssh -i {} -p {} {}'.format(key, port, ' '.join(SSH_OPTIONS)),
uri_from,
'{}@{}:{}'.format(user, host, uri_to),
]
_call(cmd)
def scp_get_file(uri_from, uri_to, user, host, port, key):
cmd = [
'scp',
'-P', str(port),
'-i', key
] + SSH_OPTIONS + [
'{}@{}:{}'.format(user, host, uri_from),
uri_to,
]
_call(cmd)
def scp_post_file(uri_from, uri_to, user, host, port, key):
_ensure_dir(uri_to, key, port, user, host)
cmd = [
'scp',
'-P', str(port),
'-i', key,
] + SSH_OPTIONS + [
uri_from,
'{}@{}:{}'.format(user, host, uri_to),
]
_call(cmd)
def _ensure_dir(uri_to, key, port, user, host):
directory = os.path.dirname(uri_to)<|fim▁hole|> '-p', str(port),
] + SSH_OPTIONS + [
'{}@{}'.format(user, host),
'mkdir', '-p', directory,
]
_call(cmd)
def _call(cmd):
exit_code = subprocess.check_call(cmd)
if exit_code != 0:
raise Exception("{} exited with code {}".format(cmd[0], exit_code))
___all__ = [
'rsync_post_file',
'rsync_get_file',
'scp_post_file',
'scp_get_file'
]<|fim▁end|> | cmd = [
'ssh',
'-i', key, |
<|file_name|>b-dummy-module-loader.ts<|end_file_name|><|fim▁begin|>/*!
* V4Fire Client Core
* https://github.com/V4Fire/Client
*
* Released under the MIT license
* https://github.com/V4Fire/Client/blob/master/LICENSE
*/
/**
* [[include:dummies/b-dummy-module-loader/README.md]]
* @packageDocumentation
*/
import iData, { component, prop, Module } from 'super/i-data/i-data';
export * from 'super/i-data/i-data';
@component({
functional: {<|fim▁hole|> dataProvider: undefined
}
})
export default class bDummyModuleLoader extends iData {
@prop({
default: () => globalThis.loadFromProp === true ?
[
{
id: 'b-dummy-module1',
load: () => import('dummies/b-dummy-module-loader/b-dummy-module1')
},
{
id: 'b-dummy-module2',
load: () => import('dummies/b-dummy-module-loader/b-dummy-module2')
}
] :
[]
})
override dependenciesProp!: Module[];
}<|fim▁end|> | functional: true, |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>/* Aurélien DESBRIÈRES
aurelien(at)hackers(dot)camp
License GNU GPL latest */
// Rust experimentations ───────────────┐
// Std Library Result ──────────────────┘
mod checked {
// Mathematical "errors" we want to catch
#[derive(Debug)]
pub enum MathError {
DivisionByZero,
NonPositiveLogarithm,
NegativeSquareRoot,
}
pub type MathResult = Result<f64, MathError>;
pub fn div(x: f64, y: f64) -> MathResult {
if y == 0.0 {
// this operation would `fail`, instead let's return the reason of
// the failur wrapped in `Err`
Err(MathError::DivisionByZero)
} else {
// This operation is valid, return the result wrapped in `Ok`
Ok(x / y)
}
}
pub fn sqrt(x: f64) -> MathResult {
if x < 0.0 {
Err(MathError::NegativeSquareRoot)
} else {
Ok(x.sqrt())
}
}
pub fn ln(x: f64) -> MathResult {
if x <= 0.0 {
Err(MathError::NonPositiveLogarithm)
} else {
Ok(x.ln())
}
}
}
// `op(x, y)` == `sqrt(ln(x / y))`
fn op(x: f64, y: f64) -> f64 {
// This is a three level match pyramid!
match checked::div(x, y) {
Err(why) => panic!("{:?}", why),
Ok(ratio) => match checked::ln(ratio) {
Err(why) => panic!("{:?}", why),
Ok(ln) => match checked::sqrt(ln) {
Err(why) => panic!("{:?}", why),
Ok(sqrt) => sqrt,
},
},
}
}
<|fim▁hole|>fn main() {
// Will this fail?
println!("{}", op(1.0, 10.0));
}<|fim▁end|> | |
<|file_name|>toggledlabelframe.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
""""""
import tkinter as tk
from tkinter import ttk
# http://infohost.nmt.edu/tcc/help/pubs/tkinter/web/ttk-LabelFrame.html
__title__ = "ToggledLabelFrame"
__version__ = "1.2.5"
__author__ = "DeflatedPickle"
class ToggledLabelFrame(ttk.LabelFrame):
"""
-----DESCRIPTION-----
This widget is used to store any other widgets inside of it.
It can be toggled on or off, so widgets inside of it aren't always shown.
-----USAGE-----
toggledFrame = ToggledLabelFrame(parent, on_text=[string], off_text=[string], default_state=[boolean], state="enabled")
toggledFrame.pack()
button = Button(toggledFrame.frame).pack()
-----PARAMETERS-----
parent = The parent of the widget.
on_text = The text displayed when the button is active.
off_text = The text displayed when the button is inactive.
default_state = The state the widget starts on.
state = The state of the button.
-----CONTENTS-----
---VARIABLES---
parent = The parent of the widget.
_on_text = The text displayed when the button is active.
_off_text = The text displayed when the button is inactive.
_default_state = The state the widget starts on.
_state = The state of the button.
---TKINTER VARIABLES---
variable = The variable used for the Button.
---WIDGETS---
self
_fill = A placeholder.
_button = The button that toggles the frame.
_frame = The frame which holds widgets.
---FUNCTIONS---
_activate() = Checks value of variable and shows or hides the frame.
toggle() = Switches the label frame to the opposite state.
"""
def __init__(self, parent, on_text="Active", off_text="Inactive", default_state=False, state="enabled", *args):
ttk.LabelFrame.__init__(self, parent, labelanchor="n", *args)
self.parent = parent
self._on_text = on_text
self._off_text = off_text
self._default_state = default_state
self._state = state
self._fill = tk.Frame(self, height=5)
self._variable = tk.IntVar()
self._variable.set(default_state)
self._button = ttk.Checkbutton(self, width=11, state=self._state, variable=self._variable, command=self._activate, style="TButton")
self.configure(labelwidget=self._button)
self.frame = ttk.Frame(self)
self._activate()
def _activate(self):
if not self._variable.get():
self._fill.pack()
self.frame.forget()
self._button.configure(text=self._off_text)
if self._variable.get():
self._fill.forget()
self.frame.pack(fill="both", expand=True)
self._button.configure(text=self._on_text)
<|fim▁hole|> self._activate()
##################################################
if __name__ == "__main__":
root = tk.Tk()
tframe = ToggledLabelFrame(root, on_text="Off", off_text="On", default_state=False, state="enabled")
tframe.pack(expand=True, padx=5, pady=5)
for i in range(3):
ttk.Button(tframe.frame).pack()
root.mainloop()<|fim▁end|> |
def toggle(self):
"""Switches the LabelFrame to the opposite state."""
self._variable.set(not self._variable.get())
|
<|file_name|>CustomRenderer.py<|end_file_name|><|fim▁begin|>import sys
import wx
import wx.dataview as dv
#import os; print('PID:'+str(os.getpid())); raw_input("Press enter...")
#----------------------------------------------------------------------
class MyCustomRenderer(dv.DataViewCustomRenderer):
def __init__(self, log, *args, **kw):
dv.DataViewCustomRenderer.__init__(self, *args, **kw)
self.log = log
self.value = None
def SetValue(self, value):
#self.log.write('MyCustomRenderer.SetValue: %s\n' % value)
self.value = value
return True
def GetValue(self):
#self.log.write('MyCustomRenderer.GetValue\n')<|fim▁hole|> # has a helper function we can use for measuring text that is
# aware of any custom attributes that may have been set for
# this item.
value = self.value if self.value else ""
size = self.GetTextExtent(value)
return size
def Render(self, rect, dc, state):
if state != 0:
self.log.write('Render: %s, %d\n' % (rect, state))
if not state & dv.DATAVIEW_CELL_SELECTED:
# we'll draw a shaded background to see if the rect correctly
# fills the cell
dc.SetBrush(wx.Brush('light grey'))
dc.SetPen(wx.TRANSPARENT_PEN)
rect.Deflate(1, 1)
dc.DrawRoundedRectangle(rect, 2)
# And then finish up with this helper function that draws the
# text for us, dealing with alignment, font and color
# attributes, etc
value = self.value if self.value else ""
self.RenderText(value,
4, # x-offset, to compensate for the rounded rectangles
rect,
dc,
state # wxDataViewCellRenderState flags
)
return True
# The HasEditorCtrl, CreateEditorCtrl and GetValueFromEditorCtrl
# methods need to be implemented if this renderer is going to
# support in-place editing of the cell value, otherwise they can
# be omitted.
def HasEditorCtrl(self):
self.log.write('HasEditorCtrl')
return True
def CreateEditorCtrl(self, parent, labelRect, value):
self.log.write('CreateEditorCtrl: %s' % labelRect)
ctrl = wx.TextCtrl(parent,
value=value,
pos=labelRect.Position,
size=labelRect.Size)
# select the text and put the caret at the end
ctrl.SetInsertionPointEnd()
ctrl.SelectAll()
return ctrl
def GetValueFromEditorCtrl(self, editor):
self.log.write('GetValueFromEditorCtrl: %s' % editor)
value = editor.GetValue()
return True, value
# The LeftClick and Activate methods serve as notifications
# letting you know that the user has either clicked or
# double-clicked on an item. Implementing them in your renderer
# is optional.
def LeftClick(self, pos, cellRect, model, item, col):
self.log.write('LeftClick')
return False
def Activate(self, cellRect, model, item, col):
self.log.write('Activate')
return False
#----------------------------------------------------------------------
# To help focus this sample on the custom renderer, we'll reuse the
# model class from another sample.
from IndexListModel import TestModel
class TestPanel(wx.Panel):
def __init__(self, parent, log, model=None, data=None):
self.log = log
wx.Panel.__init__(self, parent, -1)
# Create a dataview control
self.dvc = dv.DataViewCtrl(self, style=wx.BORDER_THEME
| dv.DV_ROW_LINES
#| dv.DV_HORIZ_RULES
| dv.DV_VERT_RULES
| dv.DV_MULTIPLE
)
# Create an instance of the model
if model is None:
self.model = TestModel(data, log)
else:
self.model = model
self.dvc.AssociateModel(self.model)
# Now we create some columns.
c0 = self.dvc.AppendTextColumn("Id", 0, width=40)
c0.Alignment = wx.ALIGN_RIGHT
c0.MinWidth = 40
# We'll use our custom renderer for these columns
for title, col, width in [ ('Artist', 1, 170),
('Title', 2, 260),
('Genre', 3, 80)]:
renderer = MyCustomRenderer(self.log, mode=dv.DATAVIEW_CELL_EDITABLE)
column = dv.DataViewColumn(title, renderer, col, width=width)
column.Alignment = wx.ALIGN_LEFT
self.dvc.AppendColumn(column)
# Layout
self.Sizer = wx.BoxSizer(wx.VERTICAL)
self.Sizer.Add(self.dvc, 1, wx.EXPAND)
#----------------------------------------------------------------------
def main():
from data import musicdata
app = wx.App()
frm = wx.Frame(None, title="CustomRenderer sample", size=(700,500))
pnl = TestPanel(frm, sys.stdout, data=musicdata)
frm.Show()
app.MainLoop()
if __name__ == '__main__':
main()
#----------------------------------------------------------------------<|fim▁end|> | return self.value
def GetSize(self):
# Return the size needed to display the value. The renderer |
<|file_name|>doc.py<|end_file_name|><|fim▁begin|># (c) 2014, James Tanner <[email protected]>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-vault is a script that encrypts/decrypts YAML files. See
# http://docs.ansible.com/playbooks_vault.html for more details.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import traceback
import textwrap
from ansible.compat.six import iteritems, string_types
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.plugins import module_loader, action_loader
from ansible.cli import CLI
from ansible.utils import module_docs
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class DocCLI(CLI):
""" Vault command line class """
def __init__(self, args):
super(DocCLI, self).__init__(args)
self.module_list = []
def parse(self):
self.parser = CLI.base_parser(
usage='usage: %prog [options] [module...]',
epilog='Show Ansible module documentation',
module_opts=True,
)
self.parser.add_option("-l", "--list", action="store_true", default=False, dest='list_dir',
help='List available modules')
self.parser.add_option("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
help='Show playbook snippet for specified module(s)')
self.parser.add_option("-a", "--all", action="store_true", default=False, dest='all_modules',
help='Show documentation for all modules')
super(DocCLI, self).parse()
display.verbosity = self.options.verbosity
def run(self):
super(DocCLI, self).run()
if self.options.module_path is not None:
for i in self.options.module_path.split(os.pathsep):
module_loader.add_directory(i)
# list modules
if self.options.list_dir:
paths = module_loader._get_paths()
for path in paths:
self.find_modules(path)
self.pager(self.get_module_list_text())<|fim▁hole|> return 0
# process all modules
if self.options.all_modules:
paths = module_loader._get_paths()
for path in paths:
self.find_modules(path)
self.args = sorted(set(self.module_list) - module_docs.BLACKLIST_MODULES)
if len(self.args) == 0:
raise AnsibleOptionsError("Incorrect options passed")
# process command line module list
text = ''
for module in self.args:
try:
# if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
filename = module_loader.find_plugin(module, mod_type='.py')
if filename is None:
display.warning("module %s not found in %s\n" % (module, DocCLI.print_paths(module_loader)))
continue
if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
continue
try:
doc, plainexamples, returndocs, metadata = module_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
except:
display.vvv(traceback.format_exc())
display.error("module %s has a documentation error formatting or is missing documentation\nTo see exact traceback use -vvv" % module)
continue
if doc is not None:
# is there corresponding action plugin?
if module in action_loader:
doc['action'] = True
else:
doc['action'] = False
all_keys = []
for (k,v) in iteritems(doc['options']):
all_keys.append(k)
all_keys = sorted(all_keys)
doc['option_keys'] = all_keys
doc['filename'] = filename
doc['docuri'] = doc['module'].replace('_', '-')
doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
doc['plainexamples'] = plainexamples
doc['returndocs'] = returndocs
doc['metadata'] = metadata
if self.options.show_snippet:
text += self.get_snippet_text(doc)
else:
text += self.get_man_text(doc)
else:
# this typically means we couldn't even parse the docstring, not just that the YAML is busted,
# probably a quoting issue.
raise AnsibleError("Parsing produced an empty object.")
except Exception as e:
display.vvv(traceback.format_exc())
raise AnsibleError("module %s missing documentation (or could not parse documentation): %s\n" % (module, str(e)))
if text:
self.pager(text)
return 0
def find_modules(self, path):
for module in os.listdir(path):
full_path = '/'.join([path, module])
if module.startswith('.'):
continue
elif os.path.isdir(full_path):
continue
elif any(module.endswith(x) for x in C.BLACKLIST_EXTS):
continue
elif module.startswith('__'):
continue
elif module in C.IGNORE_FILES:
continue
elif module.startswith('_'):
if os.path.islink(full_path): # avoids aliases
continue
module = os.path.splitext(module)[0] # removes the extension
module = module.lstrip('_') # remove underscore from deprecated modules
self.module_list.append(module)
def get_module_list_text(self):
columns = display.columns
displace = max(len(x) for x in self.module_list)
linelimit = columns - displace - 5
text = []
deprecated = []
for module in sorted(set(self.module_list)):
if module in module_docs.BLACKLIST_MODULES:
continue
# if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
filename = module_loader.find_plugin(module, mod_type='.py')
if filename is None:
continue
if filename.endswith(".ps1"):
continue
if os.path.isdir(filename):
continue
try:
doc, plainexamples, returndocs, metadata = module_docs.get_docstring(filename)
desc = self.tty_ify(doc.get('short_description', '?')).strip()
if len(desc) > linelimit:
desc = desc[:linelimit] + '...'
if module.startswith('_'): # Handle deprecated
deprecated.append("%-*s %-*.*s" % (displace, module[1:], linelimit, len(desc), desc))
else:
text.append("%-*s %-*.*s" % (displace, module, linelimit, len(desc), desc))
except:
raise AnsibleError("module %s has a documentation error formatting or is missing documentation\n" % module)
if len(deprecated) > 0:
text.append("\nDEPRECATED:")
text.extend(deprecated)
return "\n".join(text)
@staticmethod
def print_paths(finder):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in finder._get_paths():
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def get_snippet_text(self, doc):
text = []
desc = CLI.tty_ify(doc['short_description'])
text.append("- name: %s" % (desc))
text.append(" action: %s" % (doc['module']))
pad = 31
subdent = " " * pad
limit = display.columns - pad
for o in sorted(doc['options'].keys()):
opt = doc['options'][o]
desc = CLI.tty_ify(" ".join(opt['description']))
required = opt.get('required', False)
if not isinstance(required, bool):
raise("Incorrect value for 'Required', a boolean is needed.: %s" % required)
if required:
s = o + "="
else:
s = o
text.append(" %-20s # %s" % (s, textwrap.fill(desc, limit, subsequent_indent=subdent)))
text.append('')
return "\n".join(text)
def get_man_text(self, doc):
opt_indent=" "
text = []
text.append("> %s\n" % doc['module'].upper())
pad = display.columns * 0.20
limit = max(display.columns - int(pad), 70)
if isinstance(doc['description'], list):
desc = " ".join(doc['description'])
else:
desc = doc['description']
text.append("%s\n" % textwrap.fill(CLI.tty_ify(desc), limit, initial_indent=" ", subsequent_indent=" "))
# FUTURE: move deprecation to metadata-only
if 'deprecated' in doc and doc['deprecated'] is not None and len(doc['deprecated']) > 0:
text.append("DEPRECATED: \n%s\n" % doc['deprecated'])
metadata = doc['metadata']
supported_by = metadata['supported_by']
text.append("Supported by: %s\n" % supported_by)
status = metadata['status']
text.append("Status: %s\n" % ", ".join(status))
if 'action' in doc and doc['action']:
text.append(" * note: %s\n" % "This module has a corresponding action plugin.")
if 'option_keys' in doc and len(doc['option_keys']) > 0:
text.append("Options (= is mandatory):\n")
for o in sorted(doc['option_keys']):
opt = doc['options'][o]
required = opt.get('required', False)
if not isinstance(required, bool):
raise("Incorrect value for 'Required', a boolean is needed.: %s" % required)
if required:
opt_leadin = "="
else:
opt_leadin = "-"
text.append("%s %s" % (opt_leadin, o))
if isinstance(opt['description'], list):
for entry in opt['description']:
text.append(textwrap.fill(CLI.tty_ify(entry), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
else:
text.append(textwrap.fill(CLI.tty_ify(opt['description']), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
choices = ''
if 'choices' in opt:
choices = "(Choices: " + ", ".join(str(i) for i in opt['choices']) + ")"
default = ''
if 'default' in opt or not required:
default = "[Default: " + str(opt.get('default', '(null)')) + "]"
text.append(textwrap.fill(CLI.tty_ify(choices + default), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
if 'notes' in doc and doc['notes'] and len(doc['notes']) > 0:
text.append("Notes:")
for note in doc['notes']:
text.append(textwrap.fill(CLI.tty_ify(note), limit-6, initial_indent=" * ", subsequent_indent=opt_indent))
if 'requirements' in doc and doc['requirements'] is not None and len(doc['requirements']) > 0:
req = ", ".join(doc['requirements'])
text.append("Requirements:%s\n" % textwrap.fill(CLI.tty_ify(req), limit-16, initial_indent=" ", subsequent_indent=opt_indent))
if 'examples' in doc and len(doc['examples']) > 0:
text.append("Example%s:\n" % ('' if len(doc['examples']) < 2 else 's'))
for ex in doc['examples']:
text.append("%s\n" % (ex['code']))
if 'plainexamples' in doc and doc['plainexamples'] is not None:
text.append("EXAMPLES:")
text.append(doc['plainexamples'])
if 'returndocs' in doc and doc['returndocs'] is not None:
text.append("RETURN VALUES:")
text.append(doc['returndocs'])
text.append('')
maintainers = set()
if 'author' in doc:
if isinstance(doc['author'], string_types):
maintainers.add(doc['author'])
else:
maintainers.update(doc['author'])
if 'maintainers' in doc:
if isinstance(doc['maintainers'], string_types):
maintainers.add(doc['author'])
else:
maintainers.update(doc['author'])
text.append('MAINTAINERS: ' + ', '.join(maintainers))
text.append('')
return "\n".join(text)<|fim▁end|> | |
<|file_name|>roundtripper_test.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The daxxcoreAuthors<|fim▁hole|>// The daxxcore library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The daxxcore library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the daxxcore library. If not, see <http://www.gnu.org/licenses/>.
package http
import (
"io/ioutil"
"net/http"
"strings"
"testing"
"time"
)
const port = "3222"
func TestRoundTripper(t *testing.T) {
serveMux := http.NewServeMux()
serveMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
if r.Method == "GET" {
w.Header().Set("Content-Type", "text/plain")
http.ServeContent(w, r, "", time.Unix(0, 0), strings.NewReader(r.RequestURI))
} else {
http.Error(w, "Method "+r.Method+" is not supported.", http.StatusMethodNotAllowed)
}
})
go http.ListenAndServe(":"+port, serveMux)
rt := &RoundTripper{Port: port}
trans := &http.Transport{}
trans.RegisterProtocol("bzz", rt)
client := &http.Client{Transport: trans}
resp, err := client.Get("bzz://test.com/path")
if err != nil {
t.Errorf("expected no error, got %v", err)
return
}
defer func() {
if resp != nil {
resp.Body.Close()
}
}()
content, err := ioutil.ReadAll(resp.Body)
if err != nil {
t.Errorf("expected no error, got %v", err)
return
}
if string(content) != "/HTTP/1.1:/test.com/path" {
t.Errorf("incorrect response from http server: expected '%v', got '%v'", "/HTTP/1.1:/test.com/path", string(content))
}
}<|fim▁end|> | // This file is part of the daxxcore library.
// |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | module.exports = require('./lib/socket.io'); |
<|file_name|>accesslog.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.14.0
// source: envoy/config/filter/accesslog/v2/accesslog.proto
package envoy_config_filter_accesslog_v2
import (
_ "github.com/cncf/xds/go/udpa/annotations"
core "github.com/envoyproxy/go-control-plane/envoy/api/v2/core"
route "github.com/envoyproxy/go-control-plane/envoy/api/v2/route"
_type "github.com/envoyproxy/go-control-plane/envoy/type"
_ "github.com/envoyproxy/protoc-gen-validate/validate"
proto "github.com/golang/protobuf/proto"
any "github.com/golang/protobuf/ptypes/any"
_struct "github.com/golang/protobuf/ptypes/struct"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type ComparisonFilter_Op int32
const (
// =
ComparisonFilter_EQ ComparisonFilter_Op = 0
// >=
ComparisonFilter_GE ComparisonFilter_Op = 1
// <=
ComparisonFilter_LE ComparisonFilter_Op = 2
)
// Enum value maps for ComparisonFilter_Op.
var (
ComparisonFilter_Op_name = map[int32]string{
0: "EQ",
1: "GE",
2: "LE",
}
ComparisonFilter_Op_value = map[string]int32{
"EQ": 0,
"GE": 1,
"LE": 2,
}
)
func (x ComparisonFilter_Op) Enum() *ComparisonFilter_Op {
p := new(ComparisonFilter_Op)
*p = x
return p
}
func (x ComparisonFilter_Op) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (ComparisonFilter_Op) Descriptor() protoreflect.EnumDescriptor {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_enumTypes[0].Descriptor()
}
func (ComparisonFilter_Op) Type() protoreflect.EnumType {
return &file_envoy_config_filter_accesslog_v2_accesslog_proto_enumTypes[0]
}
func (x ComparisonFilter_Op) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use ComparisonFilter_Op.Descriptor instead.
func (ComparisonFilter_Op) EnumDescriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{2, 0}
}
type GrpcStatusFilter_Status int32
const (
GrpcStatusFilter_OK GrpcStatusFilter_Status = 0
GrpcStatusFilter_CANCELED GrpcStatusFilter_Status = 1
GrpcStatusFilter_UNKNOWN GrpcStatusFilter_Status = 2
GrpcStatusFilter_INVALID_ARGUMENT GrpcStatusFilter_Status = 3
GrpcStatusFilter_DEADLINE_EXCEEDED GrpcStatusFilter_Status = 4
GrpcStatusFilter_NOT_FOUND GrpcStatusFilter_Status = 5
GrpcStatusFilter_ALREADY_EXISTS GrpcStatusFilter_Status = 6
GrpcStatusFilter_PERMISSION_DENIED GrpcStatusFilter_Status = 7
GrpcStatusFilter_RESOURCE_EXHAUSTED GrpcStatusFilter_Status = 8
GrpcStatusFilter_FAILED_PRECONDITION GrpcStatusFilter_Status = 9
GrpcStatusFilter_ABORTED GrpcStatusFilter_Status = 10
GrpcStatusFilter_OUT_OF_RANGE GrpcStatusFilter_Status = 11
GrpcStatusFilter_UNIMPLEMENTED GrpcStatusFilter_Status = 12
GrpcStatusFilter_INTERNAL GrpcStatusFilter_Status = 13
GrpcStatusFilter_UNAVAILABLE GrpcStatusFilter_Status = 14
GrpcStatusFilter_DATA_LOSS GrpcStatusFilter_Status = 15
GrpcStatusFilter_UNAUTHENTICATED GrpcStatusFilter_Status = 16
)
// Enum value maps for GrpcStatusFilter_Status.
var (
GrpcStatusFilter_Status_name = map[int32]string{
0: "OK",
1: "CANCELED",
2: "UNKNOWN",
3: "INVALID_ARGUMENT",
4: "DEADLINE_EXCEEDED",
5: "NOT_FOUND",
6: "ALREADY_EXISTS",
7: "PERMISSION_DENIED",
8: "RESOURCE_EXHAUSTED",
9: "FAILED_PRECONDITION",
10: "ABORTED",
11: "OUT_OF_RANGE",
12: "UNIMPLEMENTED",
13: "INTERNAL",
14: "UNAVAILABLE",
15: "DATA_LOSS",
16: "UNAUTHENTICATED",
}
GrpcStatusFilter_Status_value = map[string]int32{
"OK": 0,
"CANCELED": 1,
"UNKNOWN": 2,
"INVALID_ARGUMENT": 3,
"DEADLINE_EXCEEDED": 4,
"NOT_FOUND": 5,
"ALREADY_EXISTS": 6,
"PERMISSION_DENIED": 7,
"RESOURCE_EXHAUSTED": 8,
"FAILED_PRECONDITION": 9,
"ABORTED": 10,
"OUT_OF_RANGE": 11,
"UNIMPLEMENTED": 12,
"INTERNAL": 13,
"UNAVAILABLE": 14,
"DATA_LOSS": 15,
"UNAUTHENTICATED": 16,
}
)
func (x GrpcStatusFilter_Status) Enum() *GrpcStatusFilter_Status {
p := new(GrpcStatusFilter_Status)
*p = x
return p
}
func (x GrpcStatusFilter_Status) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (GrpcStatusFilter_Status) Descriptor() protoreflect.EnumDescriptor {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_enumTypes[1].Descriptor()
}
func (GrpcStatusFilter_Status) Type() protoreflect.EnumType {
return &file_envoy_config_filter_accesslog_v2_accesslog_proto_enumTypes[1]
}
func (x GrpcStatusFilter_Status) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use GrpcStatusFilter_Status.Descriptor instead.
func (GrpcStatusFilter_Status) EnumDescriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{12, 0}
}
type AccessLog struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// The name of the access log implementation to instantiate. The name must
// match a statically registered access log. Current built-in loggers include:
//
// #. "envoy.access_loggers.file"
// #. "envoy.access_loggers.http_grpc"
// #. "envoy.access_loggers.tcp_grpc"
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// Filter which is used to determine if the access log needs to be written.
Filter *AccessLogFilter `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"`
// Custom configuration that depends on the access log being instantiated. Built-in
// configurations include:
//
// #. "envoy.access_loggers.file": :ref:`FileAccessLog
// <envoy_api_msg_config.accesslog.v2.FileAccessLog>`
// #. "envoy.access_loggers.http_grpc": :ref:`HttpGrpcAccessLogConfig
// <envoy_api_msg_config.accesslog.v2.HttpGrpcAccessLogConfig>`
// #. "envoy.access_loggers.tcp_grpc": :ref:`TcpGrpcAccessLogConfig
// <envoy_api_msg_config.accesslog.v2.TcpGrpcAccessLogConfig>`
//
// Types that are assignable to ConfigType:
// *AccessLog_Config
// *AccessLog_TypedConfig
ConfigType isAccessLog_ConfigType `protobuf_oneof:"config_type"`
}
func (x *AccessLog) Reset() {
*x = AccessLog{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *AccessLog) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*AccessLog) ProtoMessage() {}
func (x *AccessLog) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use AccessLog.ProtoReflect.Descriptor instead.
func (*AccessLog) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{0}
}
func (x *AccessLog) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *AccessLog) GetFilter() *AccessLogFilter {
if x != nil {
return x.Filter
}
return nil
}
func (m *AccessLog) GetConfigType() isAccessLog_ConfigType {
if m != nil {
return m.ConfigType
}
return nil
}
// Deprecated: Do not use.
func (x *AccessLog) GetConfig() *_struct.Struct {
if x, ok := x.GetConfigType().(*AccessLog_Config); ok {
return x.Config
}
return nil
}
func (x *AccessLog) GetTypedConfig() *any.Any {
if x, ok := x.GetConfigType().(*AccessLog_TypedConfig); ok {
return x.TypedConfig
}
return nil
}
type isAccessLog_ConfigType interface {
isAccessLog_ConfigType()
}
type AccessLog_Config struct {
// Deprecated: Do not use.
Config *_struct.Struct `protobuf:"bytes,3,opt,name=config,proto3,oneof"`
}
type AccessLog_TypedConfig struct {
TypedConfig *any.Any `protobuf:"bytes,4,opt,name=typed_config,json=typedConfig,proto3,oneof"`
}
func (*AccessLog_Config) isAccessLog_ConfigType() {}
func (*AccessLog_TypedConfig) isAccessLog_ConfigType() {}
// [#next-free-field: 12]
type AccessLogFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Types that are assignable to FilterSpecifier:
// *AccessLogFilter_StatusCodeFilter
// *AccessLogFilter_DurationFilter
// *AccessLogFilter_NotHealthCheckFilter
// *AccessLogFilter_TraceableFilter
// *AccessLogFilter_RuntimeFilter
// *AccessLogFilter_AndFilter
// *AccessLogFilter_OrFilter
// *AccessLogFilter_HeaderFilter
// *AccessLogFilter_ResponseFlagFilter
// *AccessLogFilter_GrpcStatusFilter
// *AccessLogFilter_ExtensionFilter
FilterSpecifier isAccessLogFilter_FilterSpecifier `protobuf_oneof:"filter_specifier"`
}
func (x *AccessLogFilter) Reset() {
*x = AccessLogFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *AccessLogFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*AccessLogFilter) ProtoMessage() {}
func (x *AccessLogFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use AccessLogFilter.ProtoReflect.Descriptor instead.
func (*AccessLogFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{1}
}
func (m *AccessLogFilter) GetFilterSpecifier() isAccessLogFilter_FilterSpecifier {
if m != nil {
return m.FilterSpecifier
}
return nil
}
func (x *AccessLogFilter) GetStatusCodeFilter() *StatusCodeFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_StatusCodeFilter); ok {
return x.StatusCodeFilter
}
return nil
}
func (x *AccessLogFilter) GetDurationFilter() *DurationFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_DurationFilter); ok {
return x.DurationFilter
}
return nil
}
func (x *AccessLogFilter) GetNotHealthCheckFilter() *NotHealthCheckFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_NotHealthCheckFilter); ok {
return x.NotHealthCheckFilter
}
return nil
}
func (x *AccessLogFilter) GetTraceableFilter() *TraceableFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_TraceableFilter); ok {
return x.TraceableFilter
}
return nil
}
func (x *AccessLogFilter) GetRuntimeFilter() *RuntimeFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_RuntimeFilter); ok {
return x.RuntimeFilter
}
return nil
}
func (x *AccessLogFilter) GetAndFilter() *AndFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_AndFilter); ok {
return x.AndFilter
}
return nil
}
func (x *AccessLogFilter) GetOrFilter() *OrFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_OrFilter); ok {
return x.OrFilter
}
return nil
}
func (x *AccessLogFilter) GetHeaderFilter() *HeaderFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_HeaderFilter); ok {
return x.HeaderFilter
}
return nil
}
func (x *AccessLogFilter) GetResponseFlagFilter() *ResponseFlagFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_ResponseFlagFilter); ok {
return x.ResponseFlagFilter
}
return nil
}
func (x *AccessLogFilter) GetGrpcStatusFilter() *GrpcStatusFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_GrpcStatusFilter); ok {
return x.GrpcStatusFilter
}
return nil
}
func (x *AccessLogFilter) GetExtensionFilter() *ExtensionFilter {
if x, ok := x.GetFilterSpecifier().(*AccessLogFilter_ExtensionFilter); ok {
return x.ExtensionFilter
}
return nil
}
type isAccessLogFilter_FilterSpecifier interface {
isAccessLogFilter_FilterSpecifier()
}
type AccessLogFilter_StatusCodeFilter struct {
// Status code filter.
StatusCodeFilter *StatusCodeFilter `protobuf:"bytes,1,opt,name=status_code_filter,json=statusCodeFilter,proto3,oneof"`
}
type AccessLogFilter_DurationFilter struct {
// Duration filter.
DurationFilter *DurationFilter `protobuf:"bytes,2,opt,name=duration_filter,json=durationFilter,proto3,oneof"`
}
type AccessLogFilter_NotHealthCheckFilter struct {
// Not health check filter.
NotHealthCheckFilter *NotHealthCheckFilter `protobuf:"bytes,3,opt,name=not_health_check_filter,json=notHealthCheckFilter,proto3,oneof"`
}
type AccessLogFilter_TraceableFilter struct {
// Traceable filter.
TraceableFilter *TraceableFilter `protobuf:"bytes,4,opt,name=traceable_filter,json=traceableFilter,proto3,oneof"`
}
type AccessLogFilter_RuntimeFilter struct {
// Runtime filter.
RuntimeFilter *RuntimeFilter `protobuf:"bytes,5,opt,name=runtime_filter,json=runtimeFilter,proto3,oneof"`
}
type AccessLogFilter_AndFilter struct {
// And filter.
AndFilter *AndFilter `protobuf:"bytes,6,opt,name=and_filter,json=andFilter,proto3,oneof"`
}
type AccessLogFilter_OrFilter struct {
// Or filter.
OrFilter *OrFilter `protobuf:"bytes,7,opt,name=or_filter,json=orFilter,proto3,oneof"`
}
type AccessLogFilter_HeaderFilter struct {
// Header filter.
HeaderFilter *HeaderFilter `protobuf:"bytes,8,opt,name=header_filter,json=headerFilter,proto3,oneof"`
}
type AccessLogFilter_ResponseFlagFilter struct {
// Response flag filter.
ResponseFlagFilter *ResponseFlagFilter `protobuf:"bytes,9,opt,name=response_flag_filter,json=responseFlagFilter,proto3,oneof"`
}
type AccessLogFilter_GrpcStatusFilter struct {
// gRPC status filter.
GrpcStatusFilter *GrpcStatusFilter `protobuf:"bytes,10,opt,name=grpc_status_filter,json=grpcStatusFilter,proto3,oneof"`
}
type AccessLogFilter_ExtensionFilter struct {
// Extension filter.
ExtensionFilter *ExtensionFilter `protobuf:"bytes,11,opt,name=extension_filter,json=extensionFilter,proto3,oneof"`
}
func (*AccessLogFilter_StatusCodeFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_DurationFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_NotHealthCheckFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_TraceableFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_RuntimeFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_AndFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_OrFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_HeaderFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_ResponseFlagFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_GrpcStatusFilter) isAccessLogFilter_FilterSpecifier() {}
func (*AccessLogFilter_ExtensionFilter) isAccessLogFilter_FilterSpecifier() {}
// Filter on an integer comparison.
type ComparisonFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Comparison operator.
Op ComparisonFilter_Op `protobuf:"varint,1,opt,name=op,proto3,enum=envoy.config.filter.accesslog.v2.ComparisonFilter_Op" json:"op,omitempty"`
// Value to compare against.
Value *core.RuntimeUInt32 `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}
func (x *ComparisonFilter) Reset() {
*x = ComparisonFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ComparisonFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ComparisonFilter) ProtoMessage() {}
func (x *ComparisonFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ComparisonFilter.ProtoReflect.Descriptor instead.
func (*ComparisonFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{2}
}
func (x *ComparisonFilter) GetOp() ComparisonFilter_Op {
if x != nil {
return x.Op
}
return ComparisonFilter_EQ
}
func (x *ComparisonFilter) GetValue() *core.RuntimeUInt32 {
if x != nil {
return x.Value
}
return nil
}
// Filters on HTTP response/status code.
type StatusCodeFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Comparison.
Comparison *ComparisonFilter `protobuf:"bytes,1,opt,name=comparison,proto3" json:"comparison,omitempty"`
}
func (x *StatusCodeFilter) Reset() {
*x = StatusCodeFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *StatusCodeFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*StatusCodeFilter) ProtoMessage() {}
func (x *StatusCodeFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use StatusCodeFilter.ProtoReflect.Descriptor instead.
func (*StatusCodeFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{3}
}
func (x *StatusCodeFilter) GetComparison() *ComparisonFilter {
if x != nil {
return x.Comparison
}
return nil
}
// Filters on total request duration in milliseconds.
type DurationFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Comparison.
Comparison *ComparisonFilter `protobuf:"bytes,1,opt,name=comparison,proto3" json:"comparison,omitempty"`
}
func (x *DurationFilter) Reset() {
*x = DurationFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *DurationFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*DurationFilter) ProtoMessage() {}
func (x *DurationFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use DurationFilter.ProtoReflect.Descriptor instead.
func (*DurationFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{4}
}
func (x *DurationFilter) GetComparison() *ComparisonFilter {
if x != nil {
return x.Comparison
}
return nil
}
// Filters for requests that are not health check requests. A health check
// request is marked by the health check filter.
type NotHealthCheckFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *NotHealthCheckFilter) Reset() {
*x = NotHealthCheckFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *NotHealthCheckFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*NotHealthCheckFilter) ProtoMessage() {}
func (x *NotHealthCheckFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use NotHealthCheckFilter.ProtoReflect.Descriptor instead.
func (*NotHealthCheckFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{5}
}
// Filters for requests that are traceable. See the tracing overview for more
// information on how a request becomes traceable.
type TraceableFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *TraceableFilter) Reset() {
*x = TraceableFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *TraceableFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*TraceableFilter) ProtoMessage() {}
func (x *TraceableFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use TraceableFilter.ProtoReflect.Descriptor instead.
func (*TraceableFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{6}
}
// Filters for random sampling of requests.
type RuntimeFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Runtime key to get an optional overridden numerator for use in the *percent_sampled* field.
// If found in runtime, this value will replace the default numerator.
RuntimeKey string `protobuf:"bytes,1,opt,name=runtime_key,json=runtimeKey,proto3" json:"runtime_key,omitempty"`
// The default sampling percentage. If not specified, defaults to 0% with denominator of 100.
PercentSampled *_type.FractionalPercent `protobuf:"bytes,2,opt,name=percent_sampled,json=percentSampled,proto3" json:"percent_sampled,omitempty"`
// By default, sampling pivots on the header
// :ref:`x-request-id<config_http_conn_man_headers_x-request-id>` being present. If
// :ref:`x-request-id<config_http_conn_man_headers_x-request-id>` is present, the filter will
// consistently sample across multiple hosts based on the runtime key value and the value
// extracted from :ref:`x-request-id<config_http_conn_man_headers_x-request-id>`. If it is
// missing, or *use_independent_randomness* is set to true, the filter will randomly sample based
// on the runtime key value alone. *use_independent_randomness* can be used for logging kill
// switches within complex nested :ref:`AndFilter
// <envoy_api_msg_config.filter.accesslog.v2.AndFilter>` and :ref:`OrFilter
// <envoy_api_msg_config.filter.accesslog.v2.OrFilter>` blocks that are easier to reason about
// from a probability perspective (i.e., setting to true will cause the filter to behave like
// an independent random variable when composed within logical operator filters).
UseIndependentRandomness bool `protobuf:"varint,3,opt,name=use_independent_randomness,json=useIndependentRandomness,proto3" json:"use_independent_randomness,omitempty"`
}
func (x *RuntimeFilter) Reset() {
*x = RuntimeFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *RuntimeFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*RuntimeFilter) ProtoMessage() {}
func (x *RuntimeFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use RuntimeFilter.ProtoReflect.Descriptor instead.
func (*RuntimeFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{7}
}
func (x *RuntimeFilter) GetRuntimeKey() string {
if x != nil {
return x.RuntimeKey
}
return ""
}
func (x *RuntimeFilter) GetPercentSampled() *_type.FractionalPercent {
if x != nil {
return x.PercentSampled
}
return nil
}
func (x *RuntimeFilter) GetUseIndependentRandomness() bool {
if x != nil {
return x.UseIndependentRandomness
}
return false
}
// Performs a logical “and” operation on the result of each filter in filters.
// Filters are evaluated sequentially and if one of them returns false, the
// filter returns false immediately.
type AndFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Filters []*AccessLogFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"`
}
func (x *AndFilter) Reset() {
*x = AndFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[8]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *AndFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*AndFilter) ProtoMessage() {}
func (x *AndFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[8]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use AndFilter.ProtoReflect.Descriptor instead.
func (*AndFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{8}
}
func (x *AndFilter) GetFilters() []*AccessLogFilter {
if x != nil {
return x.Filters
}
return nil
}
// Performs a logical “or” operation on the result of each individual filter.
// Filters are evaluated sequentially and if one of them returns true, the
// filter returns true immediately.
type OrFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Filters []*AccessLogFilter `protobuf:"bytes,2,rep,name=filters,proto3" json:"filters,omitempty"`
}
func (x *OrFilter) Reset() {
*x = OrFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[9]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *OrFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*OrFilter) ProtoMessage() {}
func (x *OrFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[9]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use OrFilter.ProtoReflect.Descriptor instead.
func (*OrFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{9}
}
func (x *OrFilter) GetFilters() []*AccessLogFilter {
if x != nil {
return x.Filters
}
return nil
}
// Filters requests based on the presence or value of a request header.
type HeaderFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Only requests with a header which matches the specified HeaderMatcher will pass the filter
// check.
Header *route.HeaderMatcher `protobuf:"bytes,1,opt,name=header,proto3" json:"header,omitempty"`
}
func (x *HeaderFilter) Reset() {
*x = HeaderFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[10]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *HeaderFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*HeaderFilter) ProtoMessage() {}
func (x *HeaderFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[10]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use HeaderFilter.ProtoReflect.Descriptor instead.
func (*HeaderFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{10}
}
func (x *HeaderFilter) GetHeader() *route.HeaderMatcher {
if x != nil {
return x.Header
}
return nil
}
// Filters requests that received responses with an Envoy response flag set.
// A list of the response flags can be found
// in the access log formatter :ref:`documentation<config_access_log_format_response_flags>`.
type ResponseFlagFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Only responses with the any of the flags listed in this field will be logged.
// This field is optional. If it is not specified, then any response flag will pass
// the filter check.
Flags []string `protobuf:"bytes,1,rep,name=flags,proto3" json:"flags,omitempty"`
}
func (x *ResponseFlagFilter) Reset() {
*x = ResponseFlagFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[11]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ResponseFlagFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ResponseFlagFilter) ProtoMessage() {}
func (x *ResponseFlagFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[11]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {<|fim▁hole|> ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ResponseFlagFilter.ProtoReflect.Descriptor instead.
func (*ResponseFlagFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{11}
}
func (x *ResponseFlagFilter) GetFlags() []string {
if x != nil {
return x.Flags
}
return nil
}
// Filters gRPC requests based on their response status. If a gRPC status is not provided, the
// filter will infer the status from the HTTP status code.
type GrpcStatusFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Logs only responses that have any one of the gRPC statuses in this field.
Statuses []GrpcStatusFilter_Status `protobuf:"varint,1,rep,packed,name=statuses,proto3,enum=envoy.config.filter.accesslog.v2.GrpcStatusFilter_Status" json:"statuses,omitempty"`
// If included and set to true, the filter will instead block all responses with a gRPC status or
// inferred gRPC status enumerated in statuses, and allow all other responses.
Exclude bool `protobuf:"varint,2,opt,name=exclude,proto3" json:"exclude,omitempty"`
}
func (x *GrpcStatusFilter) Reset() {
*x = GrpcStatusFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[12]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GrpcStatusFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GrpcStatusFilter) ProtoMessage() {}
func (x *GrpcStatusFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[12]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GrpcStatusFilter.ProtoReflect.Descriptor instead.
func (*GrpcStatusFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{12}
}
func (x *GrpcStatusFilter) GetStatuses() []GrpcStatusFilter_Status {
if x != nil {
return x.Statuses
}
return nil
}
func (x *GrpcStatusFilter) GetExclude() bool {
if x != nil {
return x.Exclude
}
return false
}
// Extension filter is statically registered at runtime.
type ExtensionFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// The name of the filter implementation to instantiate. The name must
// match a statically registered filter.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// Custom configuration that depends on the filter being instantiated.
//
// Types that are assignable to ConfigType:
// *ExtensionFilter_Config
// *ExtensionFilter_TypedConfig
ConfigType isExtensionFilter_ConfigType `protobuf_oneof:"config_type"`
}
func (x *ExtensionFilter) Reset() {
*x = ExtensionFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[13]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ExtensionFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ExtensionFilter) ProtoMessage() {}
func (x *ExtensionFilter) ProtoReflect() protoreflect.Message {
mi := &file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[13]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ExtensionFilter.ProtoReflect.Descriptor instead.
func (*ExtensionFilter) Descriptor() ([]byte, []int) {
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP(), []int{13}
}
func (x *ExtensionFilter) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (m *ExtensionFilter) GetConfigType() isExtensionFilter_ConfigType {
if m != nil {
return m.ConfigType
}
return nil
}
// Deprecated: Do not use.
func (x *ExtensionFilter) GetConfig() *_struct.Struct {
if x, ok := x.GetConfigType().(*ExtensionFilter_Config); ok {
return x.Config
}
return nil
}
func (x *ExtensionFilter) GetTypedConfig() *any.Any {
if x, ok := x.GetConfigType().(*ExtensionFilter_TypedConfig); ok {
return x.TypedConfig
}
return nil
}
type isExtensionFilter_ConfigType interface {
isExtensionFilter_ConfigType()
}
type ExtensionFilter_Config struct {
// Deprecated: Do not use.
Config *_struct.Struct `protobuf:"bytes,2,opt,name=config,proto3,oneof"`
}
type ExtensionFilter_TypedConfig struct {
TypedConfig *any.Any `protobuf:"bytes,3,opt,name=typed_config,json=typedConfig,proto3,oneof"`
}
func (*ExtensionFilter_Config) isExtensionFilter_ConfigType() {}
func (*ExtensionFilter_TypedConfig) isExtensionFilter_ConfigType() {}
var File_envoy_config_filter_accesslog_v2_accesslog_proto protoreflect.FileDescriptor
var file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDesc = []byte{
0x0a, 0x30, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2f, 0x66,
0x69, 0x6c, 0x74, 0x65, 0x72, 0x2f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2f,
0x76, 0x32, 0x2f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x12, 0x20, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67,
0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f,
0x67, 0x2e, 0x76, 0x32, 0x1a, 0x1c, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2f, 0x61, 0x70, 0x69, 0x2f,
0x76, 0x32, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x29, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32,
0x2f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x6d,
0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x18, 0x65,
0x6e, 0x76, 0x6f, 0x79, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x2f, 0x70, 0x65, 0x72, 0x63, 0x65, 0x6e,
0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x1e, 0x75, 0x64, 0x70, 0x61, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x73, 0x2f, 0x6d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x1d, 0x75, 0x64, 0x70, 0x61, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x73, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61,
0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xeb, 0x01, 0x0a, 0x09, 0x41, 0x63, 0x63,
0x65, 0x73, 0x73, 0x4c, 0x6f, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01,
0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x49, 0x0a, 0x06, 0x66, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x6e, 0x76,
0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72,
0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x63,
0x63, 0x65, 0x73, 0x73, 0x4c, 0x6f, 0x67, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66,
0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x35, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18,
0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x42, 0x02,
0x18, 0x01, 0x48, 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x39, 0x0a, 0x0c,
0x74, 0x79, 0x70, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x48, 0x00, 0x52, 0x0b, 0x74, 0x79, 0x70, 0x65,
0x64, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x0d, 0x0a, 0x0b, 0x63, 0x6f, 0x6e, 0x66, 0x69,
0x67, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x22, 0xb4, 0x08, 0x0a, 0x0f, 0x41, 0x63, 0x63, 0x65, 0x73,
0x73, 0x4c, 0x6f, 0x67, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x62, 0x0a, 0x12, 0x73, 0x74,
0x61, 0x74, 0x75, 0x73, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72,
0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63,
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63,
0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73,
0x43, 0x6f, 0x64, 0x65, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x10, 0x73, 0x74,
0x61, 0x74, 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x5b,
0x0a, 0x0f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65,
0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e,
0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63,
0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x64, 0x75, 0x72,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x6f, 0x0a, 0x17, 0x6e,
0x6f, 0x74, 0x5f, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x5f,
0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x65,
0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74,
0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e,
0x4e, 0x6f, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x46, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x14, 0x6e, 0x6f, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74,
0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x5e, 0x0a, 0x10,
0x74, 0x72, 0x61, 0x63, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72,
0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63,
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63,
0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x54, 0x72, 0x61, 0x63, 0x65, 0x61,
0x62, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x0f, 0x74, 0x72, 0x61,
0x63, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x58, 0x0a, 0x0e,
0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x05,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e,
0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73,
0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x46,
0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65,
0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x4c, 0x0a, 0x0a, 0x61, 0x6e, 0x64, 0x5f, 0x66, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x65, 0x6e, 0x76,
0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72,
0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e,
0x64, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x09, 0x61, 0x6e, 0x64, 0x46, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x12, 0x49, 0x0a, 0x09, 0x6f, 0x72, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65,
0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e,
0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63,
0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x4f, 0x72, 0x46, 0x69, 0x6c,
0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x08, 0x6f, 0x72, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12,
0x55, 0x0a, 0x0d, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72,
0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63,
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63,
0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72,
0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72,
0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x68, 0x0a, 0x14, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x73, 0x65, 0x5f, 0x66, 0x6c, 0x61, 0x67, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x09,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e,
0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73,
0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
0x46, 0x6c, 0x61, 0x67, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x12, 0x72, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72,
0x12, 0x62, 0x0a, 0x12, 0x67, 0x72, 0x70, 0x63, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f,
0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x65,
0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74,
0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e,
0x47, 0x72, 0x70, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72,
0x48, 0x00, 0x52, 0x10, 0x67, 0x72, 0x70, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x46, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x12, 0x5e, 0x0a, 0x10, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f,
0x6e, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31,
0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76,
0x32, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x46, 0x69, 0x6c, 0x74, 0x65,
0x72, 0x48, 0x00, 0x52, 0x0f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x46, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x42, 0x17, 0x0a, 0x10, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x5f, 0x73,
0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x03, 0xf8, 0x42, 0x01, 0x22, 0xb9, 0x01,
0x0a, 0x10, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x46, 0x69, 0x6c, 0x74,
0x65, 0x72, 0x12, 0x4f, 0x0a, 0x02, 0x6f, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x35,
0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76,
0x32, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x46, 0x69, 0x6c, 0x74,
0x65, 0x72, 0x2e, 0x4f, 0x70, 0x42, 0x08, 0xfa, 0x42, 0x05, 0x82, 0x01, 0x02, 0x10, 0x01, 0x52,
0x02, 0x6f, 0x70, 0x12, 0x36, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x20, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76,
0x32, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x55, 0x49,
0x6e, 0x74, 0x33, 0x32, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x1c, 0x0a, 0x02, 0x4f,
0x70, 0x12, 0x06, 0x0a, 0x02, 0x45, 0x51, 0x10, 0x00, 0x12, 0x06, 0x0a, 0x02, 0x47, 0x45, 0x10,
0x01, 0x12, 0x06, 0x0a, 0x02, 0x4c, 0x45, 0x10, 0x02, 0x22, 0x70, 0x0a, 0x10, 0x53, 0x74, 0x61,
0x74, 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x5c, 0x0a,
0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x32, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67,
0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f,
0x67, 0x2e, 0x76, 0x32, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x46,
0x69, 0x6c, 0x74, 0x65, 0x72, 0x42, 0x08, 0xfa, 0x42, 0x05, 0x8a, 0x01, 0x02, 0x10, 0x01, 0x52,
0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x22, 0x6e, 0x0a, 0x0e, 0x44,
0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x5c, 0x0a,
0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x32, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67,
0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f,
0x67, 0x2e, 0x76, 0x32, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x46,
0x69, 0x6c, 0x74, 0x65, 0x72, 0x42, 0x08, 0xfa, 0x42, 0x05, 0x8a, 0x01, 0x02, 0x10, 0x01, 0x52,
0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x22, 0x16, 0x0a, 0x14, 0x4e,
0x6f, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x46, 0x69, 0x6c,
0x74, 0x65, 0x72, 0x22, 0x11, 0x0a, 0x0f, 0x54, 0x72, 0x61, 0x63, 0x65, 0x61, 0x62, 0x6c, 0x65,
0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0xbf, 0x01, 0x0a, 0x0d, 0x52, 0x75, 0x6e, 0x74, 0x69,
0x6d, 0x65, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x28, 0x0a, 0x0b, 0x72, 0x75, 0x6e, 0x74,
0x69, 0x6d, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xfa,
0x42, 0x04, 0x72, 0x02, 0x20, 0x01, 0x52, 0x0a, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x4b,
0x65, 0x79, 0x12, 0x46, 0x0a, 0x0f, 0x70, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x61,
0x6d, 0x70, 0x6c, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x65, 0x6e,
0x76, 0x6f, 0x79, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x46, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f,
0x6e, 0x61, 0x6c, 0x50, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x52, 0x0e, 0x70, 0x65, 0x72, 0x63,
0x65, 0x6e, 0x74, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x64, 0x12, 0x3c, 0x0a, 0x1a, 0x75, 0x73,
0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x61,
0x6e, 0x64, 0x6f, 0x6d, 0x6e, 0x65, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x18,
0x75, 0x73, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x74, 0x52, 0x61,
0x6e, 0x64, 0x6f, 0x6d, 0x6e, 0x65, 0x73, 0x73, 0x22, 0x62, 0x0a, 0x09, 0x41, 0x6e, 0x64, 0x46,
0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x55, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73,
0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63,
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63,
0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73,
0x4c, 0x6f, 0x67, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x42, 0x08, 0xfa, 0x42, 0x05, 0x92, 0x01,
0x02, 0x08, 0x02, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x22, 0x61, 0x0a, 0x08,
0x4f, 0x72, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x55, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x74,
0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x65, 0x6e, 0x76, 0x6f,
0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e,
0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x63, 0x63,
0x65, 0x73, 0x73, 0x4c, 0x6f, 0x67, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x42, 0x08, 0xfa, 0x42,
0x05, 0x92, 0x01, 0x02, 0x08, 0x02, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x22,
0x53, 0x0a, 0x0c, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12,
0x43, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x21, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x72,
0x6f, 0x75, 0x74, 0x65, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x4d, 0x61, 0x74, 0x63, 0x68,
0x65, 0x72, 0x42, 0x08, 0xfa, 0x42, 0x05, 0x8a, 0x01, 0x02, 0x10, 0x01, 0x52, 0x06, 0x68, 0x65,
0x61, 0x64, 0x65, 0x72, 0x22, 0x88, 0x01, 0x0a, 0x12, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x46, 0x6c, 0x61, 0x67, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x72, 0x0a, 0x05, 0x66,
0x6c, 0x61, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x42, 0x5c, 0xfa, 0x42, 0x59, 0x92,
0x01, 0x56, 0x22, 0x54, 0x72, 0x52, 0x52, 0x02, 0x4c, 0x48, 0x52, 0x02, 0x55, 0x48, 0x52, 0x02,
0x55, 0x54, 0x52, 0x02, 0x4c, 0x52, 0x52, 0x02, 0x55, 0x52, 0x52, 0x02, 0x55, 0x46, 0x52, 0x02,
0x55, 0x43, 0x52, 0x02, 0x55, 0x4f, 0x52, 0x02, 0x4e, 0x52, 0x52, 0x02, 0x44, 0x49, 0x52, 0x02,
0x46, 0x49, 0x52, 0x02, 0x52, 0x4c, 0x52, 0x04, 0x55, 0x41, 0x45, 0x58, 0x52, 0x04, 0x52, 0x4c,
0x53, 0x45, 0x52, 0x02, 0x44, 0x43, 0x52, 0x03, 0x55, 0x52, 0x58, 0x52, 0x02, 0x53, 0x49, 0x52,
0x02, 0x49, 0x48, 0x52, 0x03, 0x44, 0x50, 0x45, 0x52, 0x05, 0x66, 0x6c, 0x61, 0x67, 0x73, 0x22,
0xcd, 0x03, 0x0a, 0x10, 0x47, 0x72, 0x70, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x46, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x12, 0x64, 0x0a, 0x08, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73,
0x18, 0x01, 0x20, 0x03, 0x28, 0x0e, 0x32, 0x39, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63,
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63, 0x63,
0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x2e, 0x47, 0x72, 0x70, 0x63, 0x53, 0x74,
0x61, 0x74, 0x75, 0x73, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75,
0x73, 0x42, 0x0d, 0xfa, 0x42, 0x0a, 0x92, 0x01, 0x07, 0x22, 0x05, 0x82, 0x01, 0x02, 0x10, 0x01,
0x52, 0x08, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x78,
0x63, 0x6c, 0x75, 0x64, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x78, 0x63,
0x6c, 0x75, 0x64, 0x65, 0x22, 0xb8, 0x02, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12,
0x06, 0x0a, 0x02, 0x4f, 0x4b, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x41, 0x4e, 0x43, 0x45,
0x4c, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e,
0x10, 0x02, 0x12, 0x14, 0x0a, 0x10, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x41, 0x52,
0x47, 0x55, 0x4d, 0x45, 0x4e, 0x54, 0x10, 0x03, 0x12, 0x15, 0x0a, 0x11, 0x44, 0x45, 0x41, 0x44,
0x4c, 0x49, 0x4e, 0x45, 0x5f, 0x45, 0x58, 0x43, 0x45, 0x45, 0x44, 0x45, 0x44, 0x10, 0x04, 0x12,
0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x54, 0x5f, 0x46, 0x4f, 0x55, 0x4e, 0x44, 0x10, 0x05, 0x12, 0x12,
0x0a, 0x0e, 0x41, 0x4c, 0x52, 0x45, 0x41, 0x44, 0x59, 0x5f, 0x45, 0x58, 0x49, 0x53, 0x54, 0x53,
0x10, 0x06, 0x12, 0x15, 0x0a, 0x11, 0x50, 0x45, 0x52, 0x4d, 0x49, 0x53, 0x53, 0x49, 0x4f, 0x4e,
0x5f, 0x44, 0x45, 0x4e, 0x49, 0x45, 0x44, 0x10, 0x07, 0x12, 0x16, 0x0a, 0x12, 0x52, 0x45, 0x53,
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x58, 0x48, 0x41, 0x55, 0x53, 0x54, 0x45, 0x44, 0x10,
0x08, 0x12, 0x17, 0x0a, 0x13, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x5f, 0x50, 0x52, 0x45, 0x43,
0x4f, 0x4e, 0x44, 0x49, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x09, 0x12, 0x0b, 0x0a, 0x07, 0x41, 0x42,
0x4f, 0x52, 0x54, 0x45, 0x44, 0x10, 0x0a, 0x12, 0x10, 0x0a, 0x0c, 0x4f, 0x55, 0x54, 0x5f, 0x4f,
0x46, 0x5f, 0x52, 0x41, 0x4e, 0x47, 0x45, 0x10, 0x0b, 0x12, 0x11, 0x0a, 0x0d, 0x55, 0x4e, 0x49,
0x4d, 0x50, 0x4c, 0x45, 0x4d, 0x45, 0x4e, 0x54, 0x45, 0x44, 0x10, 0x0c, 0x12, 0x0c, 0x0a, 0x08,
0x49, 0x4e, 0x54, 0x45, 0x52, 0x4e, 0x41, 0x4c, 0x10, 0x0d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e,
0x41, 0x56, 0x41, 0x49, 0x4c, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x0e, 0x12, 0x0d, 0x0a, 0x09, 0x44,
0x41, 0x54, 0x41, 0x5f, 0x4c, 0x4f, 0x53, 0x53, 0x10, 0x0f, 0x12, 0x13, 0x0a, 0x0f, 0x55, 0x4e,
0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x10, 0x22,
0xa6, 0x01, 0x0a, 0x0f, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x46, 0x69, 0x6c,
0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69,
0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74,
0x42, 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x39,
0x0a, 0x0c, 0x74, 0x79, 0x70, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x48, 0x00, 0x52, 0x0b, 0x74, 0x79,
0x70, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x0d, 0x0a, 0x0b, 0x63, 0x6f, 0x6e,
0x66, 0x69, 0x67, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x6b, 0x0a, 0x2e, 0x69, 0x6f, 0x2e, 0x65,
0x6e, 0x76, 0x6f, 0x79, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e,
0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x61, 0x63,
0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x32, 0x42, 0x0e, 0x41, 0x63, 0x63, 0x65,
0x73, 0x73, 0x6c, 0x6f, 0x67, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0xf2, 0x98, 0xfe, 0x8f,
0x05, 0x1b, 0x12, 0x19, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67,
0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x33, 0xba, 0x80, 0xc8,
0xd1, 0x06, 0x02, 0x10, 0x01, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescOnce sync.Once
file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescData = file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDesc
)
func file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescGZIP() []byte {
file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescOnce.Do(func() {
file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescData = protoimpl.X.CompressGZIP(file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescData)
})
return file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDescData
}
var file_envoy_config_filter_accesslog_v2_accesslog_proto_enumTypes = make([]protoimpl.EnumInfo, 2)
var file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes = make([]protoimpl.MessageInfo, 14)
var file_envoy_config_filter_accesslog_v2_accesslog_proto_goTypes = []interface{}{
(ComparisonFilter_Op)(0), // 0: envoy.config.filter.accesslog.v2.ComparisonFilter.Op
(GrpcStatusFilter_Status)(0), // 1: envoy.config.filter.accesslog.v2.GrpcStatusFilter.Status
(*AccessLog)(nil), // 2: envoy.config.filter.accesslog.v2.AccessLog
(*AccessLogFilter)(nil), // 3: envoy.config.filter.accesslog.v2.AccessLogFilter
(*ComparisonFilter)(nil), // 4: envoy.config.filter.accesslog.v2.ComparisonFilter
(*StatusCodeFilter)(nil), // 5: envoy.config.filter.accesslog.v2.StatusCodeFilter
(*DurationFilter)(nil), // 6: envoy.config.filter.accesslog.v2.DurationFilter
(*NotHealthCheckFilter)(nil), // 7: envoy.config.filter.accesslog.v2.NotHealthCheckFilter
(*TraceableFilter)(nil), // 8: envoy.config.filter.accesslog.v2.TraceableFilter
(*RuntimeFilter)(nil), // 9: envoy.config.filter.accesslog.v2.RuntimeFilter
(*AndFilter)(nil), // 10: envoy.config.filter.accesslog.v2.AndFilter
(*OrFilter)(nil), // 11: envoy.config.filter.accesslog.v2.OrFilter
(*HeaderFilter)(nil), // 12: envoy.config.filter.accesslog.v2.HeaderFilter
(*ResponseFlagFilter)(nil), // 13: envoy.config.filter.accesslog.v2.ResponseFlagFilter
(*GrpcStatusFilter)(nil), // 14: envoy.config.filter.accesslog.v2.GrpcStatusFilter
(*ExtensionFilter)(nil), // 15: envoy.config.filter.accesslog.v2.ExtensionFilter
(*_struct.Struct)(nil), // 16: google.protobuf.Struct
(*any.Any)(nil), // 17: google.protobuf.Any
(*core.RuntimeUInt32)(nil), // 18: envoy.api.v2.core.RuntimeUInt32
(*_type.FractionalPercent)(nil), // 19: envoy.type.FractionalPercent
(*route.HeaderMatcher)(nil), // 20: envoy.api.v2.route.HeaderMatcher
}
var file_envoy_config_filter_accesslog_v2_accesslog_proto_depIdxs = []int32{
3, // 0: envoy.config.filter.accesslog.v2.AccessLog.filter:type_name -> envoy.config.filter.accesslog.v2.AccessLogFilter
16, // 1: envoy.config.filter.accesslog.v2.AccessLog.config:type_name -> google.protobuf.Struct
17, // 2: envoy.config.filter.accesslog.v2.AccessLog.typed_config:type_name -> google.protobuf.Any
5, // 3: envoy.config.filter.accesslog.v2.AccessLogFilter.status_code_filter:type_name -> envoy.config.filter.accesslog.v2.StatusCodeFilter
6, // 4: envoy.config.filter.accesslog.v2.AccessLogFilter.duration_filter:type_name -> envoy.config.filter.accesslog.v2.DurationFilter
7, // 5: envoy.config.filter.accesslog.v2.AccessLogFilter.not_health_check_filter:type_name -> envoy.config.filter.accesslog.v2.NotHealthCheckFilter
8, // 6: envoy.config.filter.accesslog.v2.AccessLogFilter.traceable_filter:type_name -> envoy.config.filter.accesslog.v2.TraceableFilter
9, // 7: envoy.config.filter.accesslog.v2.AccessLogFilter.runtime_filter:type_name -> envoy.config.filter.accesslog.v2.RuntimeFilter
10, // 8: envoy.config.filter.accesslog.v2.AccessLogFilter.and_filter:type_name -> envoy.config.filter.accesslog.v2.AndFilter
11, // 9: envoy.config.filter.accesslog.v2.AccessLogFilter.or_filter:type_name -> envoy.config.filter.accesslog.v2.OrFilter
12, // 10: envoy.config.filter.accesslog.v2.AccessLogFilter.header_filter:type_name -> envoy.config.filter.accesslog.v2.HeaderFilter
13, // 11: envoy.config.filter.accesslog.v2.AccessLogFilter.response_flag_filter:type_name -> envoy.config.filter.accesslog.v2.ResponseFlagFilter
14, // 12: envoy.config.filter.accesslog.v2.AccessLogFilter.grpc_status_filter:type_name -> envoy.config.filter.accesslog.v2.GrpcStatusFilter
15, // 13: envoy.config.filter.accesslog.v2.AccessLogFilter.extension_filter:type_name -> envoy.config.filter.accesslog.v2.ExtensionFilter
0, // 14: envoy.config.filter.accesslog.v2.ComparisonFilter.op:type_name -> envoy.config.filter.accesslog.v2.ComparisonFilter.Op
18, // 15: envoy.config.filter.accesslog.v2.ComparisonFilter.value:type_name -> envoy.api.v2.core.RuntimeUInt32
4, // 16: envoy.config.filter.accesslog.v2.StatusCodeFilter.comparison:type_name -> envoy.config.filter.accesslog.v2.ComparisonFilter
4, // 17: envoy.config.filter.accesslog.v2.DurationFilter.comparison:type_name -> envoy.config.filter.accesslog.v2.ComparisonFilter
19, // 18: envoy.config.filter.accesslog.v2.RuntimeFilter.percent_sampled:type_name -> envoy.type.FractionalPercent
3, // 19: envoy.config.filter.accesslog.v2.AndFilter.filters:type_name -> envoy.config.filter.accesslog.v2.AccessLogFilter
3, // 20: envoy.config.filter.accesslog.v2.OrFilter.filters:type_name -> envoy.config.filter.accesslog.v2.AccessLogFilter
20, // 21: envoy.config.filter.accesslog.v2.HeaderFilter.header:type_name -> envoy.api.v2.route.HeaderMatcher
1, // 22: envoy.config.filter.accesslog.v2.GrpcStatusFilter.statuses:type_name -> envoy.config.filter.accesslog.v2.GrpcStatusFilter.Status
16, // 23: envoy.config.filter.accesslog.v2.ExtensionFilter.config:type_name -> google.protobuf.Struct
17, // 24: envoy.config.filter.accesslog.v2.ExtensionFilter.typed_config:type_name -> google.protobuf.Any
25, // [25:25] is the sub-list for method output_type
25, // [25:25] is the sub-list for method input_type
25, // [25:25] is the sub-list for extension type_name
25, // [25:25] is the sub-list for extension extendee
0, // [0:25] is the sub-list for field type_name
}
func init() { file_envoy_config_filter_accesslog_v2_accesslog_proto_init() }
func file_envoy_config_filter_accesslog_v2_accesslog_proto_init() {
if File_envoy_config_filter_accesslog_v2_accesslog_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*AccessLog); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*AccessLogFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ComparisonFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*StatusCodeFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*DurationFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*NotHealthCheckFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*TraceableFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*RuntimeFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*AndFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*OrFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*HeaderFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ResponseFlagFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GrpcStatusFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExtensionFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[0].OneofWrappers = []interface{}{
(*AccessLog_Config)(nil),
(*AccessLog_TypedConfig)(nil),
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[1].OneofWrappers = []interface{}{
(*AccessLogFilter_StatusCodeFilter)(nil),
(*AccessLogFilter_DurationFilter)(nil),
(*AccessLogFilter_NotHealthCheckFilter)(nil),
(*AccessLogFilter_TraceableFilter)(nil),
(*AccessLogFilter_RuntimeFilter)(nil),
(*AccessLogFilter_AndFilter)(nil),
(*AccessLogFilter_OrFilter)(nil),
(*AccessLogFilter_HeaderFilter)(nil),
(*AccessLogFilter_ResponseFlagFilter)(nil),
(*AccessLogFilter_GrpcStatusFilter)(nil),
(*AccessLogFilter_ExtensionFilter)(nil),
}
file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes[13].OneofWrappers = []interface{}{
(*ExtensionFilter_Config)(nil),
(*ExtensionFilter_TypedConfig)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDesc,
NumEnums: 2,
NumMessages: 14,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_envoy_config_filter_accesslog_v2_accesslog_proto_goTypes,
DependencyIndexes: file_envoy_config_filter_accesslog_v2_accesslog_proto_depIdxs,
EnumInfos: file_envoy_config_filter_accesslog_v2_accesslog_proto_enumTypes,
MessageInfos: file_envoy_config_filter_accesslog_v2_accesslog_proto_msgTypes,
}.Build()
File_envoy_config_filter_accesslog_v2_accesslog_proto = out.File
file_envoy_config_filter_accesslog_v2_accesslog_proto_rawDesc = nil
file_envoy_config_filter_accesslog_v2_accesslog_proto_goTypes = nil
file_envoy_config_filter_accesslog_v2_accesslog_proto_depIdxs = nil
}<|fim▁end|> | |
<|file_name|>plotting.py<|end_file_name|><|fim▁begin|># being a bit too dynamic
# pylint: disable=E1101
import datetime
import warnings
import re
from math import ceil
from collections import namedtuple
from contextlib import contextmanager
from distutils.version import LooseVersion
import numpy as np
from pandas.util.decorators import cache_readonly, deprecate_kwarg
import pandas.core.common as com
from pandas.core.common import AbstractMethodError
from pandas.core.generic import _shared_docs, _shared_doc_kwargs
from pandas.core.index import Index, MultiIndex
from pandas.core.series import Series, remove_na
from pandas.tseries.index import DatetimeIndex
from pandas.tseries.period import PeriodIndex, Period
import pandas.tseries.frequencies as frequencies
from pandas.tseries.offsets import DateOffset
from pandas.compat import range, lrange, lmap, map, zip, string_types
import pandas.compat as compat
from pandas.util.decorators import Appender
try: # mpl optional
import pandas.tseries.converter as conv
conv.register() # needs to override so set_xlim works with str/number
except ImportError:
pass
# Extracted from https://gist.github.com/huyng/816622
# this is the rcParams set when setting display.with_mpl_style
# to True.
mpl_stylesheet = {
'axes.axisbelow': True,
'axes.color_cycle': ['#348ABD',
'#7A68A6',
'#A60628',
'#467821',
'#CF4457',
'#188487',
'#E24A33'],
'axes.edgecolor': '#bcbcbc',
'axes.facecolor': '#eeeeee',
'axes.grid': True,
'axes.labelcolor': '#555555',
'axes.labelsize': 'large',
'axes.linewidth': 1.0,
'axes.titlesize': 'x-large',
'figure.edgecolor': 'white',
'figure.facecolor': 'white',
'figure.figsize': (6.0, 4.0),
'figure.subplot.hspace': 0.5,
'font.family': 'monospace',
'font.monospace': ['Andale Mono',
'Nimbus Mono L',
'Courier New',
'Courier',
'Fixed',
'Terminal',
'monospace'],
'font.size': 10,
'interactive': True,
'keymap.all_axes': ['a'],
'keymap.back': ['left', 'c', 'backspace'],
'keymap.forward': ['right', 'v'],
'keymap.fullscreen': ['f'],
'keymap.grid': ['g'],
'keymap.home': ['h', 'r', 'home'],
'keymap.pan': ['p'],
'keymap.save': ['s'],
'keymap.xscale': ['L', 'k'],
'keymap.yscale': ['l'],
'keymap.zoom': ['o'],
'legend.fancybox': True,
'lines.antialiased': True,
'lines.linewidth': 1.0,
'patch.antialiased': True,
'patch.edgecolor': '#EEEEEE',
'patch.facecolor': '#348ABD',
'patch.linewidth': 0.5,
'toolbar': 'toolbar2',
'xtick.color': '#555555',
'xtick.direction': 'in',
'xtick.major.pad': 6.0,
'xtick.major.size': 0.0,
'xtick.minor.pad': 6.0,
'xtick.minor.size': 0.0,
'ytick.color': '#555555',
'ytick.direction': 'in',
'ytick.major.pad': 6.0,
'ytick.major.size': 0.0,
'ytick.minor.pad': 6.0,
'ytick.minor.size': 0.0
}
def _get_standard_kind(kind):
return {'density': 'kde'}.get(kind, kind)
def _get_standard_colors(num_colors=None, colormap=None, color_type='default',
color=None):
import matplotlib.pyplot as plt
if color is None and colormap is not None:
if isinstance(colormap, compat.string_types):
import matplotlib.cm as cm
cmap = colormap
colormap = cm.get_cmap(colormap)
if colormap is None:
raise ValueError("Colormap {0} is not recognized".format(cmap))
colors = lmap(colormap, np.linspace(0, 1, num=num_colors))
elif color is not None:
if colormap is not None:
warnings.warn("'color' and 'colormap' cannot be used "
"simultaneously. Using 'color'")
colors = color
else:
if color_type == 'default':
# need to call list() on the result to copy so we don't
# modify the global rcParams below
colors = list(plt.rcParams.get('axes.color_cycle',
list('bgrcmyk')))
if isinstance(colors, compat.string_types):
colors = list(colors)
elif color_type == 'random':
import random
def random_color(column):
random.seed(column)
return [random.random() for _ in range(3)]
colors = lmap(random_color, lrange(num_colors))
else:
raise ValueError("color_type must be either 'default' or 'random'")
if len(colors) != num_colors:
multiple = num_colors//len(colors) - 1
mod = num_colors % len(colors)
colors += multiple * colors
colors += colors[:mod]
return colors
class _Options(dict):
"""
Stores pandas plotting options.
Allows for parameter aliasing so you can just use parameter names that are
the same as the plot function parameters, but is stored in a canonical
format that makes it easy to breakdown into groups later
"""
# alias so the names are same as plotting method parameter names
_ALIASES = {'x_compat': 'xaxis.compat'}
_DEFAULT_KEYS = ['xaxis.compat']
def __init__(self):
self['xaxis.compat'] = False
def __getitem__(self, key):
key = self._get_canonical_key(key)
if key not in self:
raise ValueError('%s is not a valid pandas plotting option' % key)
return super(_Options, self).__getitem__(key)
def __setitem__(self, key, value):
key = self._get_canonical_key(key)
return super(_Options, self).__setitem__(key, value)
def __delitem__(self, key):
key = self._get_canonical_key(key)
if key in self._DEFAULT_KEYS:
raise ValueError('Cannot remove default parameter %s' % key)
return super(_Options, self).__delitem__(key)
def __contains__(self, key):
key = self._get_canonical_key(key)
return super(_Options, self).__contains__(key)
def reset(self):
"""
Reset the option store to its initial state
Returns
-------
None
"""
self.__init__()
def _get_canonical_key(self, key):
return self._ALIASES.get(key, key)
@contextmanager
def use(self, key, value):
"""
Temporarily set a parameter value using the with statement.
Aliasing allowed.
"""
old_value = self[key]
try:
self[key] = value
yield self
finally:
self[key] = old_value
plot_params = _Options()
def scatter_matrix(frame, alpha=0.5, figsize=None, ax=None, grid=False,
diagonal='hist', marker='.', density_kwds=None,
hist_kwds=None, range_padding=0.05, **kwds):
"""
Draw a matrix of scatter plots.
Parameters
----------
frame : DataFrame
alpha : float, optional
amount of transparency applied
figsize : (float,float), optional
a tuple (width, height) in inches
ax : Matplotlib axis object, optional
grid : bool, optional
setting this to True will show the grid
diagonal : {'hist', 'kde'}
pick between 'kde' and 'hist' for
either Kernel Density Estimation or Histogram
plot in the diagonal
marker : str, optional
Matplotlib marker type, default '.'
hist_kwds : other plotting keyword arguments
To be passed to hist function
density_kwds : other plotting keyword arguments
To be passed to kernel density estimate plot
range_padding : float, optional
relative extension of axis range in x and y
with respect to (x_max - x_min) or (y_max - y_min),
default 0.05
kwds : other plotting keyword arguments
To be passed to scatter function
Examples
--------
>>> df = DataFrame(np.random.randn(1000, 4), columns=['A','B','C','D'])
>>> scatter_matrix(df, alpha=0.2)
"""
import matplotlib.pyplot as plt
from matplotlib.artist import setp
df = frame._get_numeric_data()
n = df.columns.size
naxes = n * n
fig, axes = _subplots(naxes=naxes, figsize=figsize, ax=ax,
squeeze=False)
# no gaps between subplots
fig.subplots_adjust(wspace=0, hspace=0)
mask = com.notnull(df)
marker = _get_marker_compat(marker)
hist_kwds = hist_kwds or {}
density_kwds = density_kwds or {}
# workaround because `c='b'` is hardcoded in matplotlibs scatter method
kwds.setdefault('c', plt.rcParams['patch.facecolor'])
boundaries_list = []
for a in df.columns:
values = df[a].values[mask[a].values]
rmin_, rmax_ = np.min(values), np.max(values)
rdelta_ext = (rmax_ - rmin_) * range_padding / 2.
boundaries_list.append((rmin_ - rdelta_ext, rmax_+ rdelta_ext))
for i, a in zip(lrange(n), df.columns):
for j, b in zip(lrange(n), df.columns):
ax = axes[i, j]
if i == j:
values = df[a].values[mask[a].values]
# Deal with the diagonal by drawing a histogram there.
if diagonal == 'hist':
ax.hist(values, **hist_kwds)
elif diagonal in ('kde', 'density'):
from scipy.stats import gaussian_kde
y = values
gkde = gaussian_kde(y)
ind = np.linspace(y.min(), y.max(), 1000)
ax.plot(ind, gkde.evaluate(ind), **density_kwds)
ax.set_xlim(boundaries_list[i])
else:
common = (mask[a] & mask[b]).values
ax.scatter(df[b][common], df[a][common],
marker=marker, alpha=alpha, **kwds)
ax.set_xlim(boundaries_list[j])
ax.set_ylim(boundaries_list[i])
ax.set_xlabel(b)
ax.set_ylabel(a)
if j!= 0:
ax.yaxis.set_visible(False)
if i != n-1:
ax.xaxis.set_visible(False)
if len(df.columns) > 1:
lim1 = boundaries_list[0]
locs = axes[0][1].yaxis.get_majorticklocs()
locs = locs[(lim1[0] <= locs) & (locs <= lim1[1])]
adj = (locs - lim1[0]) / (lim1[1] - lim1[0])
lim0 = axes[0][0].get_ylim()
adj = adj * (lim0[1] - lim0[0]) + lim0[0]
axes[0][0].yaxis.set_ticks(adj)
if np.all(locs == locs.astype(int)):
# if all ticks are int
locs = locs.astype(int)
axes[0][0].yaxis.set_ticklabels(locs)
_set_ticks_props(axes, xlabelsize=8, xrot=90, ylabelsize=8, yrot=0)
return axes
def _gca():
import matplotlib.pyplot as plt
return plt.gca()
def _gcf():
import matplotlib.pyplot as plt
return plt.gcf()
def _get_marker_compat(marker):
import matplotlib.lines as mlines
import matplotlib as mpl
if mpl.__version__ < '1.1.0' and marker == '.':
return 'o'
if marker not in mlines.lineMarkers:
return 'o'
return marker
def radviz(frame, class_column, ax=None, color=None, colormap=None, **kwds):
"""RadViz - a multivariate data visualization algorithm
Parameters:
-----------
frame: DataFrame
class_column: str
Column name containing class names
ax: Matplotlib axis object, optional
color: list or tuple, optional
Colors to use for the different classes
colormap : str or matplotlib colormap object, default None
Colormap to select colors from. If string, load colormap with that name
from matplotlib.
kwds: keywords
Options to pass to matplotlib scatter plotting method
Returns:
--------
ax: Matplotlib axis object
"""
import matplotlib.pyplot as plt
import matplotlib.patches as patches
def normalize(series):
a = min(series)
b = max(series)
return (series - a) / (b - a)
n = len(frame)
classes = frame[class_column].drop_duplicates()
class_col = frame[class_column]
df = frame.drop(class_column, axis=1).apply(normalize)
if ax is None:
ax = plt.gca(xlim=[-1, 1], ylim=[-1, 1])
to_plot = {}
colors = _get_standard_colors(num_colors=len(classes), colormap=colormap,
color_type='random', color=color)
for kls in classes:
to_plot[kls] = [[], []]
m = len(frame.columns) - 1
s = np.array([(np.cos(t), np.sin(t))
for t in [2.0 * np.pi * (i / float(m))
for i in range(m)]])
for i in range(n):
row = df.iloc[i].values
row_ = np.repeat(np.expand_dims(row, axis=1), 2, axis=1)
y = (s * row_).sum(axis=0) / row.sum()
kls = class_col.iat[i]
to_plot[kls][0].append(y[0])
to_plot[kls][1].append(y[1])
for i, kls in enumerate(classes):
ax.scatter(to_plot[kls][0], to_plot[kls][1], color=colors[i],
label=com.pprint_thing(kls), **kwds)
ax.legend()
ax.add_patch(patches.Circle((0.0, 0.0), radius=1.0, facecolor='none'))
for xy, name in zip(s, df.columns):
ax.add_patch(patches.Circle(xy, radius=0.025, facecolor='gray'))
if xy[0] < 0.0 and xy[1] < 0.0:
ax.text(xy[0] - 0.025, xy[1] - 0.025, name,
ha='right', va='top', size='small')
elif xy[0] < 0.0 and xy[1] >= 0.0:
ax.text(xy[0] - 0.025, xy[1] + 0.025, name,
ha='right', va='bottom', size='small')
elif xy[0] >= 0.0 and xy[1] < 0.0:
ax.text(xy[0] + 0.025, xy[1] - 0.025, name,
ha='left', va='top', size='small')
elif xy[0] >= 0.0 and xy[1] >= 0.0:
ax.text(xy[0] + 0.025, xy[1] + 0.025, name,
ha='left', va='bottom', size='small')
ax.axis('equal')
return ax
@deprecate_kwarg(old_arg_name='data', new_arg_name='frame')
def andrews_curves(frame, class_column, ax=None, samples=200, color=None,
colormap=None, **kwds):
"""
Parameters:
-----------
frame : DataFrame
Data to be plotted, preferably normalized to (0.0, 1.0)
class_column : Name of the column containing class names
ax : matplotlib axes object, default None
samples : Number of points to plot in each curve
color: list or tuple, optional
Colors to use for the different classes
colormap : str or matplotlib colormap object, default None
Colormap to select colors from. If string, load colormap with that name
from matplotlib.
kwds: keywords
Options to pass to matplotlib plotting method
Returns:
--------
ax: Matplotlib axis object
"""
from math import sqrt, pi, sin, cos
import matplotlib.pyplot as plt
def function(amplitudes):
def f(x):
x1 = amplitudes[0]
result = x1 / sqrt(2.0)
harmonic = 1.0
for x_even, x_odd in zip(amplitudes[1::2], amplitudes[2::2]):
result += (x_even * sin(harmonic * x) +
x_odd * cos(harmonic * x))
harmonic += 1.0
if len(amplitudes) % 2 != 0:
result += amplitudes[-1] * sin(harmonic * x)
return result
return f
n = len(frame)
class_col = frame[class_column]
classes = frame[class_column].drop_duplicates()
df = frame.drop(class_column, axis=1)
x = [-pi + 2.0 * pi * (t / float(samples)) for t in range(samples)]
used_legends = set([])
color_values = _get_standard_colors(num_colors=len(classes),
colormap=colormap, color_type='random',
color=color)
colors = dict(zip(classes, color_values))
if ax is None:
ax = plt.gca(xlim=(-pi, pi))
for i in range(n):
row = df.iloc[i].values
f = function(row)
y = [f(t) for t in x]
kls = class_col.iat[i]
label = com.pprint_thing(kls)
if label not in used_legends:
used_legends.add(label)
ax.plot(x, y, color=colors[kls], label=label, **kwds)
else:
ax.plot(x, y, color=colors[kls], **kwds)
ax.legend(loc='upper right')
ax.grid()
return ax
def bootstrap_plot(series, fig=None, size=50, samples=500, **kwds):
"""Bootstrap plot.
Parameters:
-----------
series: Time series
fig: matplotlib figure object, optional
size: number of data points to consider during each sampling
samples: number of times the bootstrap procedure is performed
kwds: optional keyword arguments for plotting commands, must be accepted
by both hist and plot
Returns:
--------
fig: matplotlib figure
"""
import random
import matplotlib.pyplot as plt
# random.sample(ndarray, int) fails on python 3.3, sigh
data = list(series.values)
samplings = [random.sample(data, size) for _ in range(samples)]
means = np.array([np.mean(sampling) for sampling in samplings])
medians = np.array([np.median(sampling) for sampling in samplings])
midranges = np.array([(min(sampling) + max(sampling)) * 0.5
for sampling in samplings])
if fig is None:
fig = plt.figure()
x = lrange(samples)
axes = []
ax1 = fig.add_subplot(2, 3, 1)
ax1.set_xlabel("Sample")
axes.append(ax1)
ax1.plot(x, means, **kwds)
ax2 = fig.add_subplot(2, 3, 2)
ax2.set_xlabel("Sample")
axes.append(ax2)
ax2.plot(x, medians, **kwds)
ax3 = fig.add_subplot(2, 3, 3)
ax3.set_xlabel("Sample")
axes.append(ax3)
ax3.plot(x, midranges, **kwds)
ax4 = fig.add_subplot(2, 3, 4)
ax4.set_xlabel("Mean")
axes.append(ax4)
ax4.hist(means, **kwds)
ax5 = fig.add_subplot(2, 3, 5)
ax5.set_xlabel("Median")
axes.append(ax5)
ax5.hist(medians, **kwds)
ax6 = fig.add_subplot(2, 3, 6)
ax6.set_xlabel("Midrange")
axes.append(ax6)
ax6.hist(midranges, **kwds)
for axis in axes:
plt.setp(axis.get_xticklabels(), fontsize=8)
plt.setp(axis.get_yticklabels(), fontsize=8)
return fig
@deprecate_kwarg(old_arg_name='colors', new_arg_name='color')
@deprecate_kwarg(old_arg_name='data', new_arg_name='frame')
def parallel_coordinates(frame, class_column, cols=None, ax=None, color=None,
use_columns=False, xticks=None, colormap=None,
axvlines=True, **kwds):
"""Parallel coordinates plotting.
Parameters
----------
frame: DataFrame
class_column: str
Column name containing class names
cols: list, optional
A list of column names to use
ax: matplotlib.axis, optional
matplotlib axis object
color: list or tuple, optional
Colors to use for the different classes
use_columns: bool, optional
If true, columns will be used as xticks
xticks: list or tuple, optional
A list of values to use for xticks
colormap: str or matplotlib colormap, default None
Colormap to use for line colors.
axvlines: bool, optional
If true, vertical lines will be added at each xtick
kwds: keywords
Options to pass to matplotlib plotting method
Returns
-------
ax: matplotlib axis object
Examples
--------
>>> from pandas import read_csv
>>> from pandas.tools.plotting import parallel_coordinates
>>> from matplotlib import pyplot as plt
>>> df = read_csv('https://raw.github.com/pydata/pandas/master/pandas/tests/data/iris.csv')
>>> parallel_coordinates(df, 'Name', color=('#556270', '#4ECDC4', '#C7F464'))
>>> plt.show()
"""
import matplotlib.pyplot as plt
n = len(frame)
classes = frame[class_column].drop_duplicates()
class_col = frame[class_column]
if cols is None:
df = frame.drop(class_column, axis=1)
else:
df = frame[cols]
used_legends = set([])
ncols = len(df.columns)
# determine values to use for xticks
if use_columns is True:
if not np.all(np.isreal(list(df.columns))):
raise ValueError('Columns must be numeric to be used as xticks')
x = df.columns
elif xticks is not None:
if not np.all(np.isreal(xticks)):
raise ValueError('xticks specified must be numeric')
elif len(xticks) != ncols:
raise ValueError('Length of xticks must match number of columns')
x = xticks
else:
x = lrange(ncols)
if ax is None:
ax = plt.gca()
color_values = _get_standard_colors(num_colors=len(classes),
colormap=colormap, color_type='random',
color=color)
colors = dict(zip(classes, color_values))
for i in range(n):
y = df.iloc[i].values
kls = class_col.iat[i]
label = com.pprint_thing(kls)
if label not in used_legends:
used_legends.add(label)
ax.plot(x, y, color=colors[kls], label=label, **kwds)
else:
ax.plot(x, y, color=colors[kls], **kwds)
if axvlines:
for i in x:
ax.axvline(i, linewidth=1, color='black')
ax.set_xticks(x)
ax.set_xticklabels(df.columns)
ax.set_xlim(x[0], x[-1])
ax.legend(loc='upper right')
ax.grid()
return ax
def lag_plot(series, lag=1, ax=None, **kwds):
"""Lag plot for time series.
Parameters:
-----------
series: Time series
lag: lag of the scatter plot, default 1
ax: Matplotlib axis object, optional
kwds: Matplotlib scatter method keyword arguments, optional
Returns:
--------
ax: Matplotlib axis object
"""
import matplotlib.pyplot as plt
# workaround because `c='b'` is hardcoded in matplotlibs scatter method
kwds.setdefault('c', plt.rcParams['patch.facecolor'])
data = series.values
y1 = data[:-lag]
y2 = data[lag:]
if ax is None:
ax = plt.gca()
ax.set_xlabel("y(t)")
ax.set_ylabel("y(t + %s)" % lag)
ax.scatter(y1, y2, **kwds)
return ax
def autocorrelation_plot(series, ax=None, **kwds):
"""Autocorrelation plot for time series.
Parameters:
-----------
series: Time series
ax: Matplotlib axis object, optional
kwds : keywords
Options to pass to matplotlib plotting method
Returns:
-----------
ax: Matplotlib axis object
"""
import matplotlib.pyplot as plt
n = len(series)
data = np.asarray(series)
if ax is None:
ax = plt.gca(xlim=(1, n), ylim=(-1.0, 1.0))
mean = np.mean(data)
c0 = np.sum((data - mean) ** 2) / float(n)
def r(h):
return ((data[:n - h] - mean) * (data[h:] - mean)).sum() / float(n) / c0
x = np.arange(n) + 1
y = lmap(r, x)
z95 = 1.959963984540054
z99 = 2.5758293035489004
ax.axhline(y=z99 / np.sqrt(n), linestyle='--', color='grey')
ax.axhline(y=z95 / np.sqrt(n), color='grey')
ax.axhline(y=0.0, color='black')
ax.axhline(y=-z95 / np.sqrt(n), color='grey')
ax.axhline(y=-z99 / np.sqrt(n), linestyle='--', color='grey')
ax.set_xlabel("Lag")
ax.set_ylabel("Autocorrelation")
ax.plot(x, y, **kwds)
if 'label' in kwds:
ax.legend()
ax.grid()
return ax
class MPLPlot(object):
"""
Base class for assembling a pandas plot using matplotlib
Parameters
----------
data :
"""
_layout_type = 'vertical'
_default_rot = 0
orientation = None
_pop_attributes = ['label', 'style', 'logy', 'logx', 'loglog',
'mark_right', 'stacked']
_attr_defaults = {'logy': False, 'logx': False, 'loglog': False,
'mark_right': True, 'stacked': False}
def __init__(self, data, kind=None, by=None, subplots=False, sharex=None,
sharey=False, use_index=True,
figsize=None, grid=None, legend=True, rot=None,
ax=None, fig=None, title=None, xlim=None, ylim=None,
xticks=None, yticks=None,
sort_columns=False, fontsize=None,
secondary_y=False, colormap=None,
table=False, layout=None, **kwds):
self.data = data
self.by = by
self.kind = kind
self.sort_columns = sort_columns
self.subplots = subplots
if sharex is None:
if ax is None:
self.sharex = True
else:
# if we get an axis, the users should do the visibility setting...
self.sharex = False
else:
self.sharex = sharex
self.sharey = sharey
self.figsize = figsize
self.layout = layout
self.xticks = xticks
self.yticks = yticks
self.xlim = xlim
self.ylim = ylim
self.title = title
self.use_index = use_index
self.fontsize = fontsize
if rot is not None:
self.rot = rot
# need to know for format_date_labels since it's rotated to 30 by
# default
self._rot_set = True
else:
self._rot_set = False
if isinstance(self._default_rot, dict):
self.rot = self._default_rot[self.kind]
else:
self.rot = self._default_rot
if grid is None:
grid = False if secondary_y else self.plt.rcParams['axes.grid']
self.grid = grid
self.legend = legend
self.legend_handles = []
self.legend_labels = []
for attr in self._pop_attributes:
value = kwds.pop(attr, self._attr_defaults.get(attr, None))
setattr(self, attr, value)
self.ax = ax
self.fig = fig
self.axes = None
# parse errorbar input if given
xerr = kwds.pop('xerr', None)
yerr = kwds.pop('yerr', None)
self.errors = {}
for kw, err in zip(['xerr', 'yerr'], [xerr, yerr]):
self.errors[kw] = self._parse_errorbars(kw, err)
if not isinstance(secondary_y, (bool, tuple, list, np.ndarray, Index)):
secondary_y = [secondary_y]
self.secondary_y = secondary_y
# ugly TypeError if user passes matplotlib's `cmap` name.
# Probably better to accept either.
if 'cmap' in kwds and colormap:
raise TypeError("Only specify one of `cmap` and `colormap`.")
elif 'cmap' in kwds:
self.colormap = kwds.pop('cmap')
else:
self.colormap = colormap
self.table = table
self.kwds = kwds
self._validate_color_args()
def _validate_color_args(self):
if 'color' not in self.kwds and 'colors' in self.kwds:
warnings.warn(("'colors' is being deprecated. Please use 'color'"
"instead of 'colors'"))
colors = self.kwds.pop('colors')
self.kwds['color'] = colors
if ('color' in self.kwds and self.nseries == 1):
# support series.plot(color='green')
self.kwds['color'] = [self.kwds['color']]
if ('color' in self.kwds or 'colors' in self.kwds) and \
self.colormap is not None:
warnings.warn("'color' and 'colormap' cannot be used "
"simultaneously. Using 'color'")
if 'color' in self.kwds and self.style is not None:
if com.is_list_like(self.style):
styles = self.style
else:
styles = [self.style]
# need only a single match
for s in styles:
if re.match('^[a-z]+?', s) is not None:
raise ValueError("Cannot pass 'style' string with a color "
"symbol and 'color' keyword argument. Please"
" use one or the other or pass 'style' "
"without a color symbol")
def _iter_data(self, data=None, keep_index=False, fillna=None):
if data is None:
data = self.data
if fillna is not None:
data = data.fillna(fillna)
if self.sort_columns:
columns = com._try_sort(data.columns)
else:
columns = data.columns
for col in columns:
if keep_index is True:
yield col, data[col]
else:
yield col, data[col].values
@property
def nseries(self):
if self.data.ndim == 1:
return 1
else:
return self.data.shape[1]
def draw(self):
self.plt.draw_if_interactive()
def generate(self):
self._args_adjust()
self._compute_plot_data()
self._setup_subplots()
self._make_plot()
self._add_table()
self._make_legend()
self._post_plot_logic()
self._adorn_subplots()
def _args_adjust(self):
pass
def _has_plotted_object(self, ax):
"""check whether ax has data"""
return (len(ax.lines) != 0 or
len(ax.artists) != 0 or
len(ax.containers) != 0)
def _maybe_right_yaxis(self, ax, axes_num):
if not self.on_right(axes_num):
# secondary axes may be passed via ax kw
return self._get_ax_layer(ax)
if hasattr(ax, 'right_ax'):
# if it has right_ax proparty, ``ax`` must be left axes
return ax.right_ax
elif hasattr(ax, 'left_ax'):
# if it has left_ax proparty, ``ax`` must be right axes
return ax
else:
# otherwise, create twin axes
orig_ax, new_ax = ax, ax.twinx()
new_ax._get_lines.color_cycle = orig_ax._get_lines.color_cycle
orig_ax.right_ax, new_ax.left_ax = new_ax, orig_ax
if not self._has_plotted_object(orig_ax): # no data on left y
orig_ax.get_yaxis().set_visible(False)
return new_ax
def _setup_subplots(self):
if self.subplots:
fig, axes = _subplots(naxes=self.nseries,
sharex=self.sharex, sharey=self.sharey,
figsize=self.figsize, ax=self.ax,
layout=self.layout,
layout_type=self._layout_type)
else:
if self.ax is None:
fig = self.plt.figure(figsize=self.figsize)
axes = fig.add_subplot(111)
else:
fig = self.ax.get_figure()
if self.figsize is not None:
fig.set_size_inches(self.figsize)
axes = self.ax
axes = _flatten(axes)
if self.logx or self.loglog:
[a.set_xscale('log') for a in axes]
if self.logy or self.loglog:
[a.set_yscale('log') for a in axes]
self.fig = fig
self.axes = axes
@property
def result(self):
"""
Return result axes
"""
if self.subplots:
if self.layout is not None and not com.is_list_like(self.ax):
return self.axes.reshape(*self.layout)
else:
return self.axes
else:
sec_true = isinstance(self.secondary_y, bool) and self.secondary_y
all_sec = (com.is_list_like(self.secondary_y) and
len(self.secondary_y) == self.nseries)
if (sec_true or all_sec):
# if all data is plotted on secondary, return right axes
return self._get_ax_layer(self.axes[0], primary=False)
else:
return self.axes[0]
def _compute_plot_data(self):
data = self.data
if isinstance(data, Series):
label = self.label
if label is None and data.name is None:
label = 'None'
data = data.to_frame(name=label)
numeric_data = data.convert_objects()._get_numeric_data()
try:
is_empty = numeric_data.empty
except AttributeError:
is_empty = not len(numeric_data)
# no empty frames or series allowed
if is_empty:
raise TypeError('Empty {0!r}: no numeric data to '
'plot'.format(numeric_data.__class__.__name__))
self.data = numeric_data
def _make_plot(self):
raise AbstractMethodError(self)
def _add_table(self):
if self.table is False:
return
elif self.table is True:
data = self.data.transpose()
else:
data = self.table
ax = self._get_ax(0)
table(ax, data)
def _post_plot_logic(self):
pass
def _adorn_subplots(self):
to_adorn = self.axes
if len(self.axes) > 0:
all_axes = self._get_axes()
nrows, ncols = self._get_axes_layout()
_handle_shared_axes(axarr=all_axes, nplots=len(all_axes),
naxes=nrows * ncols, nrows=nrows,
ncols=ncols, sharex=self.sharex,
sharey=self.sharey)
for ax in to_adorn:
if self.yticks is not None:
ax.set_yticks(self.yticks)
if self.xticks is not None:
ax.set_xticks(self.xticks)
if self.ylim is not None:
ax.set_ylim(self.ylim)
if self.xlim is not None:
ax.set_xlim(self.xlim)
ax.grid(self.grid)
if self.title:
if self.subplots:
self.fig.suptitle(self.title)
else:
self.axes[0].set_title(self.title)
labels = [com.pprint_thing(key) for key in self.data.index]
labels = dict(zip(range(len(self.data.index)), labels))
for ax in self.axes:
if self.orientation == 'vertical' or self.orientation is None:
if self._need_to_set_index:
xticklabels = [labels.get(x, '') for x in ax.get_xticks()]
ax.set_xticklabels(xticklabels)
self._apply_axis_properties(ax.xaxis, rot=self.rot,
fontsize=self.fontsize)
self._apply_axis_properties(ax.yaxis, fontsize=self.fontsize)
elif self.orientation == 'horizontal':
if self._need_to_set_index:
yticklabels = [labels.get(y, '') for y in ax.get_yticks()]
ax.set_yticklabels(yticklabels)
self._apply_axis_properties(ax.yaxis, rot=self.rot,
fontsize=self.fontsize)
self._apply_axis_properties(ax.xaxis, fontsize=self.fontsize)
def _apply_axis_properties(self, axis, rot=None, fontsize=None):
labels = axis.get_majorticklabels() + axis.get_minorticklabels()
for label in labels:
if rot is not None:
label.set_rotation(rot)
if fontsize is not None:
label.set_fontsize(fontsize)
@property
def legend_title(self):
if not isinstance(self.data.columns, MultiIndex):
name = self.data.columns.name
if name is not None:
name = com.pprint_thing(name)
return name
else:
stringified = map(com.pprint_thing,
self.data.columns.names)
return ','.join(stringified)
def _add_legend_handle(self, handle, label, index=None):
if not label is None:
if self.mark_right and index is not None:
if self.on_right(index):
label = label + ' (right)'
self.legend_handles.append(handle)
self.legend_labels.append(label)
def _make_legend(self):
ax, leg = self._get_ax_legend(self.axes[0])
handles = []
labels = []
title = ''
if not self.subplots:
if not leg is None:
title = leg.get_title().get_text()
handles = leg.legendHandles
labels = [x.get_text() for x in leg.get_texts()]
if self.legend:
if self.legend == 'reverse':
self.legend_handles = reversed(self.legend_handles)
self.legend_labels = reversed(self.legend_labels)
handles += self.legend_handles
labels += self.legend_labels
if not self.legend_title is None:
title = self.legend_title
if len(handles) > 0:
ax.legend(handles, labels, loc='best', title=title)
elif self.subplots and self.legend:
for ax in self.axes:
if ax.get_visible():
ax.legend(loc='best')
def _get_ax_legend(self, ax):
leg = ax.get_legend()
other_ax = (getattr(ax, 'left_ax', None) or
getattr(ax, 'right_ax', None))
other_leg = None
if other_ax is not None:
other_leg = other_ax.get_legend()
if leg is None and other_leg is not None:
leg = other_leg
ax = other_ax
return ax, leg
@cache_readonly
def plt(self):
import matplotlib.pyplot as plt
return plt
_need_to_set_index = False
def _get_xticks(self, convert_period=False):
index = self.data.index
is_datetype = index.inferred_type in ('datetime', 'date',
'datetime64', 'time')
if self.use_index:
if convert_period and isinstance(index, PeriodIndex):
self.data = self.data.reindex(index=index.order())
x = self.data.index.to_timestamp()._mpl_repr()
elif index.is_numeric():
"""
Matplotlib supports numeric values or datetime objects as
xaxis values. Taking LBYL approach here, by the time
matplotlib raises exception when using non numeric/datetime
values for xaxis, several actions are already taken by plt.
"""
x = index._mpl_repr()
elif is_datetype:
self.data = self.data.sort_index()
x = self.data.index._mpl_repr()
else:
self._need_to_set_index = True
x = lrange(len(index))
else:
x = lrange(len(index))
return x
def _is_datetype(self):
index = self.data.index
return (isinstance(index, (PeriodIndex, DatetimeIndex)) or
index.inferred_type in ('datetime', 'date', 'datetime64',
'time'))
def _get_plot_function(self):
'''
Returns the matplotlib plotting function (plot or errorbar) based on
the presence of errorbar keywords.
'''
errorbar = any(e is not None for e in self.errors.values())
def plotf(ax, x, y, style=None, **kwds):
mask = com.isnull(y)
if mask.any():
y = np.ma.array(y)
y = np.ma.masked_where(mask, y)
if errorbar:
return self.plt.Axes.errorbar(ax, x, y, **kwds)
else:
# prevent style kwarg from going to errorbar, where it is unsupported
if style is not None:
args = (ax, x, y, style)
else:
args = (ax, x, y)
return self.plt.Axes.plot(*args, **kwds)
return plotf
def _get_index_name(self):
if isinstance(self.data.index, MultiIndex):
name = self.data.index.names
if any(x is not None for x in name):
name = ','.join([com.pprint_thing(x) for x in name])
else:
name = None
else:
name = self.data.index.name
if name is not None:
name = com.pprint_thing(name)
return name
@classmethod
def _get_ax_layer(cls, ax, primary=True):
"""get left (primary) or right (secondary) axes"""
if primary:
return getattr(ax, 'left_ax', ax)
else:
return getattr(ax, 'right_ax', ax)
def _get_ax(self, i):
# get the twinx ax if appropriate
if self.subplots:
ax = self.axes[i]
ax = self._maybe_right_yaxis(ax, i)
self.axes[i] = ax
else:
ax = self.axes[0]
ax = self._maybe_right_yaxis(ax, i)
ax.get_yaxis().set_visible(True)
return ax
def on_right(self, i):
if isinstance(self.secondary_y, bool):
return self.secondary_y
if isinstance(self.secondary_y, (tuple, list, np.ndarray, Index)):
return self.data.columns[i] in self.secondary_y
def _get_style(self, i, col_name):
style = ''
if self.subplots:
style = 'k'
if self.style is not None:
if isinstance(self.style, list):
try:
style = self.style[i]
except IndexError:
pass
elif isinstance(self.style, dict):
style = self.style.get(col_name, style)
else:
style = self.style
return style or None
def _get_colors(self, num_colors=None, color_kwds='color'):
if num_colors is None:
num_colors = self.nseries
return _get_standard_colors(num_colors=num_colors,
colormap=self.colormap,
color=self.kwds.get(color_kwds))
def _maybe_add_color(self, colors, kwds, style, i):
has_color = 'color' in kwds or self.colormap is not None
if has_color and (style is None or re.match('[a-z]+', style) is None):
kwds['color'] = colors[i % len(colors)]
def _parse_errorbars(self, label, err):
'''
Look for error keyword arguments and return the actual errorbar data
or return the error DataFrame/dict
Error bars can be specified in several ways:
Series: the user provides a pandas.Series object of the same
length as the data
ndarray: provides a np.ndarray of the same length as the data
DataFrame/dict: error values are paired with keys matching the
key in the plotted DataFrame
str: the name of the column within the plotted DataFrame
'''
if err is None:
return None
from pandas import DataFrame, Series
def match_labels(data, e):
e = e.reindex_axis(data.index)
return e
# key-matched DataFrame
if isinstance(err, DataFrame):
err = match_labels(self.data, err)
# key-matched dict
elif isinstance(err, dict):
pass
# Series of error values
elif isinstance(err, Series):
# broadcast error series across data
err = match_labels(self.data, err)
err = np.atleast_2d(err)
err = np.tile(err, (self.nseries, 1))
# errors are a column in the dataframe
elif isinstance(err, string_types):
evalues = self.data[err].values
self.data = self.data[self.data.columns.drop(err)]
err = np.atleast_2d(evalues)
err = np.tile(err, (self.nseries, 1))
elif com.is_list_like(err):
if com.is_iterator(err):
err = np.atleast_2d(list(err))
else:
# raw error values
err = np.atleast_2d(err)
err_shape = err.shape
# asymmetrical error bars
if err.ndim == 3:
if (err_shape[0] != self.nseries) or \
(err_shape[1] != 2) or \
(err_shape[2] != len(self.data)):
msg = "Asymmetrical error bars should be provided " + \
"with the shape (%u, 2, %u)" % \
(self.nseries, len(self.data))
raise ValueError(msg)
# broadcast errors to each data series
if len(err) == 1:
err = np.tile(err, (self.nseries, 1))
elif com.is_number(err):
err = np.tile([err], (self.nseries, len(self.data)))
else:
msg = "No valid %s detected" % label
raise ValueError(msg)
return err
def _get_errorbars(self, label=None, index=None, xerr=True, yerr=True):
from pandas import DataFrame
errors = {}
for kw, flag in zip(['xerr', 'yerr'], [xerr, yerr]):
if flag:
err = self.errors[kw]
# user provided label-matched dataframe of errors
if isinstance(err, (DataFrame, dict)):
if label is not None and label in err.keys():
err = err[label]
else:
err = None
elif index is not None and err is not None:
err = err[index]
if err is not None:
errors[kw] = err
return errors
def _get_axes(self):
return self.axes[0].get_figure().get_axes()
def _get_axes_layout(self):
axes = self._get_axes()
x_set = set()
y_set = set()
for ax in axes:
# check axes coordinates to estimate layout
points = ax.get_position().get_points()
x_set.add(points[0][0])
y_set.add(points[0][1])
return (len(y_set), len(x_set))
class ScatterPlot(MPLPlot):
_layout_type = 'single'
def __init__(self, data, x, y, c=None, **kwargs):
MPLPlot.__init__(self, data, **kwargs)
if x is None or y is None:
raise ValueError( 'scatter requires and x and y column')
if com.is_integer(x) and not self.data.columns.holds_integer():
x = self.data.columns[x]
if com.is_integer(y) and not self.data.columns.holds_integer():
y = self.data.columns[y]
if com.is_integer(c) and not self.data.columns.holds_integer():
c = self.data.columns[c]
self.x = x
self.y = y
self.c = c
@property
def nseries(self):
return 1
def _make_plot(self):
import matplotlib as mpl
mpl_ge_1_3_1 = str(mpl.__version__) >= LooseVersion('1.3.1')
import matplotlib.pyplot as plt
x, y, c, data = self.x, self.y, self.c, self.data
ax = self.axes[0]
c_is_column = com.is_hashable(c) and c in self.data.columns
# plot a colorbar only if a colormap is provided or necessary
cb = self.kwds.pop('colorbar', self.colormap or c_is_column)
# pandas uses colormap, matplotlib uses cmap.
cmap = self.colormap or 'Greys'
cmap = plt.cm.get_cmap(cmap)
if c is None:
c_values = self.plt.rcParams['patch.facecolor']
elif c_is_column:
c_values = self.data[c].values
else:
c_values = c
if self.legend and hasattr(self, 'label'):
label = self.label
else:
label = None
scatter = ax.scatter(data[x].values, data[y].values, c=c_values,
label=label, cmap=cmap, **self.kwds)
if cb:
img = ax.collections[0]
kws = dict(ax=ax)
if mpl_ge_1_3_1:
kws['label'] = c if c_is_column else ''
self.fig.colorbar(img, **kws)
if label is not None:
self._add_legend_handle(scatter, label)
else:
self.legend = False
errors_x = self._get_errorbars(label=x, index=0, yerr=False)
errors_y = self._get_errorbars(label=y, index=0, xerr=False)
if len(errors_x) > 0 or len(errors_y) > 0:
err_kwds = dict(errors_x, **errors_y)
err_kwds['ecolor'] = scatter.get_facecolor()[0]
ax.errorbar(data[x].values, data[y].values, linestyle='none', **err_kwds)
def _post_plot_logic(self):
ax = self.axes[0]
x, y = self.x, self.y
ax.set_ylabel(com.pprint_thing(y))
ax.set_xlabel(com.pprint_thing(x))
class HexBinPlot(MPLPlot):
_layout_type = 'single'
def __init__(self, data, x, y, C=None, **kwargs):
MPLPlot.__init__(self, data, **kwargs)
if x is None or y is None:
raise ValueError('hexbin requires and x and y column')
if com.is_integer(x) and not self.data.columns.holds_integer():
x = self.data.columns[x]
if com.is_integer(y) and not self.data.columns.holds_integer():
y = self.data.columns[y]
if com.is_integer(C) and not self.data.columns.holds_integer():
C = self.data.columns[C]
self.x = x
self.y = y
self.C = C
@property
def nseries(self):
return 1
def _make_plot(self):
import matplotlib.pyplot as plt
x, y, data, C = self.x, self.y, self.data, self.C
ax = self.axes[0]
# pandas uses colormap, matplotlib uses cmap.
cmap = self.colormap or 'BuGn'
cmap = plt.cm.get_cmap(cmap)
cb = self.kwds.pop('colorbar', True)
if C is None:
c_values = None
else:
c_values = data[C].values
ax.hexbin(data[x].values, data[y].values, C=c_values, cmap=cmap,
**self.kwds)
if cb:
img = ax.collections[0]
self.fig.colorbar(img, ax=ax)
def _make_legend(self):
pass
def _post_plot_logic(self):
ax = self.axes[0]
x, y = self.x, self.y
ax.set_ylabel(com.pprint_thing(y))
ax.set_xlabel(com.pprint_thing(x))
class LinePlot(MPLPlot):
_default_rot = 0
orientation = 'vertical'
def __init__(self, data, **kwargs):
MPLPlot.__init__(self, data, **kwargs)
if self.stacked:
self.data = self.data.fillna(value=0)
self.x_compat = plot_params['x_compat']
if 'x_compat' in self.kwds:
self.x_compat = bool(self.kwds.pop('x_compat'))
def _index_freq(self):
freq = getattr(self.data.index, 'freq', None)
if freq is None:
freq = getattr(self.data.index, 'inferred_freq', None)
if freq == 'B':
weekdays = np.unique(self.data.index.dayofweek)
if (5 in weekdays) or (6 in weekdays):
freq = None
return freq
def _is_dynamic_freq(self, freq):
if isinstance(freq, DateOffset):
freq = freq.rule_code
else:
freq = frequencies.get_base_alias(freq)
freq = frequencies.get_period_alias(freq)
return freq is not None and self._no_base(freq)
def _no_base(self, freq):
# hack this for 0.10.1, creating more technical debt...sigh
if isinstance(self.data.index, DatetimeIndex):
base = frequencies.get_freq(freq)
x = self.data.index
if (base <= frequencies.FreqGroup.FR_DAY):
return x[:1].is_normalized
return Period(x[0], freq).to_timestamp(tz=x.tz) == x[0]
return True
def _use_dynamic_x(self):
freq = self._index_freq()
ax = self._get_ax(0)
ax_freq = getattr(ax, 'freq', None)
if freq is None: # convert irregular if axes has freq info
freq = ax_freq
else: # do not use tsplot if irregular was plotted first
if (ax_freq is None) and (len(ax.get_lines()) > 0):
return False
return (freq is not None) and self._is_dynamic_freq(freq)
def _is_ts_plot(self):
# this is slightly deceptive
return not self.x_compat and self.use_index and self._use_dynamic_x()
def _make_plot(self):
self._initialize_prior(len(self.data))
if self._is_ts_plot():
data = self._maybe_convert_index(self.data)
x = data.index # dummy, not used
plotf = self._get_ts_plot_function()
it = self._iter_data(data=data, keep_index=True)
else:
x = self._get_xticks(convert_period=True)
plotf = self._get_plot_function()
it = self._iter_data()
colors = self._get_colors()
for i, (label, y) in enumerate(it):
ax = self._get_ax(i)
style = self._get_style(i, label)
kwds = self.kwds.copy()
self._maybe_add_color(colors, kwds, style, i)
errors = self._get_errorbars(label=label, index=i)
kwds = dict(kwds, **errors)
label = com.pprint_thing(label) # .encode('utf-8')
kwds['label'] = label
newlines = plotf(ax, x, y, style=style, column_num=i, **kwds)
self._add_legend_handle(newlines[0], label, index=i)
lines = _get_all_lines(ax)
left, right = _get_xlim(lines)
ax.set_xlim(left, right)
def _get_stacked_values(self, y, label):
if self.stacked:
if (y >= 0).all():
return self._pos_prior + y
elif (y <= 0).all():
return self._neg_prior + y
else:
raise ValueError('When stacked is True, each column must be either all positive or negative.'
'{0} contains both positive and negative values'.format(label))
else:
return y
def _get_plot_function(self):
f = MPLPlot._get_plot_function(self)
def plotf(ax, x, y, style=None, column_num=None, **kwds):
# column_num is used to get the target column from protf in line and area plots
if column_num == 0:
self._initialize_prior(len(self.data))
y_values = self._get_stacked_values(y, kwds['label'])
lines = f(ax, x, y_values, style=style, **kwds)
self._update_prior(y)
return lines
return plotf
def _get_ts_plot_function(self):
from pandas.tseries.plotting import tsplot
plotf = self._get_plot_function()
def _plot(ax, x, data, style=None, **kwds):
# accept x to be consistent with normal plot func,
# x is not passed to tsplot as it uses data.index as x coordinate
lines = tsplot(data, plotf, ax=ax, style=style, **kwds)
return lines
return _plot
def _initialize_prior(self, n):
self._pos_prior = np.zeros(n)
self._neg_prior = np.zeros(n)
def _update_prior(self, y):
if self.stacked and not self.subplots:
# tsplot resample may changedata length
if len(self._pos_prior) != len(y):
self._initialize_prior(len(y))
if (y >= 0).all():
self._pos_prior += y
elif (y <= 0).all():
self._neg_prior += y
def _maybe_convert_index(self, data):
# tsplot converts automatically, but don't want to convert index
# over and over for DataFrames
if isinstance(data.index, DatetimeIndex):
freq = getattr(data.index, 'freq', None)
if freq is None:
freq = getattr(data.index, 'inferred_freq', None)
if isinstance(freq, DateOffset):
freq = freq.rule_code
if freq is None:
ax = self._get_ax(0)
freq = getattr(ax, 'freq', None)
if freq is None:
raise ValueError('Could not get frequency alias for plotting')
freq = frequencies.get_base_alias(freq)
freq = frequencies.get_period_alias(freq)
data.index = data.index.to_period(freq=freq)
return data
def _post_plot_logic(self):
df = self.data
condition = (not self._use_dynamic_x()
and df.index.is_all_dates
and not self.subplots
or (self.subplots and self.sharex))
index_name = self._get_index_name()
for ax in self.axes:
if condition:
# irregular TS rotated 30 deg. by default
# probably a better place to check / set this.
if not self._rot_set:
self.rot = 30
format_date_labels(ax, rot=self.rot)
if index_name is not None and self.use_index:
ax.set_xlabel(index_name)
class AreaPlot(LinePlot):
def __init__(self, data, **kwargs):
kwargs.setdefault('stacked', True)
data = data.fillna(value=0)
LinePlot.__init__(self, data, **kwargs)
if not self.stacked:
# use smaller alpha to distinguish overlap
self.kwds.setdefault('alpha', 0.5)
def _get_plot_function(self):
if self.logy or self.loglog:
raise ValueError("Log-y scales are not supported in area plot")
else:
f = MPLPlot._get_plot_function(self)
def plotf(ax, x, y, style=None, column_num=None, **kwds):
if column_num == 0:
self._initialize_prior(len(self.data))
y_values = self._get_stacked_values(y, kwds['label'])
lines = f(ax, x, y_values, style=style, **kwds)
# get data from the line to get coordinates for fill_between
xdata, y_values = lines[0].get_data(orig=False)
if (y >= 0).all():
start = self._pos_prior
elif (y <= 0).all():
start = self._neg_prior
else:
start = np.zeros(len(y))
if not 'color' in kwds:
kwds['color'] = lines[0].get_color()
self.plt.Axes.fill_between(ax, xdata, start, y_values, **kwds)
self._update_prior(y)
return lines
return plotf
def _add_legend_handle(self, handle, label, index=None):
from matplotlib.patches import Rectangle
# Because fill_between isn't supported in legend,
# specifically add Rectangle handle here
alpha = self.kwds.get('alpha', None)
handle = Rectangle((0, 0), 1, 1, fc=handle.get_color(), alpha=alpha)
LinePlot._add_legend_handle(self, handle, label, index=index)
def _post_plot_logic(self):
LinePlot._post_plot_logic(self)
if self.ylim is None:
if (self.data >= 0).all().all():
for ax in self.axes:
ax.set_ylim(0, None)
elif (self.data <= 0).all().all():
for ax in self.axes:
ax.set_ylim(None, 0)
class BarPlot(MPLPlot):
_default_rot = {'bar': 90, 'barh': 0}
def __init__(self, data, **kwargs):
self.bar_width = kwargs.pop('width', 0.5)
pos = kwargs.pop('position', 0.5)
kwargs.setdefault('align', 'center')
self.tick_pos = np.arange(len(data))
self.bottom = kwargs.pop('bottom', 0)
self.left = kwargs.pop('left', 0)
self.log = kwargs.pop('log',False)
MPLPlot.__init__(self, data, **kwargs)
if self.stacked or self.subplots:
self.tickoffset = self.bar_width * pos
if kwargs['align'] == 'edge':
self.lim_offset = self.bar_width / 2
else:
self.lim_offset = 0
else:
if kwargs['align'] == 'edge':
w = self.bar_width / self.nseries
self.tickoffset = self.bar_width * (pos - 0.5) + w * 0.5
self.lim_offset = w * 0.5
else:
self.tickoffset = self.bar_width * pos
self.lim_offset = 0
self.ax_pos = self.tick_pos - self.tickoffset
def _args_adjust(self):
if com.is_list_like(self.bottom):
self.bottom = np.array(self.bottom)
if com.is_list_like(self.left):
self.left = np.array(self.left)
def _get_plot_function(self):
if self.kind == 'bar':
def f(ax, x, y, w, start=None, **kwds):
start = start + self.bottom
return ax.bar(x, y, w, bottom=start, log=self.log, **kwds)
elif self.kind == 'barh':
def f(ax, x, y, w, start=None, log=self.log, **kwds):
start = start + self.left
return ax.barh(x, y, w, left=start, log=self.log, **kwds)
else:
raise ValueError("BarPlot kind must be either 'bar' or 'barh'")
return f
def _make_plot(self):
import matplotlib as mpl
colors = self._get_colors()
ncolors = len(colors)
bar_f = self._get_plot_function()
pos_prior = neg_prior = np.zeros(len(self.data))
K = self.nseries
for i, (label, y) in enumerate(self._iter_data(fillna=0)):
ax = self._get_ax(i)
kwds = self.kwds.copy()
kwds['color'] = colors[i % ncolors]
errors = self._get_errorbars(label=label, index=i)
kwds = dict(kwds, **errors)
label = com.pprint_thing(label)
if (('yerr' in kwds) or ('xerr' in kwds)) \
and (kwds.get('ecolor') is None):
kwds['ecolor'] = mpl.rcParams['xtick.color']
start = 0
if self.log and (y >= 1).all():
start = 1
if self.subplots:
w = self.bar_width / 2
rect = bar_f(ax, self.ax_pos + w, y, self.bar_width,
start=start, label=label, **kwds)
ax.set_title(label)
elif self.stacked:
mask = y > 0
start = np.where(mask, pos_prior, neg_prior)
w = self.bar_width / 2
rect = bar_f(ax, self.ax_pos + w, y, self.bar_width,
start=start, label=label, **kwds)
pos_prior = pos_prior + np.where(mask, y, 0)
neg_prior = neg_prior + np.where(mask, 0, y)
else:
w = self.bar_width / K
rect = bar_f(ax, self.ax_pos + (i + 0.5) * w, y, w,
start=start, label=label, **kwds)
self._add_legend_handle(rect, label, index=i)
def _post_plot_logic(self):
for ax in self.axes:
if self.use_index:
str_index = [com.pprint_thing(key) for key in self.data.index]
else:
str_index = [com.pprint_thing(key) for key in
range(self.data.shape[0])]
name = self._get_index_name()
s_edge = self.ax_pos[0] - 0.25 + self.lim_offset
e_edge = self.ax_pos[-1] + 0.25 + self.bar_width + self.lim_offset
if self.kind == 'bar':
ax.set_xlim((s_edge, e_edge))
ax.set_xticks(self.tick_pos)
ax.set_xticklabels(str_index)
if name is not None and self.use_index:
ax.set_xlabel(name)
elif self.kind == 'barh':
# horizontal bars
ax.set_ylim((s_edge, e_edge))
ax.set_yticks(self.tick_pos)
ax.set_yticklabels(str_index)
if name is not None and self.use_index:
ax.set_ylabel(name)
else:
raise NotImplementedError(self.kind)
@property
def orientation(self):
if self.kind == 'bar':
return 'vertical'
elif self.kind == 'barh':
return 'horizontal'
else:
raise NotImplementedError(self.kind)
class HistPlot(LinePlot):
def __init__(self, data, bins=10, bottom=0, **kwargs):
self.bins = bins # use mpl default
self.bottom = bottom
# Do not call LinePlot.__init__ which may fill nan
MPLPlot.__init__(self, data, **kwargs)
def _args_adjust(self):
if com.is_integer(self.bins):
# create common bin edge
values = self.data.convert_objects()._get_numeric_data()
values = np.ravel(values)<|fim▁hole|> hist, self.bins = np.histogram(values, bins=self.bins,
range=self.kwds.get('range', None),
weights=self.kwds.get('weights', None))
if com.is_list_like(self.bottom):
self.bottom = np.array(self.bottom)
def _get_plot_function(self):
def plotf(ax, y, style=None, column_num=None, **kwds):
if column_num == 0:
self._initialize_prior(len(self.bins) - 1)
y = y[~com.isnull(y)]
bottom = self._pos_prior + self.bottom
# ignore style
n, bins, patches = self.plt.Axes.hist(ax, y, bins=self.bins,
bottom=bottom, **kwds)
self._update_prior(n)
return patches
return plotf
def _make_plot(self):
plotf = self._get_plot_function()
colors = self._get_colors()
for i, (label, y) in enumerate(self._iter_data()):
ax = self._get_ax(i)
style = self._get_style(i, label)
label = com.pprint_thing(label)
kwds = self.kwds.copy()
kwds['label'] = label
self._maybe_add_color(colors, kwds, style, i)
if style is not None:
kwds['style'] = style
artists = plotf(ax, y, column_num=i, **kwds)
self._add_legend_handle(artists[0], label, index=i)
def _post_plot_logic(self):
if self.orientation == 'horizontal':
for ax in self.axes:
ax.set_xlabel('Degree')
else:
for ax in self.axes:
ax.set_ylabel('Degree')
@property
def orientation(self):
if self.kwds.get('orientation', None) == 'horizontal':
return 'horizontal'
else:
return 'vertical'
class KdePlot(HistPlot):
orientation = 'vertical'
def __init__(self, data, bw_method=None, ind=None, **kwargs):
MPLPlot.__init__(self, data, **kwargs)
self.bw_method = bw_method
self.ind = ind
def _args_adjust(self):
pass
def _get_ind(self, y):
if self.ind is None:
sample_range = max(y) - min(y)
ind = np.linspace(min(y) - 0.5 * sample_range,
max(y) + 0.5 * sample_range, 1000)
else:
ind = self.ind
return ind
def _get_plot_function(self):
from scipy.stats import gaussian_kde
from scipy import __version__ as spv
f = MPLPlot._get_plot_function(self)
def plotf(ax, y, style=None, column_num=None, **kwds):
y = remove_na(y)
if LooseVersion(spv) >= '0.11.0':
gkde = gaussian_kde(y, bw_method=self.bw_method)
else:
gkde = gaussian_kde(y)
if self.bw_method is not None:
msg = ('bw_method was added in Scipy 0.11.0.' +
' Scipy version in use is %s.' % spv)
warnings.warn(msg)
ind = self._get_ind(y)
y = gkde.evaluate(ind)
lines = f(ax, ind, y, style=style, **kwds)
return lines
return plotf
def _post_plot_logic(self):
for ax in self.axes:
ax.set_ylabel('Density')
class PiePlot(MPLPlot):
_layout_type = 'horizontal'
def __init__(self, data, kind=None, **kwargs):
data = data.fillna(value=0)
if (data < 0).any().any():
raise ValueError("{0} doesn't allow negative values".format(kind))
MPLPlot.__init__(self, data, kind=kind, **kwargs)
def _args_adjust(self):
self.grid = False
self.logy = False
self.logx = False
self.loglog = False
def _validate_color_args(self):
pass
def _make_plot(self):
self.kwds.setdefault('colors', self._get_colors(num_colors=len(self.data),
color_kwds='colors'))
for i, (label, y) in enumerate(self._iter_data()):
ax = self._get_ax(i)
if label is not None:
label = com.pprint_thing(label)
ax.set_ylabel(label)
kwds = self.kwds.copy()
def blank_labeler(label, value):
if value == 0:
return ''
else:
return label
idx = [com.pprint_thing(v) for v in self.data.index]
labels = kwds.pop('labels', idx)
# labels is used for each wedge's labels
# Blank out labels for values of 0 so they don't overlap
# with nonzero wedges
if labels is not None:
blabels = [blank_labeler(label, value) for
label, value in zip(labels, y)]
else:
blabels = None
results = ax.pie(y, labels=blabels, **kwds)
if kwds.get('autopct', None) is not None:
patches, texts, autotexts = results
else:
patches, texts = results
autotexts = []
if self.fontsize is not None:
for t in texts + autotexts:
t.set_fontsize(self.fontsize)
# leglabels is used for legend labels
leglabels = labels if labels is not None else idx
for p, l in zip(patches, leglabels):
self._add_legend_handle(p, l)
class BoxPlot(LinePlot):
_layout_type = 'horizontal'
_valid_return_types = (None, 'axes', 'dict', 'both')
# namedtuple to hold results
BP = namedtuple("Boxplot", ['ax', 'lines'])
def __init__(self, data, return_type=None, **kwargs):
# Do not call LinePlot.__init__ which may fill nan
if return_type not in self._valid_return_types:
raise ValueError("return_type must be {None, 'axes', 'dict', 'both'}")
self.return_type = return_type
MPLPlot.__init__(self, data, **kwargs)
def _args_adjust(self):
if self.subplots:
# Disable label ax sharing. Otherwise, all subplots shows last column label
if self.orientation == 'vertical':
self.sharex = False
else:
self.sharey = False
def _get_plot_function(self):
def plotf(ax, y, column_num=None, **kwds):
if y.ndim == 2:
y = [remove_na(v) for v in y]
# Boxplot fails with empty arrays, so need to add a NaN
# if any cols are empty
# GH 8181
y = [v if v.size > 0 else np.array([np.nan]) for v in y]
else:
y = remove_na(y)
bp = ax.boxplot(y, **kwds)
if self.return_type == 'dict':
return bp, bp
elif self.return_type == 'both':
return self.BP(ax=ax, lines=bp), bp
else:
return ax, bp
return plotf
def _validate_color_args(self):
if 'color' in self.kwds:
if self.colormap is not None:
warnings.warn("'color' and 'colormap' cannot be used "
"simultaneously. Using 'color'")
self.color = self.kwds.pop('color')
if isinstance(self.color, dict):
valid_keys = ['boxes', 'whiskers', 'medians', 'caps']
for key, values in compat.iteritems(self.color):
if key not in valid_keys:
raise ValueError("color dict contains invalid key '{0}' "
"The key must be either {1}".format(key, valid_keys))
else:
self.color = None
# get standard colors for default
colors = _get_standard_colors(num_colors=3,
colormap=self.colormap,
color=None)
# use 2 colors by default, for box/whisker and median
# flier colors isn't needed here
# because it can be specified by ``sym`` kw
self._boxes_c = colors[0]
self._whiskers_c = colors[0]
self._medians_c = colors[2]
self._caps_c = 'k' # mpl default
def _get_colors(self, num_colors=None, color_kwds='color'):
pass
def maybe_color_bp(self, bp):
if isinstance(self.color, dict):
boxes = self.color.get('boxes', self._boxes_c)
whiskers = self.color.get('whiskers', self._whiskers_c)
medians = self.color.get('medians', self._medians_c)
caps = self.color.get('caps', self._caps_c)
else:
# Other types are forwarded to matplotlib
# If None, use default colors
boxes = self.color or self._boxes_c
whiskers = self.color or self._whiskers_c
medians = self.color or self._medians_c
caps = self.color or self._caps_c
from matplotlib.artist import setp
setp(bp['boxes'], color=boxes, alpha=1)
setp(bp['whiskers'], color=whiskers, alpha=1)
setp(bp['medians'], color=medians, alpha=1)
setp(bp['caps'], color=caps, alpha=1)
def _make_plot(self):
plotf = self._get_plot_function()
if self.subplots:
self._return_obj = compat.OrderedDict()
for i, (label, y) in enumerate(self._iter_data()):
ax = self._get_ax(i)
kwds = self.kwds.copy()
ret, bp = plotf(ax, y, column_num=i, **kwds)
self.maybe_color_bp(bp)
self._return_obj[label] = ret
label = [com.pprint_thing(label)]
self._set_ticklabels(ax, label)
else:
y = self.data.values.T
ax = self._get_ax(0)
kwds = self.kwds.copy()
ret, bp = plotf(ax, y, column_num=0, **kwds)
self.maybe_color_bp(bp)
self._return_obj = ret
labels = [l for l, y in self._iter_data()]
labels = [com.pprint_thing(l) for l in labels]
if not self.use_index:
labels = [com.pprint_thing(key) for key in range(len(labels))]
self._set_ticklabels(ax, labels)
def _set_ticklabels(self, ax, labels):
if self.orientation == 'vertical':
ax.set_xticklabels(labels)
else:
ax.set_yticklabels(labels)
def _make_legend(self):
pass
def _post_plot_logic(self):
pass
@property
def orientation(self):
if self.kwds.get('vert', True):
return 'vertical'
else:
return 'horizontal'
@property
def result(self):
if self.return_type is None:
return super(BoxPlot, self).result
else:
return self._return_obj
# kinds supported by both dataframe and series
_common_kinds = ['line', 'bar', 'barh', 'kde', 'density', 'area', 'hist', 'box']
# kinds supported by dataframe
_dataframe_kinds = ['scatter', 'hexbin']
# kinds supported only by series or dataframe single column
_series_kinds = ['pie']
_all_kinds = _common_kinds + _dataframe_kinds + _series_kinds
_plot_klass = {'line': LinePlot, 'bar': BarPlot, 'barh': BarPlot,
'kde': KdePlot, 'hist': HistPlot, 'box': BoxPlot,
'scatter': ScatterPlot, 'hexbin': HexBinPlot,
'area': AreaPlot, 'pie': PiePlot}
def _plot(data, x=None, y=None, subplots=False,
ax=None, kind='line', **kwds):
kind = _get_standard_kind(kind.lower().strip())
if kind in _all_kinds:
klass = _plot_klass[kind]
else:
raise ValueError("%r is not a valid plot kind" % kind)
from pandas import DataFrame
if kind in _dataframe_kinds:
if isinstance(data, DataFrame):
plot_obj = klass(data, x=x, y=y, subplots=subplots, ax=ax,
kind=kind, **kwds)
else:
raise ValueError("plot kind %r can only be used for data frames"
% kind)
elif kind in _series_kinds:
if isinstance(data, DataFrame):
if y is None and subplots is False:
msg = "{0} requires either y column or 'subplots=True'"
raise ValueError(msg.format(kind))
elif y is not None:
if com.is_integer(y) and not data.columns.holds_integer():
y = data.columns[y]
# converted to series actually. copy to not modify
data = data[y].copy()
data.index.name = y
plot_obj = klass(data, subplots=subplots, ax=ax, kind=kind, **kwds)
else:
if isinstance(data, DataFrame):
if x is not None:
if com.is_integer(x) and not data.columns.holds_integer():
x = data.columns[x]
data = data.set_index(x)
if y is not None:
if com.is_integer(y) and not data.columns.holds_integer():
y = data.columns[y]
label = kwds['label'] if 'label' in kwds else y
series = data[y].copy() # Don't modify
series.name = label
for kw in ['xerr', 'yerr']:
if (kw in kwds) and \
(isinstance(kwds[kw], string_types) or
com.is_integer(kwds[kw])):
try:
kwds[kw] = data[kwds[kw]]
except (IndexError, KeyError, TypeError):
pass
data = series
plot_obj = klass(data, subplots=subplots, ax=ax, kind=kind, **kwds)
plot_obj.generate()
plot_obj.draw()
return plot_obj.result
df_kind = """- 'scatter' : scatter plot
- 'hexbin' : hexbin plot"""
series_kind = ""
df_coord = """x : label or position, default None
y : label or position, default None
Allows plotting of one column versus another"""
series_coord = ""
df_unique = """stacked : boolean, default False in line and
bar plots, and True in area plot. If True, create stacked plot.
sort_columns : boolean, default False
Sort column names to determine plot ordering
secondary_y : boolean or sequence, default False
Whether to plot on the secondary y-axis
If a list/tuple, which columns to plot on secondary y-axis"""
series_unique = """label : label argument to provide to plot
secondary_y : boolean or sequence of ints, default False
If True then y-axis will be on the right"""
df_ax = """ax : matplotlib axes object, default None
subplots : boolean, default False
Make separate subplots for each column
sharex : boolean, default True if ax is None else False
In case subplots=True, share x axis and set some x axis labels to
invisible; defaults to True if ax is None otherwise False if an ax
is passed in; Be aware, that passing in both an ax and sharex=True
will alter all x axis labels for all axis in a figure!
sharey : boolean, default False
In case subplots=True, share y axis and set some y axis labels to
invisible
layout : tuple (optional)
(rows, columns) for the layout of subplots"""
series_ax = """ax : matplotlib axes object
If not passed, uses gca()"""
df_note = """- If `kind` = 'scatter' and the argument `c` is the name of a dataframe
column, the values of that column are used to color each point.
- If `kind` = 'hexbin', you can control the size of the bins with the
`gridsize` argument. By default, a histogram of the counts around each
`(x, y)` point is computed. You can specify alternative aggregations
by passing values to the `C` and `reduce_C_function` arguments.
`C` specifies the value at each `(x, y)` point and `reduce_C_function`
is a function of one argument that reduces all the values in a bin to
a single number (e.g. `mean`, `max`, `sum`, `std`)."""
series_note = ""
_shared_doc_df_kwargs = dict(klass='DataFrame', klass_kind=df_kind,
klass_coord=df_coord, klass_ax=df_ax,
klass_unique=df_unique, klass_note=df_note)
_shared_doc_series_kwargs = dict(klass='Series', klass_kind=series_kind,
klass_coord=series_coord, klass_ax=series_ax,
klass_unique=series_unique,
klass_note=series_note)
_shared_docs['plot'] = """
Make plots of %(klass)s using matplotlib / pylab.
Parameters
----------
data : %(klass)s
%(klass_coord)s
kind : str
- 'line' : line plot (default)
- 'bar' : vertical bar plot
- 'barh' : horizontal bar plot
- 'hist' : histogram
- 'box' : boxplot
- 'kde' : Kernel Density Estimation plot
- 'density' : same as 'kde'
- 'area' : area plot
- 'pie' : pie plot
%(klass_kind)s
%(klass_ax)s
figsize : a tuple (width, height) in inches
use_index : boolean, default True
Use index as ticks for x axis
title : string
Title to use for the plot
grid : boolean, default None (matlab style default)
Axis grid lines
legend : False/True/'reverse'
Place legend on axis subplots
style : list or dict
matplotlib line style per column
logx : boolean, default False
Use log scaling on x axis
logy : boolean, default False
Use log scaling on y axis
loglog : boolean, default False
Use log scaling on both x and y axes
xticks : sequence
Values to use for the xticks
yticks : sequence
Values to use for the yticks
xlim : 2-tuple/list
ylim : 2-tuple/list
rot : int, default None
Rotation for ticks (xticks for vertical, yticks for horizontal plots)
fontsize : int, default None
Font size for xticks and yticks
colormap : str or matplotlib colormap object, default None
Colormap to select colors from. If string, load colormap with that name
from matplotlib.
colorbar : boolean, optional
If True, plot colorbar (only relevant for 'scatter' and 'hexbin' plots)
position : float
Specify relative alignments for bar plot layout.
From 0 (left/bottom-end) to 1 (right/top-end). Default is 0.5 (center)
layout : tuple (optional)
(rows, columns) for the layout of the plot
table : boolean, Series or DataFrame, default False
If True, draw a table using the data in the DataFrame and the data will
be transposed to meet matplotlib's default layout.
If a Series or DataFrame is passed, use passed data to draw a table.
yerr : DataFrame, Series, array-like, dict and str
See :ref:`Plotting with Error Bars <visualization.errorbars>` for detail.
xerr : same types as yerr.
%(klass_unique)s
mark_right : boolean, default True
When using a secondary_y axis, automatically mark the column
labels with "(right)" in the legend
kwds : keywords
Options to pass to matplotlib plotting method
Returns
-------
axes : matplotlib.AxesSubplot or np.array of them
Notes
-----
- See matplotlib documentation online for more on this subject
- If `kind` = 'bar' or 'barh', you can specify relative alignments
for bar plot layout by `position` keyword.
From 0 (left/bottom-end) to 1 (right/top-end). Default is 0.5 (center)
%(klass_note)s
"""
@Appender(_shared_docs['plot'] % _shared_doc_df_kwargs)
def plot_frame(data, x=None, y=None, kind='line', ax=None, # Dataframe unique
subplots=False, sharex=None, sharey=False, layout=None, # Dataframe unique
figsize=None, use_index=True, title=None, grid=None,
legend=True, style=None, logx=False, logy=False, loglog=False,
xticks=None, yticks=None, xlim=None, ylim=None,
rot=None, fontsize=None, colormap=None, table=False,
yerr=None, xerr=None,
secondary_y=False, sort_columns=False, # Dataframe unique
**kwds):
return _plot(data, kind=kind, x=x, y=y, ax=ax,
subplots=subplots, sharex=sharex, sharey=sharey,
layout=layout, figsize=figsize, use_index=use_index,
title=title, grid=grid, legend=legend,
style=style, logx=logx, logy=logy, loglog=loglog,
xticks=xticks, yticks=yticks, xlim=xlim, ylim=ylim,
rot=rot, fontsize=fontsize, colormap=colormap, table=table,
yerr=yerr, xerr=xerr,
secondary_y=secondary_y, sort_columns=sort_columns,
**kwds)
@Appender(_shared_docs['plot'] % _shared_doc_series_kwargs)
def plot_series(data, kind='line', ax=None, # Series unique
figsize=None, use_index=True, title=None, grid=None,
legend=False, style=None, logx=False, logy=False, loglog=False,
xticks=None, yticks=None, xlim=None, ylim=None,
rot=None, fontsize=None, colormap=None, table=False,
yerr=None, xerr=None,
label=None, secondary_y=False, # Series unique
**kwds):
import matplotlib.pyplot as plt
"""
If no axes is specified, check whether there are existing figures
If there is no existing figures, _gca() will
create a figure with the default figsize, causing the figsize=parameter to
be ignored.
"""
if ax is None and len(plt.get_fignums()) > 0:
ax = _gca()
ax = MPLPlot._get_ax_layer(ax)
return _plot(data, kind=kind, ax=ax,
figsize=figsize, use_index=use_index, title=title,
grid=grid, legend=legend,
style=style, logx=logx, logy=logy, loglog=loglog,
xticks=xticks, yticks=yticks, xlim=xlim, ylim=ylim,
rot=rot, fontsize=fontsize, colormap=colormap, table=table,
yerr=yerr, xerr=xerr,
label=label, secondary_y=secondary_y,
**kwds)
_shared_docs['boxplot'] = """
Make a box plot from DataFrame column optionally grouped by some columns or
other inputs
Parameters
----------
data : the pandas object holding the data
column : column name or list of names, or vector
Can be any valid input to groupby
by : string or sequence
Column in the DataFrame to group by
ax : Matplotlib axes object, optional
fontsize : int or string
rot : label rotation angle
figsize : A tuple (width, height) in inches
grid : Setting this to True will show the grid
layout : tuple (optional)
(rows, columns) for the layout of the plot
return_type : {'axes', 'dict', 'both'}, default 'dict'
The kind of object to return. 'dict' returns a dictionary
whose values are the matplotlib Lines of the boxplot;
'axes' returns the matplotlib axes the boxplot is drawn on;
'both' returns a namedtuple with the axes and dict.
When grouping with ``by``, a dict mapping columns to ``return_type``
is returned.
kwds : other plotting keyword arguments to be passed to matplotlib boxplot
function
Returns
-------
lines : dict
ax : matplotlib Axes
(ax, lines): namedtuple
Notes
-----
Use ``return_type='dict'`` when you want to tweak the appearance
of the lines after plotting. In this case a dict containing the Lines
making up the boxes, caps, fliers, medians, and whiskers is returned.
"""
@Appender(_shared_docs['boxplot'] % _shared_doc_kwargs)
def boxplot(data, column=None, by=None, ax=None, fontsize=None,
rot=0, grid=True, figsize=None, layout=None, return_type=None,
**kwds):
# validate return_type:
if return_type not in BoxPlot._valid_return_types:
raise ValueError("return_type must be {None, 'axes', 'dict', 'both'}")
from pandas import Series, DataFrame
if isinstance(data, Series):
data = DataFrame({'x': data})
column = 'x'
def _get_colors():
return _get_standard_colors(color=kwds.get('color'), num_colors=1)
def maybe_color_bp(bp):
if 'color' not in kwds:
from matplotlib.artist import setp
setp(bp['boxes'], color=colors[0], alpha=1)
setp(bp['whiskers'], color=colors[0], alpha=1)
setp(bp['medians'], color=colors[2], alpha=1)
def plot_group(keys, values, ax):
keys = [com.pprint_thing(x) for x in keys]
values = [remove_na(v) for v in values]
bp = ax.boxplot(values, **kwds)
if kwds.get('vert', 1):
ax.set_xticklabels(keys, rotation=rot, fontsize=fontsize)
else:
ax.set_yticklabels(keys, rotation=rot, fontsize=fontsize)
maybe_color_bp(bp)
# Return axes in multiplot case, maybe revisit later # 985
if return_type == 'dict':
return bp
elif return_type == 'both':
return BoxPlot.BP(ax=ax, lines=bp)
else:
return ax
colors = _get_colors()
if column is None:
columns = None
else:
if isinstance(column, (list, tuple)):
columns = column
else:
columns = [column]
if by is not None:
result = _grouped_plot_by_column(plot_group, data, columns=columns,
by=by, grid=grid, figsize=figsize,
ax=ax, layout=layout,
return_type=return_type)
else:
if layout is not None:
raise ValueError("The 'layout' keyword is not supported when "
"'by' is None")
if return_type is None:
msg = ("\nThe default value for 'return_type' will change to "
"'axes' in a future release.\n To use the future behavior "
"now, set return_type='axes'.\n To keep the previous "
"behavior and silence this warning, set "
"return_type='dict'.")
warnings.warn(msg, FutureWarning)
return_type = 'dict'
if ax is None:
ax = _gca()
data = data._get_numeric_data()
if columns is None:
columns = data.columns
else:
data = data[columns]
result = plot_group(columns, data.values.T, ax)
ax.grid(grid)
return result
def format_date_labels(ax, rot):
# mini version of autofmt_xdate
try:
for label in ax.get_xticklabels():
label.set_ha('right')
label.set_rotation(rot)
fig = ax.get_figure()
fig.subplots_adjust(bottom=0.2)
except Exception: # pragma: no cover
pass
def scatter_plot(data, x, y, by=None, ax=None, figsize=None, grid=False,
**kwargs):
"""
Make a scatter plot from two DataFrame columns
Parameters
----------
data : DataFrame
x : Column name for the x-axis values
y : Column name for the y-axis values
ax : Matplotlib axis object
figsize : A tuple (width, height) in inches
grid : Setting this to True will show the grid
kwargs : other plotting keyword arguments
To be passed to scatter function
Returns
-------
fig : matplotlib.Figure
"""
import matplotlib.pyplot as plt
# workaround because `c='b'` is hardcoded in matplotlibs scatter method
kwargs.setdefault('c', plt.rcParams['patch.facecolor'])
def plot_group(group, ax):
xvals = group[x].values
yvals = group[y].values
ax.scatter(xvals, yvals, **kwargs)
ax.grid(grid)
if by is not None:
fig = _grouped_plot(plot_group, data, by=by, figsize=figsize, ax=ax)
else:
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(111)
else:
fig = ax.get_figure()
plot_group(data, ax)
ax.set_ylabel(com.pprint_thing(y))
ax.set_xlabel(com.pprint_thing(x))
ax.grid(grid)
return fig
def hist_frame(data, column=None, by=None, grid=True, xlabelsize=None,
xrot=None, ylabelsize=None, yrot=None, ax=None, sharex=False,
sharey=False, figsize=None, layout=None, bins=10, **kwds):
"""
Draw histogram of the DataFrame's series using matplotlib / pylab.
Parameters
----------
data : DataFrame
column : string or sequence
If passed, will be used to limit data to a subset of columns
by : object, optional
If passed, then used to form histograms for separate groups
grid : boolean, default True
Whether to show axis grid lines
xlabelsize : int, default None
If specified changes the x-axis label size
xrot : float, default None
rotation of x axis labels
ylabelsize : int, default None
If specified changes the y-axis label size
yrot : float, default None
rotation of y axis labels
ax : matplotlib axes object, default None
sharex : boolean, default True if ax is None else False
In case subplots=True, share x axis and set some x axis labels to
invisible; defaults to True if ax is None otherwise False if an ax
is passed in; Be aware, that passing in both an ax and sharex=True
will alter all x axis labels for all subplots in a figure!
sharey : boolean, default False
In case subplots=True, share y axis and set some y axis labels to
invisible
figsize : tuple
The size of the figure to create in inches by default
layout: (optional) a tuple (rows, columns) for the layout of the histograms
bins: integer, default 10
Number of histogram bins to be used
kwds : other plotting keyword arguments
To be passed to hist function
"""
if by is not None:
axes = grouped_hist(data, column=column, by=by, ax=ax, grid=grid, figsize=figsize,
sharex=sharex, sharey=sharey, layout=layout, bins=bins,
xlabelsize=xlabelsize, xrot=xrot, ylabelsize=ylabelsize, yrot=yrot,
**kwds)
return axes
if column is not None:
if not isinstance(column, (list, np.ndarray, Index)):
column = [column]
data = data[column]
data = data._get_numeric_data()
naxes = len(data.columns)
fig, axes = _subplots(naxes=naxes, ax=ax, squeeze=False,
sharex=sharex, sharey=sharey, figsize=figsize,
layout=layout)
_axes = _flatten(axes)
for i, col in enumerate(com._try_sort(data.columns)):
ax = _axes[i]
ax.hist(data[col].dropna().values, bins=bins, **kwds)
ax.set_title(col)
ax.grid(grid)
_set_ticks_props(axes, xlabelsize=xlabelsize, xrot=xrot,
ylabelsize=ylabelsize, yrot=yrot)
fig.subplots_adjust(wspace=0.3, hspace=0.3)
return axes
def hist_series(self, by=None, ax=None, grid=True, xlabelsize=None,
xrot=None, ylabelsize=None, yrot=None, figsize=None, bins=10, **kwds):
"""
Draw histogram of the input series using matplotlib
Parameters
----------
by : object, optional
If passed, then used to form histograms for separate groups
ax : matplotlib axis object
If not passed, uses gca()
grid : boolean, default True
Whether to show axis grid lines
xlabelsize : int, default None
If specified changes the x-axis label size
xrot : float, default None
rotation of x axis labels
ylabelsize : int, default None
If specified changes the y-axis label size
yrot : float, default None
rotation of y axis labels
figsize : tuple, default None
figure size in inches by default
bins: integer, default 10
Number of histogram bins to be used
kwds : keywords
To be passed to the actual plotting function
Notes
-----
See matplotlib documentation online for more on this
"""
import matplotlib.pyplot as plt
if by is None:
if kwds.get('layout', None) is not None:
raise ValueError("The 'layout' keyword is not supported when "
"'by' is None")
# hack until the plotting interface is a bit more unified
fig = kwds.pop('figure', plt.gcf() if plt.get_fignums() else
plt.figure(figsize=figsize))
if (figsize is not None and tuple(figsize) !=
tuple(fig.get_size_inches())):
fig.set_size_inches(*figsize, forward=True)
if ax is None:
ax = fig.gca()
elif ax.get_figure() != fig:
raise AssertionError('passed axis not bound to passed figure')
values = self.dropna().values
ax.hist(values, bins=bins, **kwds)
ax.grid(grid)
axes = np.array([ax])
_set_ticks_props(axes, xlabelsize=xlabelsize, xrot=xrot,
ylabelsize=ylabelsize, yrot=yrot)
else:
if 'figure' in kwds:
raise ValueError("Cannot pass 'figure' when using the "
"'by' argument, since a new 'Figure' instance "
"will be created")
axes = grouped_hist(self, by=by, ax=ax, grid=grid, figsize=figsize, bins=bins,
xlabelsize=xlabelsize, xrot=xrot, ylabelsize=ylabelsize, yrot=yrot,
**kwds)
if hasattr(axes, 'ndim'):
if axes.ndim == 1 and len(axes) == 1:
return axes[0]
return axes
def grouped_hist(data, column=None, by=None, ax=None, bins=50, figsize=None,
layout=None, sharex=False, sharey=False, rot=90, grid=True,
xlabelsize=None, xrot=None, ylabelsize=None, yrot=None,
**kwargs):
"""
Grouped histogram
Parameters
----------
data: Series/DataFrame
column: object, optional
by: object, optional
ax: axes, optional
bins: int, default 50
figsize: tuple, optional
layout: optional
sharex: boolean, default False
sharey: boolean, default False
rot: int, default 90
grid: bool, default True
kwargs: dict, keyword arguments passed to matplotlib.Axes.hist
Returns
-------
axes: collection of Matplotlib Axes
"""
def plot_group(group, ax):
ax.hist(group.dropna().values, bins=bins, **kwargs)
xrot = xrot or rot
fig, axes = _grouped_plot(plot_group, data, column=column,
by=by, sharex=sharex, sharey=sharey, ax=ax,
figsize=figsize, layout=layout, rot=rot)
_set_ticks_props(axes, xlabelsize=xlabelsize, xrot=xrot,
ylabelsize=ylabelsize, yrot=yrot)
fig.subplots_adjust(bottom=0.15, top=0.9, left=0.1, right=0.9,
hspace=0.5, wspace=0.3)
return axes
def boxplot_frame_groupby(grouped, subplots=True, column=None, fontsize=None,
rot=0, grid=True, ax=None, figsize=None,
layout=None, **kwds):
"""
Make box plots from DataFrameGroupBy data.
Parameters
----------
grouped : Grouped DataFrame
subplots :
* ``False`` - no subplots will be used
* ``True`` - create a subplot for each group
column : column name or list of names, or vector
Can be any valid input to groupby
fontsize : int or string
rot : label rotation angle
grid : Setting this to True will show the grid
ax : Matplotlib axis object, default None
figsize : A tuple (width, height) in inches
layout : tuple (optional)
(rows, columns) for the layout of the plot
kwds : other plotting keyword arguments to be passed to matplotlib boxplot
function
Returns
-------
dict of key/value = group key/DataFrame.boxplot return value
or DataFrame.boxplot return value in case subplots=figures=False
Examples
--------
>>> import pandas
>>> import numpy as np
>>> import itertools
>>>
>>> tuples = [t for t in itertools.product(range(1000), range(4))]
>>> index = pandas.MultiIndex.from_tuples(tuples, names=['lvl0', 'lvl1'])
>>> data = np.random.randn(len(index),4)
>>> df = pandas.DataFrame(data, columns=list('ABCD'), index=index)
>>>
>>> grouped = df.groupby(level='lvl1')
>>> boxplot_frame_groupby(grouped)
>>>
>>> grouped = df.unstack(level='lvl1').groupby(level=0, axis=1)
>>> boxplot_frame_groupby(grouped, subplots=False)
"""
if subplots is True:
naxes = len(grouped)
fig, axes = _subplots(naxes=naxes, squeeze=False,
ax=ax, sharex=False, sharey=True, figsize=figsize,
layout=layout)
axes = _flatten(axes)
ret = compat.OrderedDict()
for (key, group), ax in zip(grouped, axes):
d = group.boxplot(ax=ax, column=column, fontsize=fontsize,
rot=rot, grid=grid, **kwds)
ax.set_title(com.pprint_thing(key))
ret[key] = d
fig.subplots_adjust(bottom=0.15, top=0.9, left=0.1, right=0.9, wspace=0.2)
else:
from pandas.tools.merge import concat
keys, frames = zip(*grouped)
if grouped.axis == 0:
df = concat(frames, keys=keys, axis=1)
else:
if len(frames) > 1:
df = frames[0].join(frames[1::])
else:
df = frames[0]
ret = df.boxplot(column=column, fontsize=fontsize, rot=rot,
grid=grid, ax=ax, figsize=figsize, layout=layout, **kwds)
return ret
def _grouped_plot(plotf, data, column=None, by=None, numeric_only=True,
figsize=None, sharex=True, sharey=True, layout=None,
rot=0, ax=None, **kwargs):
from pandas import DataFrame
if figsize == 'default':
# allowed to specify mpl default with 'default'
warnings.warn("figsize='default' is deprecated. Specify figure"
"size by tuple instead", FutureWarning)
figsize = None
grouped = data.groupby(by)
if column is not None:
grouped = grouped[column]
naxes = len(grouped)
fig, axes = _subplots(naxes=naxes, figsize=figsize,
sharex=sharex, sharey=sharey, ax=ax,
layout=layout)
_axes = _flatten(axes)
for i, (key, group) in enumerate(grouped):
ax = _axes[i]
if numeric_only and isinstance(group, DataFrame):
group = group._get_numeric_data()
plotf(group, ax, **kwargs)
ax.set_title(com.pprint_thing(key))
return fig, axes
def _grouped_plot_by_column(plotf, data, columns=None, by=None,
numeric_only=True, grid=False,
figsize=None, ax=None, layout=None, return_type=None,
**kwargs):
grouped = data.groupby(by)
if columns is None:
if not isinstance(by, (list, tuple)):
by = [by]
columns = data._get_numeric_data().columns.difference(by)
naxes = len(columns)
fig, axes = _subplots(naxes=naxes, sharex=True, sharey=True,
figsize=figsize, ax=ax, layout=layout)
_axes = _flatten(axes)
result = compat.OrderedDict()
for i, col in enumerate(columns):
ax = _axes[i]
gp_col = grouped[col]
keys, values = zip(*gp_col)
re_plotf = plotf(keys, values, ax, **kwargs)
ax.set_title(col)
ax.set_xlabel(com.pprint_thing(by))
result[col] = re_plotf
ax.grid(grid)
# Return axes in multiplot case, maybe revisit later # 985
if return_type is None:
result = axes
byline = by[0] if len(by) == 1 else by
fig.suptitle('Boxplot grouped by %s' % byline)
fig.subplots_adjust(bottom=0.15, top=0.9, left=0.1, right=0.9, wspace=0.2)
return result
def table(ax, data, rowLabels=None, colLabels=None,
**kwargs):
"""
Helper function to convert DataFrame and Series to matplotlib.table
Parameters
----------
`ax`: Matplotlib axes object
`data`: DataFrame or Series
data for table contents
`kwargs`: keywords, optional
keyword arguments which passed to matplotlib.table.table.
If `rowLabels` or `colLabels` is not specified, data index or column name will be used.
Returns
-------
matplotlib table object
"""
from pandas import DataFrame
if isinstance(data, Series):
data = DataFrame(data, columns=[data.name])
elif isinstance(data, DataFrame):
pass
else:
raise ValueError('Input data must be DataFrame or Series')
if rowLabels is None:
rowLabels = data.index
if colLabels is None:
colLabels = data.columns
cellText = data.values
import matplotlib.table
table = matplotlib.table.table(ax, cellText=cellText,
rowLabels=rowLabels, colLabels=colLabels, **kwargs)
return table
def _get_layout(nplots, layout=None, layout_type='box'):
if layout is not None:
if not isinstance(layout, (tuple, list)) or len(layout) != 2:
raise ValueError('Layout must be a tuple of (rows, columns)')
nrows, ncols = layout
# Python 2 compat
ceil_ = lambda x: int(ceil(x))
if nrows == -1 and ncols >0:
layout = nrows, ncols = (ceil_(float(nplots) / ncols), ncols)
elif ncols == -1 and nrows > 0:
layout = nrows, ncols = (nrows, ceil_(float(nplots) / nrows))
elif ncols <= 0 and nrows <= 0:
msg = "At least one dimension of layout must be positive"
raise ValueError(msg)
if nrows * ncols < nplots:
raise ValueError('Layout of %sx%s must be larger than required size %s' %
(nrows, ncols, nplots))
return layout
if layout_type == 'single':
return (1, 1)
elif layout_type == 'horizontal':
return (1, nplots)
elif layout_type == 'vertical':
return (nplots, 1)
layouts = {1: (1, 1), 2: (1, 2), 3: (2, 2), 4: (2, 2)}
try:
return layouts[nplots]
except KeyError:
k = 1
while k ** 2 < nplots:
k += 1
if (k - 1) * k >= nplots:
return k, (k - 1)
else:
return k, k
# copied from matplotlib/pyplot.py and modified for pandas.plotting
def _subplots(naxes=None, sharex=False, sharey=False, squeeze=True,
subplot_kw=None, ax=None, layout=None, layout_type='box', **fig_kw):
"""Create a figure with a set of subplots already made.
This utility wrapper makes it convenient to create common layouts of
subplots, including the enclosing figure object, in a single call.
Keyword arguments:
naxes : int
Number of required axes. Exceeded axes are set invisible. Default is
nrows * ncols.
sharex : bool
If True, the X axis will be shared amongst all subplots.
sharey : bool
If True, the Y axis will be shared amongst all subplots.
squeeze : bool
If True, extra dimensions are squeezed out from the returned axis object:
- if only one subplot is constructed (nrows=ncols=1), the resulting
single Axis object is returned as a scalar.
- for Nx1 or 1xN subplots, the returned object is a 1-d numpy object
array of Axis objects are returned as numpy 1-d arrays.
- for NxM subplots with N>1 and M>1 are returned as a 2d array.
If False, no squeezing at all is done: the returned axis object is always
a 2-d array containing Axis instances, even if it ends up being 1x1.
subplot_kw : dict
Dict with keywords passed to the add_subplot() call used to create each
subplots.
ax : Matplotlib axis object, optional
layout : tuple
Number of rows and columns of the subplot grid.
If not specified, calculated from naxes and layout_type
layout_type : {'box', 'horziontal', 'vertical'}, default 'box'
Specify how to layout the subplot grid.
fig_kw : Other keyword arguments to be passed to the figure() call.
Note that all keywords not recognized above will be
automatically included here.
Returns:
fig, ax : tuple
- fig is the Matplotlib Figure object
- ax can be either a single axis object or an array of axis objects if
more than one subplot was created. The dimensions of the resulting array
can be controlled with the squeeze keyword, see above.
**Examples:**
x = np.linspace(0, 2*np.pi, 400)
y = np.sin(x**2)
# Just a figure and one subplot
f, ax = plt.subplots()
ax.plot(x, y)
ax.set_title('Simple plot')
# Two subplots, unpack the output array immediately
f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
ax1.plot(x, y)
ax1.set_title('Sharing Y axis')
ax2.scatter(x, y)
# Four polar axes
plt.subplots(2, 2, subplot_kw=dict(polar=True))
"""
import matplotlib.pyplot as plt
if subplot_kw is None:
subplot_kw = {}
if ax is None:
fig = plt.figure(**fig_kw)
else:
if com.is_list_like(ax):
ax = _flatten(ax)
if layout is not None:
warnings.warn("When passing multiple axes, layout keyword is ignored", UserWarning)
if sharex or sharey:
warnings.warn("When passing multiple axes, sharex and sharey are ignored."
"These settings must be specified when creating axes", UserWarning)
if len(ax) == naxes:
fig = ax[0].get_figure()
return fig, ax
else:
raise ValueError("The number of passed axes must be {0}, the same as "
"the output plot".format(naxes))
fig = ax.get_figure()
# if ax is passed and a number of subplots is 1, return ax as it is
if naxes == 1:
if squeeze:
return fig, ax
else:
return fig, _flatten(ax)
else:
warnings.warn("To output multiple subplots, the figure containing the passed axes "
"is being cleared", UserWarning)
fig.clear()
nrows, ncols = _get_layout(naxes, layout=layout, layout_type=layout_type)
nplots = nrows * ncols
# Create empty object array to hold all axes. It's easiest to make it 1-d
# so we can just append subplots upon creation, and then
axarr = np.empty(nplots, dtype=object)
# Create first subplot separately, so we can share it if requested
ax0 = fig.add_subplot(nrows, ncols, 1, **subplot_kw)
if sharex:
subplot_kw['sharex'] = ax0
if sharey:
subplot_kw['sharey'] = ax0
axarr[0] = ax0
# Note off-by-one counting because add_subplot uses the MATLAB 1-based
# convention.
for i in range(1, nplots):
kwds = subplot_kw.copy()
# Set sharex and sharey to None for blank/dummy axes, these can
# interfere with proper axis limits on the visible axes if
# they share axes e.g. issue #7528
if i >= naxes:
kwds['sharex'] = None
kwds['sharey'] = None
ax = fig.add_subplot(nrows, ncols, i + 1, **kwds)
axarr[i] = ax
if naxes != nplots:
for ax in axarr[naxes:]:
ax.set_visible(False)
_handle_shared_axes(axarr, nplots, naxes, nrows, ncols, sharex, sharey)
if squeeze:
# Reshape the array to have the final desired dimension (nrow,ncol),
# though discarding unneeded dimensions that equal 1. If we only have
# one subplot, just return it instead of a 1-element array.
if nplots == 1:
axes = axarr[0]
else:
axes = axarr.reshape(nrows, ncols).squeeze()
else:
# returned axis array will be always 2-d, even if nrows=ncols=1
axes = axarr.reshape(nrows, ncols)
return fig, axes
def _remove_xlabels_from_axis(ax):
for label in ax.get_xticklabels():
label.set_visible(False)
try:
# set_visible will not be effective if
# minor axis has NullLocator and NullFormattor (default)
import matplotlib.ticker as ticker
if isinstance(ax.xaxis.get_minor_locator(), ticker.NullLocator):
ax.xaxis.set_minor_locator(ticker.AutoLocator())
if isinstance(ax.xaxis.get_minor_formatter(), ticker.NullFormatter):
ax.xaxis.set_minor_formatter(ticker.FormatStrFormatter(''))
for label in ax.get_xticklabels(minor=True):
label.set_visible(False)
except Exception: # pragma no cover
pass
ax.xaxis.get_label().set_visible(False)
def _remove_ylables_from_axis(ax):
for label in ax.get_yticklabels():
label.set_visible(False)
try:
import matplotlib.ticker as ticker
if isinstance(ax.yaxis.get_minor_locator(), ticker.NullLocator):
ax.yaxis.set_minor_locator(ticker.AutoLocator())
if isinstance(ax.yaxis.get_minor_formatter(), ticker.NullFormatter):
ax.yaxis.set_minor_formatter(ticker.FormatStrFormatter(''))
for label in ax.get_yticklabels(minor=True):
label.set_visible(False)
except Exception: # pragma no cover
pass
ax.yaxis.get_label().set_visible(False)
def _handle_shared_axes(axarr, nplots, naxes, nrows, ncols, sharex, sharey):
if nplots > 1:
# first find out the ax layout, so that we can correctly handle 'gaps"
layout = np.zeros((nrows+1,ncols+1), dtype=np.bool)
for ax in axarr:
layout[ax.rowNum, ax.colNum] = ax.get_visible()
if sharex and nrows > 1:
for ax in axarr:
# only the last row of subplots should get x labels -> all other off
# layout handles the case that the subplot is the last in the column,
# because below is no subplot/gap.
if not layout[ax.rowNum+1, ax.colNum]:
continue
_remove_xlabels_from_axis(ax)
if sharey and ncols > 1:
for ax in axarr:
# only the first column should get y labels -> set all other to off
# as we only have labels in teh first column and we always have a subplot there,
# we can skip the layout test
if ax.is_first_col():
continue
_remove_ylables_from_axis(ax)
def _flatten(axes):
if not com.is_list_like(axes):
return np.array([axes])
elif isinstance(axes, (np.ndarray, Index)):
return axes.ravel()
return np.array(axes)
def _get_all_lines(ax):
lines = ax.get_lines()
if hasattr(ax, 'right_ax'):
lines += ax.right_ax.get_lines()
if hasattr(ax, 'left_ax'):
lines += ax.left_ax.get_lines()
return lines
def _get_xlim(lines):
left, right = np.inf, -np.inf
for l in lines:
x = l.get_xdata(orig=False)
left = min(x[0], left)
right = max(x[-1], right)
return left, right
def _set_ticks_props(axes, xlabelsize=None, xrot=None,
ylabelsize=None, yrot=None):
import matplotlib.pyplot as plt
for ax in _flatten(axes):
if xlabelsize is not None:
plt.setp(ax.get_xticklabels(), fontsize=xlabelsize)
if xrot is not None:
plt.setp(ax.get_xticklabels(), rotation=xrot)
if ylabelsize is not None:
plt.setp(ax.get_yticklabels(), fontsize=ylabelsize)
if yrot is not None:
plt.setp(ax.get_yticklabels(), rotation=yrot)
return axes
if __name__ == '__main__':
# import pandas.rpy.common as com
# sales = com.load_data('sanfrancisco.home.sales', package='nutshell')
# top10 = sales['zip'].value_counts()[:10].index
# sales2 = sales[sales.zip.isin(top10)]
# _ = scatter_plot(sales2, 'squarefeet', 'price', by='zip')
# plt.show()
import matplotlib.pyplot as plt
import pandas.tools.plotting as plots
import pandas.core.frame as fr
reload(plots)
reload(fr)
from pandas.core.frame import DataFrame
data = DataFrame([[3, 6, -5], [4, 8, 2], [4, 9, -6],
[4, 9, -3], [2, 5, -1]],
columns=['A', 'B', 'C'])
data.plot(kind='barh', stacked=True)
plt.show()<|fim▁end|> | values = values[~com.isnull(values)]
|
<|file_name|>test_indexing.py<|end_file_name|><|fim▁begin|>from datetime import (
datetime,
timedelta,
)
import re
import numpy as np
import pytest
from pandas import (
Index,
NaT,
Timedelta,
TimedeltaIndex,
Timestamp,
notna,
timedelta_range,
to_timedelta,
)
import pandas._testing as tm
class TestGetItem:
def test_ellipsis(self):
# GH#21282
idx = timedelta_range("1 day", "31 day", freq="D", name="idx")
result = idx[...]
assert result.equals(idx)
assert result is not idx
def test_getitem_slice_keeps_name(self):
# GH#4226
tdi = timedelta_range("1d", "5d", freq="H", name="timebucket")
assert tdi[1:].name == tdi.name
def test_getitem(self):
idx1 = timedelta_range("1 day", "31 day", freq="D", name="idx")
for idx in [idx1]:
result = idx[0]
assert result == Timedelta("1 day")
result = idx[0:5]
expected = timedelta_range("1 day", "5 day", freq="D", name="idx")
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx[0:10:2]
expected = timedelta_range("1 day", "9 day", freq="2D", name="idx")
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx[-20:-5:3]
expected = timedelta_range("12 day", "24 day", freq="3D", name="idx")
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx[4::-1]
expected = TimedeltaIndex(
["5 day", "4 day", "3 day", "2 day", "1 day"], freq="-1D", name="idx"
)
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
@pytest.mark.parametrize(
"key",
[
Timestamp("1970-01-01"),
Timestamp("1970-01-02"),
datetime(1970, 1, 1),
Timestamp("1970-01-03").to_datetime64(),
# non-matching NA values
np.datetime64("NaT"),
],
)
def test_timestamp_invalid_key(self, key):
# GH#20464
tdi = timedelta_range(0, periods=10)
with pytest.raises(KeyError, match=re.escape(repr(key))):
tdi.get_loc(key)
class TestGetLoc:
@pytest.mark.filterwarnings("ignore:Passing method:FutureWarning")
def test_get_loc(self):
idx = to_timedelta(["0 days", "1 days", "2 days"])
for method in [None, "pad", "backfill", "nearest"]:
assert idx.get_loc(idx[1], method) == 1
assert idx.get_loc(idx[1].to_pytimedelta(), method) == 1
assert idx.get_loc(str(idx[1]), method) == 1
assert idx.get_loc(idx[1], "pad", tolerance=Timedelta(0)) == 1
assert idx.get_loc(idx[1], "pad", tolerance=np.timedelta64(0, "s")) == 1
assert idx.get_loc(idx[1], "pad", tolerance=timedelta(0)) == 1
with pytest.raises(ValueError, match="unit abbreviation w/o a number"):
idx.get_loc(idx[1], method="nearest", tolerance="foo")
with pytest.raises(ValueError, match="tolerance size must match"):
idx.get_loc(
idx[1],
method="nearest",
tolerance=[
Timedelta(0).to_timedelta64(),
Timedelta(0).to_timedelta64(),
],
)
for method, loc in [("pad", 1), ("backfill", 2), ("nearest", 1)]:
assert idx.get_loc("1 day 1 hour", method) == loc
# GH 16909
assert idx.get_loc(idx[1].to_timedelta64()) == 1
# GH 16896
assert idx.get_loc("0 days") == 0
def test_get_loc_nat(self):
tidx = TimedeltaIndex(["1 days 01:00:00", "NaT", "2 days 01:00:00"])
assert tidx.get_loc(NaT) == 1
assert tidx.get_loc(None) == 1
assert tidx.get_loc(float("nan")) == 1
assert tidx.get_loc(np.nan) == 1
class TestGetIndexer:
def test_get_indexer(self):
idx = to_timedelta(["0 days", "1 days", "2 days"])
tm.assert_numpy_array_equal(
idx.get_indexer(idx), np.array([0, 1, 2], dtype=np.intp)
)
target = to_timedelta(["-1 hour", "12 hours", "1 day 1 hour"])
tm.assert_numpy_array_equal(
idx.get_indexer(target, "pad"), np.array([-1, 0, 1], dtype=np.intp)
)
tm.assert_numpy_array_equal(
idx.get_indexer(target, "backfill"), np.array([0, 1, 2], dtype=np.intp)
)
tm.assert_numpy_array_equal(
idx.get_indexer(target, "nearest"), np.array([0, 1, 1], dtype=np.intp)
)
res = idx.get_indexer(target, "nearest", tolerance=Timedelta("1 hour"))
tm.assert_numpy_array_equal(res, np.array([0, -1, 1], dtype=np.intp))
class TestWhere:
def test_where_doesnt_retain_freq(self):
tdi = timedelta_range("1 day", periods=3, freq="D", name="idx")
cond = [True, True, False]
expected = TimedeltaIndex([tdi[0], tdi[1], tdi[0]], freq=None, name="idx")
result = tdi.where(cond, tdi[::-1])
tm.assert_index_equal(result, expected)
def test_where_invalid_dtypes(self):
tdi = timedelta_range("1 day", periods=3, freq="D", name="idx")
tail = tdi[2:].tolist()
i2 = Index([NaT, NaT] + tail)
mask = notna(i2)
expected = Index([NaT.value, NaT.value] + tail, dtype=object, name="idx")
assert isinstance(expected[0], int)
result = tdi.where(mask, i2.asi8)
tm.assert_index_equal(result, expected)
ts = i2 + Timestamp.now()
expected = Index([ts[0], ts[1]] + tail, dtype=object, name="idx")
result = tdi.where(mask, ts)
tm.assert_index_equal(result, expected)
per = (i2 + Timestamp.now()).to_period("D")
expected = Index([per[0], per[1]] + tail, dtype=object, name="idx")
result = tdi.where(mask, per)
tm.assert_index_equal(result, expected)
ts = Timestamp.now()
expected = Index([ts, ts] + tail, dtype=object, name="idx")
result = tdi.where(mask, ts)
tm.assert_index_equal(result, expected)
def test_where_mismatched_nat(self):
tdi = timedelta_range("1 day", periods=3, freq="D", name="idx")
cond = np.array([True, False, False])
dtnat = np.datetime64("NaT", "ns")
expected = Index([tdi[0], dtnat, dtnat], dtype=object, name="idx")
assert expected[2] is dtnat
result = tdi.where(cond, dtnat)
tm.assert_index_equal(result, expected)
class TestTake:
def test_take(self):
# GH 10295
idx1 = timedelta_range("1 day", "31 day", freq="D", name="idx")
for idx in [idx1]:
result = idx.take([0])
assert result == Timedelta("1 day")
result = idx.take([-1])
assert result == Timedelta("31 day")
result = idx.take([0, 1, 2])
expected = timedelta_range("1 day", "3 day", freq="D", name="idx")
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx.take([0, 2, 4])
expected = timedelta_range("1 day", "5 day", freq="2D", name="idx")
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx.take([7, 4, 1])
expected = timedelta_range("8 day", "2 day", freq="-3D", name="idx")
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx.take([3, 2, 5])
expected = TimedeltaIndex(["4 day", "3 day", "6 day"], name="idx")
tm.assert_index_equal(result, expected)
assert result.freq is None
result = idx.take([-3, 2, 5])
expected = TimedeltaIndex(["29 day", "3 day", "6 day"], name="idx")
tm.assert_index_equal(result, expected)
assert result.freq is None
def test_take_invalid_kwargs(self):
idx = timedelta_range("1 day", "31 day", freq="D", name="idx")
indices = [1, 6, 5, 9, 10, 13, 15, 3]
msg = r"take\(\) got an unexpected keyword argument 'foo'"
with pytest.raises(TypeError, match=msg):
idx.take(indices, foo=2)
msg = "the 'out' parameter is not supported"
with pytest.raises(ValueError, match=msg):
idx.take(indices, out=indices)
msg = "the 'mode' parameter is not supported"
with pytest.raises(ValueError, match=msg):
idx.take(indices, mode="clip")
def test_take_equiv_getitem(self):
tds = ["1day 02:00:00", "1 day 04:00:00", "1 day 10:00:00"]
idx = timedelta_range(start="1d", end="2d", freq="H", name="idx")
expected = TimedeltaIndex(tds, freq=None, name="idx")
taken1 = idx.take([2, 4, 10])
taken2 = idx[[2, 4, 10]]
for taken in [taken1, taken2]:
tm.assert_index_equal(taken, expected)
assert isinstance(taken, TimedeltaIndex)<|fim▁hole|> assert taken.freq is None
assert taken.name == expected.name
def test_take_fill_value(self):
# GH 12631
idx = TimedeltaIndex(["1 days", "2 days", "3 days"], name="xxx")
result = idx.take(np.array([1, 0, -1]))
expected = TimedeltaIndex(["2 days", "1 days", "3 days"], name="xxx")
tm.assert_index_equal(result, expected)
# fill_value
result = idx.take(np.array([1, 0, -1]), fill_value=True)
expected = TimedeltaIndex(["2 days", "1 days", "NaT"], name="xxx")
tm.assert_index_equal(result, expected)
# allow_fill=False
result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True)
expected = TimedeltaIndex(["2 days", "1 days", "3 days"], name="xxx")
tm.assert_index_equal(result, expected)
msg = (
"When allow_fill=True and fill_value is not None, "
"all indices must be >= -1"
)
with pytest.raises(ValueError, match=msg):
idx.take(np.array([1, 0, -2]), fill_value=True)
with pytest.raises(ValueError, match=msg):
idx.take(np.array([1, 0, -5]), fill_value=True)
msg = "index -5 is out of bounds for (axis 0 with )?size 3"
with pytest.raises(IndexError, match=msg):
idx.take(np.array([1, -5]))
class TestMaybeCastSliceBound:
@pytest.fixture(params=["increasing", "decreasing", None])
def monotonic(self, request):
return request.param
@pytest.fixture
def tdi(self, monotonic):
tdi = timedelta_range("1 Day", periods=10)
if monotonic == "decreasing":
tdi = tdi[::-1]
elif monotonic is None:
taker = np.arange(10, dtype=np.intp)
np.random.shuffle(taker)
tdi = tdi.take(taker)
return tdi
def test_maybe_cast_slice_bound_invalid_str(self, tdi):
# test the low-level _maybe_cast_slice_bound and that we get the
# expected exception+message all the way up the stack
msg = (
"cannot do slice indexing on TimedeltaIndex with these "
r"indexers \[foo\] of type str"
)
with pytest.raises(TypeError, match=msg):
tdi._maybe_cast_slice_bound("foo", side="left")
with pytest.raises(TypeError, match=msg):
tdi.get_slice_bound("foo", side="left")
with pytest.raises(TypeError, match=msg):
tdi.slice_locs("foo", None, None)
def test_slice_invalid_str_with_timedeltaindex(
self, tdi, frame_or_series, indexer_sl
):
obj = frame_or_series(range(10), index=tdi)
msg = (
"cannot do slice indexing on TimedeltaIndex with these "
r"indexers \[foo\] of type str"
)
with pytest.raises(TypeError, match=msg):
indexer_sl(obj)["foo":]
with pytest.raises(TypeError, match=msg):
indexer_sl(obj)["foo":-1]
with pytest.raises(TypeError, match=msg):
indexer_sl(obj)[:"foo"]
with pytest.raises(TypeError, match=msg):
indexer_sl(obj)[tdi[0] : "foo"]<|fim▁end|> | |
<|file_name|>resolution_utils.ts<|end_file_name|><|fim▁begin|>import { TILE_SIZE } from '../consts';
export default class ResolutionUtils {
public scale : number;<|fim▁hole|> public canvas : {
width: number,
height: number
};
constructor() {
this.canvas = { width: 0, height: 0 };
this.recalculate();
}
/**
* Recalculate game size and etc
*/
public recalculate() : void {
const MAX_SIZE_USING_TILE_SIZE : number = this.tileCountForSmallSide * TILE_SIZE;
let smallestSideScale : number = this.smallestSideSize / MAX_SIZE_USING_TILE_SIZE;
this.scale = smallestSideScale;
this.canvas.width = Math.round(this.realLargestSideSize / smallestSideScale);
this.canvas.height = Math.round(this.smallestSideSize / this.scale);
}
public get tileCountForSmallSide() {
let currentTileCount : number = Math.round(this.smallestSideSize / TILE_SIZE);
switch(Math.round(window.devicePixelRatio)) {
case 1:
return 10;
default:
return Math.max(currentTileCount, 8);
}
}
public get maxCanvasSize() : number {
return Math.max(this.canvas.width, this.canvas.height);
}
private get smallestSideSize() : number {
return Math.min(window.innerWidth, window.innerHeight);
}
private get realLargestSideSize() : number {
return Math.max(window.innerWidth, window.innerHeight);
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>###############################################################################
##
## Copyright (C) 2011-2014, NYU-Poly.<|fim▁hole|>##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
name = "R"
identifier = "org.vistrails.vistrails.rpy"
version = "0.1.2"
old_identifiers = ["edu.utah.sci.vistrails.rpy"]<|fim▁end|> | ## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected] |
<|file_name|>confirm.component.ts<|end_file_name|><|fim▁begin|>import { Component, Inject, OnInit } from '@angular/core';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog';
interface IConfirmData {
title: string;
content: string;
extraData: {
npcSprite?: number;
okText?: string;
cancelText?: string;
};
}
@Component({<|fim▁hole|> styleUrls: ['./confirm.component.scss']
})
export class ConfirmModalComponent implements OnInit {
constructor(
public dialogRef: MatDialogRef<ConfirmModalComponent>,
@Inject(MAT_DIALOG_DATA) public data: IConfirmData
) { }
ngOnInit() {
}
}<|fim▁end|> | selector: 'app-confirm',
templateUrl: './confirm.component.html', |
<|file_name|>uploader.js<|end_file_name|><|fim▁begin|>/*!
* Uploader - Uploader library implements html5 file upload and provides multiple simultaneous, stable, fault tolerant and resumable uploads
* @version v0.5.6
* @author dolymood <[email protected]>
* @link https://github.com/simple-uploader/Uploader
* @license MIT
*/
!function(e){if("object"==typeof exports)module.exports=e();else if("function"==typeof define&&define.amd)define(e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.Uploader=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
var utils = _dereq_('./utils')
function Chunk (uploader, file, offset) {
utils.defineNonEnumerable(this, 'uploader', uploader)
utils.defineNonEnumerable(this, 'file', file)
utils.defineNonEnumerable(this, 'bytes', null)
this.offset = offset
this.tested = false
this.retries = 0
this.pendingRetry = false
this.preprocessState = 0
this.readState = 0
this.loaded = 0
this.total = 0
this.chunkSize = this.uploader.opts.chunkSize
this.startByte = this.offset * this.chunkSize
this.endByte = this.computeEndByte()
this.xhr = null
}
var STATUS = Chunk.STATUS = {
PENDING: 'pending',
UPLOADING: 'uploading',
READING: 'reading',
SUCCESS: 'success',
ERROR: 'error',
COMPLETE: 'complete',
PROGRESS: 'progress',
RETRY: 'retry'
}
utils.extend(Chunk.prototype, {
_event: function (evt, args) {
args = utils.toArray(arguments)
args.unshift(this)
this.file._chunkEvent.apply(this.file, args)
},
computeEndByte: function () {
var endByte = Math.min(this.file.size, (this.offset + 1) * this.chunkSize)
if (this.file.size - endByte < this.chunkSize && !this.uploader.opts.forceChunkSize) {
// The last chunk will be bigger than the chunk size,
// but less than 2 * this.chunkSize
endByte = this.file.size
}
return endByte
},
getParams: function () {
return {
chunkNumber: this.offset + 1,
chunkSize: this.uploader.opts.chunkSize,
currentChunkSize: this.endByte - this.startByte,
totalSize: this.file.size,
identifier: this.file.uniqueIdentifier,
filename: this.file.name,
relativePath: this.file.relativePath,
totalChunks: this.file.chunks.length
}
},
getTarget: function (target, params) {
if (!params.length) {
return target
}
if (target.indexOf('?') < 0) {
target += '?'
} else {
target += '&'
}
return target + params.join('&')
},
test: function () {
this.xhr = new XMLHttpRequest()
this.xhr.addEventListener('load', testHandler, false)
this.xhr.addEventListener('error', testHandler, false)
var testMethod = utils.evalOpts(this.uploader.opts.testMethod, this.file, this)
var data = this.prepareXhrRequest(testMethod, true)
this.xhr.send(data)
var $ = this
function testHandler (event) {
var status = $.status(true)
if (status === STATUS.ERROR) {
$._event(status, $.message())
$.uploader.uploadNextChunk()
} else if (status === STATUS.SUCCESS) {
$._event(status, $.message())
$.tested = true
} else if (!$.file.paused) {
// Error might be caused by file pause method
// Chunks does not exist on the server side
$.tested = true
$.send()
}
}
},
preprocessFinished: function () {
// Compute the endByte after the preprocess function to allow an
// implementer of preprocess to set the fileObj size
this.endByte = this.computeEndByte()
this.preprocessState = 2
this.send()
},
readFinished: function (bytes) {
this.readState = 2
this.bytes = bytes
this.send()
},
send: function () {
var preprocess = this.uploader.opts.preprocess
var read = this.uploader.opts.readFileFn
if (utils.isFunction(preprocess)) {
switch (this.preprocessState) {
case 0:
this.preprocessState = 1
preprocess(this)
return
case 1:
return
}
}
switch (this.readState) {
case 0:
this.readState = 1
read(this.file, this.file.fileType, this.startByte, this.endByte, this)
return
case 1:
return
}
if (this.uploader.opts.testChunks && !this.tested) {
this.test()
return
}
this.loaded = 0
this.total = 0
this.pendingRetry = false
// Set up request and listen for event
this.xhr = new XMLHttpRequest()
this.xhr.upload.addEventListener('progress', progressHandler, false)
this.xhr.addEventListener('load', doneHandler, false)
this.xhr.addEventListener('error', doneHandler, false)
var uploadMethod = utils.evalOpts(this.uploader.opts.uploadMethod, this.file, this)
var data = this.prepareXhrRequest(uploadMethod, false, this.uploader.opts.method, this.bytes)
this.xhr.send(data)
var $ = this
function progressHandler (event) {
if (event.lengthComputable) {
$.loaded = event.loaded
$.total = event.total
}
$._event(STATUS.PROGRESS, event)
}
function doneHandler (event) {
var msg = $.message()
$.processingResponse = true
$.uploader.opts.processResponse(msg, function (err, res) {
$.processingResponse = false
if (!$.xhr) {
return
}
$.processedState = {
err: err,
res: res
}
var status = $.status()
if (status === STATUS.SUCCESS || status === STATUS.ERROR) {
// delete this.data
$._event(status, res)
status === STATUS.ERROR && $.uploader.uploadNextChunk()
} else {
$._event(STATUS.RETRY, res)
$.pendingRetry = true
$.abort()
$.retries++
var retryInterval = $.uploader.opts.chunkRetryInterval
if (retryInterval !== null) {
setTimeout(function () {
$.send()
}, retryInterval)
} else {
$.send()
}
}
}, $.file, $)
}
},
abort: function () {
var xhr = this.xhr
this.xhr = null
this.processingResponse = false
this.processedState = null
if (xhr) {
xhr.abort()
}
},
status: function (isTest) {
if (this.readState === 1) {
return STATUS.READING
} else if (this.pendingRetry || this.preprocessState === 1) {
// if pending retry then that's effectively the same as actively uploading,
// there might just be a slight delay before the retry starts
return STATUS.UPLOADING
} else if (!this.xhr) {
return STATUS.PENDING
} else if (this.xhr.readyState < 4 || this.processingResponse) {
// Status is really 'OPENED', 'HEADERS_RECEIVED'
// or 'LOADING' - meaning that stuff is happening
return STATUS.UPLOADING
} else {
var _status
if (this.uploader.opts.successStatuses.indexOf(this.xhr.status) > -1) {
// HTTP 200, perfect
// HTTP 202 Accepted - The request has been accepted for processing, but the processing has not been completed.
_status = STATUS.SUCCESS
} else if (this.uploader.opts.permanentErrors.indexOf(this.xhr.status) > -1 ||
!isTest && this.retries >= this.uploader.opts.maxChunkRetries) {
// HTTP 415/500/501, permanent error
_status = STATUS.ERROR
} else {
// this should never happen, but we'll reset and queue a retry
// a likely case for this would be 503 service unavailable
this.abort()
_status = STATUS.PENDING
}
var processedState = this.processedState
if (processedState && processedState.err) {
_status = STATUS.ERROR
}
return _status
}
},
message: function () {
return this.xhr ? this.xhr.responseText : ''
},
progress: function () {
if (this.pendingRetry) {
return 0
}
var s = this.status()
if (s === STATUS.SUCCESS || s === STATUS.ERROR) {
return 1
} else if (s === STATUS.PENDING) {
return 0
} else {
return this.total > 0 ? this.loaded / this.total : 0
}
},
sizeUploaded: function () {
var size = this.endByte - this.startByte
// can't return only chunk.loaded value, because it is bigger than chunk size
if (this.status() !== STATUS.SUCCESS) {
size = this.progress() * size
}
return size
},
prepareXhrRequest: function (method, isTest, paramsMethod, blob) {
// Add data from the query options
var query = utils.evalOpts(this.uploader.opts.query, this.file, this, isTest)
query = utils.extend(this.getParams(), query)
// processParams
query = this.uploader.opts.processParams(query, this.file, this, isTest)
var target = utils.evalOpts(this.uploader.opts.target, this.file, this, isTest)
var data = null
if (method === 'GET' || paramsMethod === 'octet') {
// Add data from the query options
var params = []
utils.each(query, function (v, k) {
params.push([encodeURIComponent(k), encodeURIComponent(v)].join('='))
})
target = this.getTarget(target, params)
data = blob || null
} else {
// Add data from the query options
data = new FormData()
utils.each(query, function (v, k) {
data.append(k, v)
})
if (typeof blob !== 'undefined') {
data.append(this.uploader.opts.fileParameterName, blob, this.file.name)
}
}
this.xhr.open(method, target, true)
this.xhr.withCredentials = this.uploader.opts.withCredentials
// Add data from header options
utils.each(utils.evalOpts(this.uploader.opts.headers, this.file, this, isTest), function (v, k) {
this.xhr.setRequestHeader(k, v)
}, this)
return data
}
})
module.exports = Chunk
},{"./utils":5}],2:[function(_dereq_,module,exports){
var each = _dereq_('./utils').each
var event = {
_eventData: null,
on: function (name, func) {
if (!this._eventData) this._eventData = {}
if (!this._eventData[name]) this._eventData[name] = []
var listened = false
each(this._eventData[name], function (fuc) {
if (fuc === func) {
listened = true
return false
}
})
if (!listened) {
this._eventData[name].push(func)
}
},
off: function (name, func) {
if (!this._eventData) this._eventData = {}
if (!this._eventData[name] || !this._eventData[name].length) return
if (func) {
each(this._eventData[name], function (fuc, i) {
if (fuc === func) {
this._eventData[name].splice(i, 1)
return false
}
}, this)
} else {
this._eventData[name] = []
}
},
trigger: function (name) {
if (!this._eventData) this._eventData = {}
if (!this._eventData[name]) return true
var args = this._eventData[name].slice.call(arguments, 1)
var preventDefault = false
each(this._eventData[name], function (fuc) {
preventDefault = fuc.apply(this, args) === false || preventDefault
}, this)
return !preventDefault
}
}
module.exports = event
},{"./utils":5}],3:[function(_dereq_,module,exports){
var utils = _dereq_('./utils')
var event = _dereq_('./event')
var File = _dereq_('./file')
var Chunk = _dereq_('./chunk')
var version = '0.5.6'
var isServer = typeof window === 'undefined'
// ie10+
var ie10plus = isServer ? false : window.navigator.msPointerEnabled
var support = (function () {
if (isServer) {
return false
}
var sliceName = 'slice'
var _support = utils.isDefined(window.File) && utils.isDefined(window.Blob) &&
utils.isDefined(window.FileList)
var bproto = null
if (_support) {
bproto = window.Blob.prototype
utils.each(['slice', 'webkitSlice', 'mozSlice'], function (n) {
if (bproto[n]) {
sliceName = n
return false
}
})
_support = !!bproto[sliceName]
}
if (_support) Uploader.sliceName = sliceName
bproto = null
return _support
})()
var supportDirectory = (function () {
if (isServer) {
return false
}
var input = window.document.createElement('input')
input.type = 'file'
var sd = 'webkitdirectory' in input || 'directory' in input
input = null
return sd
})()
function Uploader (opts) {
this.support = support
/* istanbul ignore if */
if (!this.support) {
return
}
this.supportDirectory = supportDirectory
utils.defineNonEnumerable(this, 'filePaths', {})
this.opts = utils.extend({}, Uploader.defaults, opts || {})
this.preventEvent = utils.bind(this._preventEvent, this)
File.call(this, this)
}
/**
* Default read function using the webAPI
*
* @function webAPIFileRead(fileObj, fileType, startByte, endByte, chunk)
*
*/
var webAPIFileRead = function (fileObj, fileType, startByte, endByte, chunk) {
chunk.readFinished(fileObj.file[Uploader.sliceName](startByte, endByte, fileType))
}
Uploader.version = version
Uploader.defaults = {
chunkSize: 1024 * 1024,
forceChunkSize: false,
simultaneousUploads: 3,
singleFile: false,
fileParameterName: 'file',
progressCallbacksInterval: 500,
speedSmoothingFactor: 0.1,
query: {},
headers: {},
withCredentials: false,
preprocess: null,
method: 'multipart',
testMethod: 'GET',
uploadMethod: 'POST',
prioritizeFirstAndLastChunk: false,
allowDuplicateUploads: false,
target: '/',
testChunks: true,
generateUniqueIdentifier: null,
maxChunkRetries: 0,
chunkRetryInterval: null,
permanentErrors: [404, 415, 500, 501],
successStatuses: [200, 201, 202],
onDropStopPropagation: false,
initFileFn: null,
readFileFn: webAPIFileRead,
checkChunkUploadedByResponse: null,
initialPaused: false,
processResponse: function (response, cb) {
cb(null, response)
},
processParams: function (params) {
return params
}
}
Uploader.utils = utils
Uploader.event = event
Uploader.File = File
Uploader.Chunk = Chunk
// inherit file
Uploader.prototype = utils.extend({}, File.prototype)
// inherit event
utils.extend(Uploader.prototype, event)
utils.extend(Uploader.prototype, {
constructor: Uploader,
_trigger: function (name) {
var args = utils.toArray(arguments)
var preventDefault = !this.trigger.apply(this, arguments)
if (name !== 'catchAll') {
args.unshift('catchAll')
preventDefault = !this.trigger.apply(this, args) || preventDefault
}
return !preventDefault
},
_triggerAsync: function () {
var args = arguments
utils.nextTick(function () {
this._trigger.apply(this, args)
}, this)
},
addFiles: function (files, evt) {
var _files = []
var oldFileListLen = this.fileList.length
utils.each(files, function (file) {
// Uploading empty file IE10/IE11 hangs indefinitely
// Directories have size `0` and name `.`
// Ignore already added files if opts.allowDuplicateUploads is set to false
if ((!ie10plus || ie10plus && file.size > 0) && !(file.size % 4096 === 0 && (file.name === '.' || file.fileName === '.'))) {
var uniqueIdentifier = this.generateUniqueIdentifier(file)
if (this.opts.allowDuplicateUploads || !this.getFromUniqueIdentifier(uniqueIdentifier)) {
var _file = new File(this, file, this)
_file.uniqueIdentifier = uniqueIdentifier
if (this._trigger('fileAdded', _file, evt)) {
_files.push(_file)
} else {
File.prototype.removeFile.call(this, _file)
}
}
}
}, this)
// get new fileList
var newFileList = this.fileList.slice(oldFileListLen)
if (this._trigger('filesAdded', _files, newFileList, evt)) {
utils.each(_files, function (file) {
if (this.opts.singleFile && this.files.length > 0) {
this.removeFile(this.files[0])
}
this.files.push(file)
}, this)
this._trigger('filesSubmitted', _files, newFileList, evt)
} else {
utils.each(newFileList, function (file) {
File.prototype.removeFile.call(this, file)
}, this)
}
},
addFile: function (file, evt) {
this.addFiles([file], evt)
},
cancel: function () {
for (var i = this.fileList.length - 1; i >= 0; i--) {
this.fileList[i].cancel()
}
},
removeFile: function (file) {
File.prototype.removeFile.call(this, file)
this._trigger('fileRemoved', file)
},
generateUniqueIdentifier: function (file) {
var custom = this.opts.generateUniqueIdentifier
if (utils.isFunction(custom)) {
return custom(file)
}
/* istanbul ignore next */
// Some confusion in different versions of Firefox
var relativePath = file.relativePath || file.webkitRelativePath || file.fileName || file.name
/* istanbul ignore next */
return file.size + '-' + relativePath.replace(/[^0-9a-zA-Z_-]/img, '')
},
getFromUniqueIdentifier: function (uniqueIdentifier) {
var ret = false
utils.each(this.files, function (file) {
if (file.uniqueIdentifier === uniqueIdentifier) {
ret = file
return false
}
})
return ret
},
uploadNextChunk: function (preventEvents) {
var found = false
var pendingStatus = Chunk.STATUS.PENDING
var checkChunkUploaded = this.uploader.opts.checkChunkUploadedByResponse
if (this.opts.prioritizeFirstAndLastChunk) {
utils.each(this.files, function (file) {
if (file.paused) {
return
}
if (checkChunkUploaded && !file._firstResponse && file.isUploading()) {
// waiting for current file's first chunk response
return
}
if (file.chunks.length && file.chunks[0].status() === pendingStatus) {
file.chunks[0].send()
found = true
return false
}
if (file.chunks.length > 1 && file.chunks[file.chunks.length - 1].status() === pendingStatus) {
file.chunks[file.chunks.length - 1].send()
found = true
return false
}
})
if (found) {
return found
}
}
// Now, simply look for the next, best thing to upload
utils.each(this.files, function (file) {
if (!file.paused) {
if (checkChunkUploaded && !file._firstResponse && file.isUploading()) {
// waiting for current file's first chunk response
return
}
utils.each(file.chunks, function (chunk) {
if (chunk.status() === pendingStatus) {
chunk.send()
found = true
return false
}
})
}
if (found) {
return false
}
})
if (found) {
return true
}
// The are no more outstanding chunks to upload, check is everything is done
var outstanding = false
utils.each(this.files, function (file) {
if (!file.isComplete()) {
outstanding = true
return false
}
})
// should check files now
// if now files in list
// should not trigger complete event
if (!outstanding && !preventEvents && this.files.length) {
// All chunks have been uploaded, complete
this._triggerAsync('complete')
}
return outstanding
},
upload: function (preventEvents) {
// Make sure we don't start too many uploads at once
var ret = this._shouldUploadNext()
if (ret === false) {
return
}
!preventEvents && this._trigger('uploadStart')
var started = false
for (var num = 1; num <= this.opts.simultaneousUploads - ret; num++) {
started = this.uploadNextChunk(!preventEvents) || started
if (!started && preventEvents) {
// completed
break
}
}
if (!started && !preventEvents) {
this._triggerAsync('complete')
}
},
/**
* should upload next chunk
* @function
* @returns {Boolean|Number}
*/
_shouldUploadNext: function () {
var num = 0
var should = true
var simultaneousUploads = this.opts.simultaneousUploads
var uploadingStatus = Chunk.STATUS.UPLOADING
utils.each(this.files, function (file) {
utils.each(file.chunks, function (chunk) {
if (chunk.status() === uploadingStatus) {
num++
if (num >= simultaneousUploads) {
should = false
return false
}
}
})
return should
})
// if should is true then return uploading chunks's length
return should && num
},
/**
* Assign a browse action to one or more DOM nodes.
* @function
* @param {Element|Array.<Element>} domNodes
* @param {boolean} isDirectory Pass in true to allow directories to
* @param {boolean} singleFile prevent multi file upload
* @param {Object} attributes set custom attributes:
* http://www.w3.org/TR/html-markup/input.file.html#input.file-attributes
* eg: accept: 'image/*'
* be selected (Chrome only).
*/
assignBrowse: function (domNodes, isDirectory, singleFile, attributes) {
if (typeof domNodes.length === 'undefined') {
domNodes = [domNodes]
}
utils.each(domNodes, function (domNode) {
var input
if (domNode.tagName === 'INPUT' && domNode.type === 'file') {
input = domNode
} else {
input = document.createElement('input')
input.setAttribute('type', 'file')
// display:none - not working in opera 12
utils.extend(input.style, {
visibility: 'hidden',
position: 'absolute',
width: '1px',
height: '1px'
})
// for opera 12 browser, input must be assigned to a document
domNode.appendChild(input)
// https://developer.mozilla.org/en/using_files_from_web_applications)
// event listener is executed two times
// first one - original mouse click event
// second - input.click(), input is inside domNode
domNode.addEventListener('click', function (e) {
if (domNode.tagName.toLowerCase() === 'label') {
return
}
input.click()
}, false)
}
if (!this.opts.singleFile && !singleFile) {
input.setAttribute('multiple', 'multiple')
}
if (isDirectory) {
input.setAttribute('webkitdirectory', 'webkitdirectory')
}
attributes && utils.each(attributes, function (value, key) {
input.setAttribute(key, value)
})
// When new files are added, simply append them to the overall list
var that = this
input.addEventListener('change', function (e) {
that._trigger(e.type, e)
if (e.target.value) {
that.addFiles(e.target.files, e)
e.target.value = ''
}
}, false)
}, this)
},
onDrop: function (evt) {
this._trigger(evt.type, evt)
if (this.opts.onDropStopPropagation) {
evt.stopPropagation()
}
evt.preventDefault()
this._parseDataTransfer(evt.dataTransfer, evt)
},
_parseDataTransfer: function (dataTransfer, evt) {
if (dataTransfer.items && dataTransfer.items[0] &&
dataTransfer.items[0].webkitGetAsEntry) {
this.webkitReadDataTransfer(dataTransfer, evt)
} else {
this.addFiles(dataTransfer.files, evt)
}
},
webkitReadDataTransfer: function (dataTransfer, evt) {
var self = this
var queue = dataTransfer.items.length
var files = []
utils.each(dataTransfer.items, function (item) {
var entry = item.webkitGetAsEntry()
if (!entry) {
decrement()
return
}
if (entry.isFile) {
// due to a bug in Chrome's File System API impl - #149735
fileReadSuccess(item.getAsFile(), entry.fullPath)
} else {
readDirectory(entry.createReader())
}
})
function readDirectory (reader) {
reader.readEntries(function (entries) {
if (entries.length) {
queue += entries.length
utils.each(entries, function (entry) {
if (entry.isFile) {
var fullPath = entry.fullPath
entry.file(function (file) {
fileReadSuccess(file, fullPath)
}, readError)
} else if (entry.isDirectory) {
readDirectory(entry.createReader())
}
})
readDirectory(reader)
} else {
decrement()
}
}, readError)
}
function fileReadSuccess (file, fullPath) {
// relative path should not start with "/"
file.relativePath = fullPath.substring(1)
files.push(file)
decrement()
}
function readError (fileError) {
throw fileError
}
function decrement () {
if (--queue === 0) {
self.addFiles(files, evt)
}
}
},
_assignHelper: function (domNodes, handles, remove) {
if (typeof domNodes.length === 'undefined') {
domNodes = [domNodes]
}
var evtMethod = remove ? 'removeEventListener' : 'addEventListener'
utils.each(domNodes, function (domNode) {
utils.each(handles, function (handler, name) {
domNode[evtMethod](name, handler, false)
}, this)
}, this)
},
_preventEvent: function (e) {
utils.preventEvent(e)
this._trigger(e.type, e)
},
/**
* Assign one or more DOM nodes as a drop target.
* @function
* @param {Element|Array.<Element>} domNodes
*/
assignDrop: function (domNodes) {
this._onDrop = utils.bind(this.onDrop, this)
this._assignHelper(domNodes, {
dragover: this.preventEvent,
dragenter: this.preventEvent,
dragleave: this.preventEvent,
drop: this._onDrop
})
},
/**
* Un-assign drop event from DOM nodes
* @function
* @param domNodes
*/
unAssignDrop: function (domNodes) {
this._assignHelper(domNodes, {
dragover: this.preventEvent,
dragenter: this.preventEvent,
dragleave: this.preventEvent,
drop: this._onDrop
}, true)
this._onDrop = null
}
})
module.exports = Uploader
},{"./chunk":1,"./event":2,"./file":4,"./utils":5}],4:[function(_dereq_,module,exports){
var utils = _dereq_('./utils')
var Chunk = _dereq_('./chunk')
function File (uploader, file, parent) {
utils.defineNonEnumerable(this, 'uploader', uploader)
this.isRoot = this.isFolder = uploader === this
utils.defineNonEnumerable(this, 'parent', parent || null)
utils.defineNonEnumerable(this, 'files', [])
utils.defineNonEnumerable(this, 'fileList', [])
utils.defineNonEnumerable(this, 'chunks', [])
utils.defineNonEnumerable(this, '_errorFiles', [])
utils.defineNonEnumerable(this, 'file', null)
this.id = utils.uid()
if (this.isRoot || !file) {
this.file = null
} else {
if (utils.isString(file)) {
// folder
this.isFolder = true
this.file = null
this.path = file
if (this.parent.path) {
file = file.substr(this.parent.path.length)
}
this.name = file.charAt(file.length - 1) === '/' ? file.substr(0, file.length - 1) : file
} else {
this.file = file
this.fileType = this.file.type
this.name = file.fileName || file.name
this.size = file.size
this.relativePath = file.relativePath || file.webkitRelativePath || this.name
this._parseFile()
}
}
this.paused = uploader.opts.initialPaused
this.error = false
this.allError = false
this.aborted = false
this.completed = false
this.averageSpeed = 0
this.currentSpeed = 0
this._lastProgressCallback = Date.now()
this._prevUploadedSize = 0
this._prevProgress = 0
this.bootstrap()
}
utils.extend(File.prototype, {
_parseFile: function () {
var ppaths = parsePaths(this.relativePath)
if (ppaths.length) {
var filePaths = this.uploader.filePaths
utils.each(ppaths, function (path, i) {
var folderFile = filePaths[path]
if (!folderFile) {
folderFile = new File(this.uploader, path, this.parent)
filePaths[path] = folderFile
this._updateParentFileList(folderFile)
}
this.parent = folderFile
folderFile.files.push(this)
if (!ppaths[i + 1]) {
folderFile.fileList.push(this)
}
}, this)
} else {
this._updateParentFileList()
}
},
_updateParentFileList: function (file) {
if (!file) {
file = this
}
var p = this.parent
if (p) {
p.fileList.push(file)
}
},
_eachAccess: function (eachFn, fileFn) {
if (this.isFolder) {
utils.each(this.files, function (f, i) {
return eachFn.call(this, f, i)
}, this)
return
}
fileFn.call(this, this)
},
bootstrap: function () {
if (this.isFolder) return
var opts = this.uploader.opts
if (utils.isFunction(opts.initFileFn)) {
opts.initFileFn.call(this, this)
}
this.abort(true)
this._resetError()
// Rebuild stack of chunks from file
this._prevProgress = 0
var round = opts.forceChunkSize ? Math.ceil : Math.floor
var chunks = Math.max(round(this.size / opts.chunkSize), 1)
for (var offset = 0; offset < chunks; offset++) {
this.chunks.push(new Chunk(this.uploader, this, offset))
}
},
_measureSpeed: function () {
var smoothingFactor = this.uploader.opts.speedSmoothingFactor
var timeSpan = Date.now() - this._lastProgressCallback
if (!timeSpan) {
return
}
var uploaded = this.sizeUploaded()
// Prevent negative upload speed after file upload resume
this.currentSpeed = Math.max((uploaded - this._prevUploadedSize) / timeSpan * 1000, 0)
this.averageSpeed = smoothingFactor * this.currentSpeed + (1 - smoothingFactor) * this.averageSpeed
this._prevUploadedSize = uploaded
if (this.parent && this.parent._checkProgress()) {
this.parent._measureSpeed()
}
},
_checkProgress: function (file) {
return Date.now() - this._lastProgressCallback >= this.uploader.opts.progressCallbacksInterval
},
_chunkEvent: function (chunk, evt, message) {
var uploader = this.uploader
var STATUS = Chunk.STATUS
var that = this
var rootFile = this.getRoot()
var triggerProgress = function () {
that._measureSpeed()
uploader._trigger('fileProgress', rootFile, that, chunk)
that._lastProgressCallback = Date.now()
}
switch (evt) {
case STATUS.PROGRESS:
if (this._checkProgress()) {
triggerProgress()
}
break
case STATUS.ERROR:
this._error()
this.abort(true)
uploader._trigger('fileError', rootFile, this, message, chunk)
break
case STATUS.SUCCESS:
this._updateUploadedChunks(message, chunk)
if (this.error) {
return
}
clearTimeout(this._progeressId)
this._progeressId = 0
var timeDiff = Date.now() - this._lastProgressCallback
if (timeDiff < uploader.opts.progressCallbacksInterval) {
this._progeressId = setTimeout(triggerProgress, uploader.opts.progressCallbacksInterval - timeDiff)
}
if (this.isComplete()) {
clearTimeout(this._progeressId)
triggerProgress()
this.currentSpeed = 0
this.averageSpeed = 0
uploader._trigger('fileSuccess', rootFile, this, message, chunk)
if (rootFile.isComplete()) {
uploader._trigger('fileComplete', rootFile, this)
}
} else if (!this._progeressId) {
triggerProgress()
}
break
case STATUS.RETRY:
uploader._trigger('fileRetry', rootFile, this, chunk)
break
}
},
_updateUploadedChunks: function (message, chunk) {
var checkChunkUploaded = this.uploader.opts.checkChunkUploadedByResponse
if (checkChunkUploaded) {
var xhr = chunk.xhr
utils.each(this.chunks, function (_chunk) {
if (!_chunk.tested) {
var uploaded = checkChunkUploaded.call(this, _chunk, message)
if (_chunk === chunk && !uploaded) {
// fix the first chunk xhr status
// treated as success but checkChunkUploaded is false
// so the current chunk should be uploaded again
_chunk.xhr = null
}
if (uploaded) {
// first success and other chunks are uploaded
// then set xhr, so the uploaded chunks
// will be treated as success too
_chunk.xhr = xhr
}
_chunk.tested = true
}
}, this)
if (!this._firstResponse) {
this._firstResponse = true
this.uploader.upload(true)
} else {
this.uploader.uploadNextChunk()
}
} else {
this.uploader.uploadNextChunk()
}
},
_error: function () {
this.error = this.allError = true
var parent = this.parent
while (parent && parent !== this.uploader) {
parent._errorFiles.push(this)
parent.error = true
if (parent._errorFiles.length === parent.files.length) {
parent.allError = true
}
parent = parent.parent
}
},
_resetError: function () {
this.error = this.allError = false
var parent = this.parent
var index = -1
while (parent && parent !== this.uploader) {
index = parent._errorFiles.indexOf(this)
parent._errorFiles.splice(index, 1)
parent.allError = false
if (!parent._errorFiles.length) {
parent.error = false
}
parent = parent.parent
}
},
isComplete: function () {
if (!this.completed) {
var outstanding = false
this._eachAccess(function (file) {
if (!file.isComplete()) {
outstanding = true
return false
}
}, function () {
if (this.error) {
outstanding = true
} else {
var STATUS = Chunk.STATUS
utils.each(this.chunks, function (chunk) {
var status = chunk.status()
if (status === STATUS.ERROR || status === STATUS.PENDING || status === STATUS.UPLOADING || status === STATUS.READING || chunk.preprocessState === 1 || chunk.readState === 1) {
outstanding = true
return false
}
})
}
})
this.completed = !outstanding
}
return this.completed
},
isUploading: function () {
var uploading = false
this._eachAccess(function (file) {
if (file.isUploading()) {
uploading = true
return false
}
}, function () {
var uploadingStatus = Chunk.STATUS.UPLOADING
utils.each(this.chunks, function (chunk) {
if (chunk.status() === uploadingStatus) {
uploading = true
return false
}
})
})
return uploading
},
resume: function () {
this._eachAccess(function (f) {
f.resume()
}, function () {
this.paused = false
this.aborted = false
this.uploader.upload()
})
this.paused = false
this.aborted = false
},
pause: function () {
this._eachAccess(function (f) {
f.pause()
}, function () {
this.paused = true
this.abort()
})
this.paused = true
},
cancel: function () {
this.uploader.removeFile(this)
},
retry: function (file) {
var fileRetry = function (file) {
if (file.error) {
file.bootstrap()
}
}
if (file) {
file.bootstrap()
} else {
this._eachAccess(fileRetry, function () {
this.bootstrap()
})
}
this.uploader.upload()
},
abort: function (reset) {
if (this.aborted) {
return
}
this.currentSpeed = 0
this.averageSpeed = 0
this.aborted = !reset
var chunks = this.chunks
if (reset) {
this.chunks = []
}
var uploadingStatus = Chunk.STATUS.UPLOADING
utils.each(chunks, function (c) {
if (c.status() === uploadingStatus) {
c.abort()
this.uploader.uploadNextChunk()
}
}, this)
},
progress: function () {
var totalDone = 0
var totalSize = 0
var ret = 0
this._eachAccess(function (file, index) {
totalDone += file.progress() * file.size
totalSize += file.size
if (index === this.files.length - 1) {
ret = totalSize > 0 ? totalDone / totalSize : this.isComplete() ? 1 : 0
}
}, function () {
if (this.error) {
ret = 1
return
}
if (this.chunks.length === 1) {
this._prevProgress = Math.max(this._prevProgress, this.chunks[0].progress())
ret = this._prevProgress
return
}
// Sum up progress across everything
var bytesLoaded = 0
utils.each(this.chunks, function (c) {
// get chunk progress relative to entire file
bytesLoaded += c.progress() * (c.endByte - c.startByte)
})
var percent = bytesLoaded / this.size
// We don't want to lose percentages when an upload is paused
this._prevProgress = Math.max(this._prevProgress, percent > 0.9999 ? 1 : percent)
ret = this._prevProgress
})
return ret
},
getSize: function () {
var size = 0
this._eachAccess(function (file) {
size += file.size
}, function () {
size += this.size
})
return size
},
getFormatSize: function () {
var size = this.getSize()
return utils.formatSize(size)
},
getRoot: function () {
if (this.isRoot) {
return this
}
var parent = this.parent
while (parent) {
if (parent.parent === this.uploader) {
// find it
return parent
}
parent = parent.parent
}
return this
},
sizeUploaded: function () {
var size = 0
this._eachAccess(function (file) {
size += file.sizeUploaded()
}, function () {
utils.each(this.chunks, function (chunk) {
size += chunk.sizeUploaded()
})
})
return size
},
timeRemaining: function () {
var ret = 0
var sizeDelta = 0
var averageSpeed = 0
this._eachAccess(function (file, i) {
if (!file.paused && !file.error) {
sizeDelta += file.size - file.sizeUploaded()
averageSpeed += file.averageSpeed
}
if (i === this.files.length - 1) {
ret = calRet(sizeDelta, averageSpeed)
}
}, function () {
if (this.paused || this.error) {
ret = 0
return
}
var delta = this.size - this.sizeUploaded()
ret = calRet(delta, this.averageSpeed)
})
return ret
function calRet (delta, averageSpeed) {
if (delta && !averageSpeed) {
return Number.POSITIVE_INFINITY
}
if (!delta && !averageSpeed) {
return 0
}
return Math.floor(delta / averageSpeed)
}
},
removeFile: function (file) {
if (file.isFolder) {
while (file.files.length) {
var f = file.files[file.files.length - 1]
this._removeFile(f)
}
}
this._removeFile(file)
},
_delFilePath: function (file) {
if (file.path && this.filePaths) {
delete this.filePaths[file.path]
}
utils.each(file.fileList, function (file) {
this._delFilePath(file)
}, this)
},
_removeFile: function (file) {
if (!file.isFolder) {
utils.each(this.files, function (f, i) {
if (f === file) {
this.files.splice(i, 1)
return false
}
}, this)
file.abort()
var parent = file.parent
var newParent
while (parent && parent !== this) {
newParent = parent.parent
parent._removeFile(file)
parent = newParent
}
}
file.parent === this && utils.each(this.fileList, function (f, i) {
if (f === file) {
this.fileList.splice(i, 1)
return false
}
}, this)
if (!this.isRoot && this.isFolder && !this.files.length) {
this.parent._removeFile(this)
this.uploader._delFilePath(this)
}
file.parent = null
},
getType: function () {
if (this.isFolder) {
return 'folder'
}
return this.file.type && this.file.type.split('/')[1]
},
getExtension: function () {
if (this.isFolder) {
return ''
}
return this.name.substr((~-this.name.lastIndexOf('.') >>> 0) + 2).toLowerCase()
}
})
module.exports = File
function parsePaths (path) {
var ret = []
var paths = path.split('/')
var len = paths.length
var i = 1
paths.splice(len - 1, 1)
len--
if (paths.length) {
while (i <= len) {
ret.push(paths.slice(0, i++).join('/') + '/')
}
}
return ret
}
},{"./chunk":1,"./utils":5}],5:[function(_dereq_,module,exports){
var oproto = Object.prototype
var aproto = Array.prototype
var serialize = oproto.toString
var isFunction = function (fn) {
return serialize.call(fn) === '[object Function]'
}
var isArray = Array.isArray || /* istanbul ignore next */ function (ary) {
return serialize.call(ary) === '[object Array]'
}
var isPlainObject = function (obj) {
return serialize.call(obj) === '[object Object]' && Object.getPrototypeOf(obj) === oproto
}
var i = 0
var utils = {
uid: function () {
return ++i
},
noop: function () {},
bind: function (fn, context) {
return function () {
return fn.apply(context, arguments)
}
},
preventEvent: function (evt) {
evt.preventDefault()
},
stop: function (evt) {
evt.preventDefault()
evt.stopPropagation()
},
nextTick: function (fn, context) {
setTimeout(utils.bind(fn, context), 0)
},
toArray: function (ary, start, end) {
if (start === undefined) start = 0
if (end === undefined) end = ary.length
return aproto.slice.call(ary, start, end)
},
isPlainObject: isPlainObject,
isFunction: isFunction,
isArray: isArray,
isObject: function (obj) {
return Object(obj) === obj
},
isString: function (s) {
return typeof s === 'string'
},
isUndefined: function (a) {
return typeof a === 'undefined'
},
isDefined: function (a) {
return typeof a !== 'undefined'
},
each: function (ary, func, context) {
if (utils.isDefined(ary.length)) {
for (var i = 0, len = ary.length; i < len; i++) {
if (func.call(context, ary[i], i, ary) === false) {
break
}
}
} else {
for (var k in ary) {
if (func.call(context, ary[k], k, ary) === false) {
break
}
}
}
},
/**
* If option is a function, evaluate it with given params
* @param {*} data
* @param {...} args arguments of a callback
* @returns {*}
*/
evalOpts: function (data, args) {
if (utils.isFunction(data)) {
// `arguments` is an object, not array, in FF, so:
args = utils.toArray(arguments)
data = data.apply(null, args.slice(1))
}
return data
},
extend: function () {
var options
var name
var src
var copy
var copyIsArray
var clone
var target = arguments[0] || {}
var i = 1
var length = arguments.length
var force = false
// 如果第一个参数为布尔,判定是否深拷贝
if (typeof target === 'boolean') {
force = target
target = arguments[1] || {}
i++
}
// 确保接受方为一个复杂的数据类型
if (typeof target !== 'object' && !isFunction(target)) {
target = {}
}
<|fim▁hole|> // 如果只有一个参数,那么新成员添加于 extend 所在的对象上
if (i === length) {
target = this
i--
}
for (; i < length; i++) {
// 只处理非空参数
if ((options = arguments[i]) != null) {
for (name in options) {
src = target[name]
copy = options[name]
// 防止环引用
if (target === copy) {
continue
}
if (force && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) {
if (copyIsArray) {
copyIsArray = false
clone = src && isArray(src) ? src : []
} else {
clone = src && isPlainObject(src) ? src : {}
}
target[name] = utils.extend(force, clone, copy)
} else if (copy !== undefined) {
target[name] = copy
}
}
}
}
return target
},
formatSize: function (size) {
if (size < 1024) {
return size.toFixed(0) + ' bytes'
} else if (size < 1024 * 1024) {
return (size / 1024.0).toFixed(0) + ' KB'
} else if (size < 1024 * 1024 * 1024) {
return (size / 1024.0 / 1024.0).toFixed(1) + ' MB'
} else {
return (size / 1024.0 / 1024.0 / 1024.0).toFixed(1) + ' GB'
}
},
defineNonEnumerable: function (target, key, value) {
Object.defineProperty(target, key, {
enumerable: false,
configurable: true,
writable: true,
value: value
})
}
}
module.exports = utils
},{}]},{},[3])
(3)
});<|fim▁end|> | |
<|file_name|>codec.rs<|end_file_name|><|fim▁begin|>//! `codec` module contains the `Package` (frame) decoding and an `tokio_core::io::Codec`
//! implementation.
use std::io::{self, Read, Write};
use uuid::Uuid;
use byteorder::{ReadBytesExt, WriteBytesExt, LittleEndian};
use tokio_io::codec::{Encoder, Decoder};
use bytes::{BytesMut, BufMut};
use errors::ErrorKind;
use package::Package;
use {UsernamePassword};
use raw::RawMessage;
bitflags!{
/// `TcpFlags` describes if optional fields (authentication) is present.
pub flags TcpFlags: u8 {
/// No authentication information present
const FLAG_NONE = 0x00,
/// Package contains authentication
const FLAG_AUTHENTICATED = 0x01,
//const FLAG_TRUSTED_WRITE = 0x02, // only in core
}
}
/// Stateless simple PackageCodec
pub struct PackageCodec;
impl PackageCodec {
fn decode_inner(&mut self, buf: &mut BytesMut) -> io::Result<Option<Package>> {
if buf.len() < 4 + 1 + 1 + 16 {
return Ok(None);
}
let len = io::Cursor::new(&buf[0..4]).read_u32::<LittleEndian>()? as usize;
if len < 18 {
return Err(io::Error::new(io::ErrorKind::InvalidData, "length is too small"))
}
if buf.len() < len + 4 {
return Ok(None);
}
let decoded_frame = self.decode_body(&buf[4..(4 + len)]);
decoded_frame.and_then(|(c, a, m)| {
buf.split_to(4 + len);
Ok(Some(Package {
correlation_id: c,
authentication: a,
message: m.into(),
}))
})
}
fn decode_body(&mut self, buf: &[u8]) -> io::Result<(Uuid, Option<UsernamePassword>, RawMessage<'static>)> {
let (d, c, a, pos) = self.decode_header(buf)?;
let message = RawMessage::decode(d, &buf[pos..])?.into_owned();
Ok((c, a, message))
}
fn decode_header(&mut self, buf: &[u8]) -> io::Result<(u8, Uuid, Option<UsernamePassword>, usize)> {
let (d, c, a, pos) = {
let mut cursor = io::Cursor::new(buf);
let discriminator = cursor.read_u8()?;
let flags = cursor.read_u8()?;
let flags = match TcpFlags::from_bits(flags) {
Some(flags) => flags,
None => bail!(ErrorKind::InvalidFlags(flags)),
};
let correlation_id = {
let mut uuid_bytes = [0u8; 16];
cursor.read_exact(&mut uuid_bytes)?;
// this should only err if len is not 16
Uuid::from_bytes(&uuid_bytes).unwrap()
};
let authentication = if flags.contains(FLAG_AUTHENTICATED) {
Some(UsernamePassword::decode(&mut cursor)?)
} else {
None
};
(discriminator, correlation_id, authentication, cursor.position() as usize)
};
Ok((d, c, a, pos))
}
#[doc(hidden)]
pub fn encode_parts<'a>(&self, cursor: &mut io::Cursor<Vec<u8>>, correlation_id: &Uuid, authentication: Option<&UsernamePassword>, raw: &RawMessage<'a>) -> io::Result<()> {
let mut flags = FLAG_NONE;
if authentication.is_some() {
flags.insert(FLAG_AUTHENTICATED);
}
cursor.write_u32::<LittleEndian>(0)?; // placeholder for prefix
cursor.write_u8(raw.discriminator())?;
cursor.write_u8(flags.bits())?;
cursor.write_all(correlation_id.as_bytes())?;
if flags.contains(FLAG_AUTHENTICATED) {
authentication
.expect("According to flag authentication token is present")
.encode(cursor)?;
} else {
assert!(authentication.is_none());
}
raw.encode(cursor)?;
let at_end = cursor.position();
let len = at_end as u32 - 4;
cursor.set_position(0);
cursor.write_u32::<LittleEndian>(len)?;
Ok(())
}<|fim▁hole|>
impl Decoder for PackageCodec {
type Item = Package;
type Error = io::Error;
fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<Self::Item>> {
self.decode_inner(buf).map_err(|e| e.into())
}
}
impl Encoder for PackageCodec {
type Item = Package;
type Error = io::Error;
fn encode(&mut self, msg: Package, buf: &mut BytesMut) -> io::Result<()> {
let mut cursor = io::Cursor::new(Vec::new());
self.encode_parts(&mut cursor, &msg.correlation_id, msg.authentication.as_ref(), &msg.message)?;
let tmp = cursor.into_inner();
buf.put_slice(&tmp);
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::fmt::Debug;
use hex::FromHex;
use tokio_io::codec::{Decoder, Encoder};
use uuid::Uuid;
use super::{PackageCodec};
use package::Package;
use raw::RawMessage;
use raw::client_messages::{WriteEventsCompleted, OperationResult};
#[test]
fn decode_ping() {
test_decoding_hex("1200000003007b50a1b034b9224e8f9d708c394fab2d",
PackageCodec,
Package {
authentication: None,
correlation_id:
Uuid::parse_str("7b50a1b0-34b9-224e-8f9d-708c394fab2d").unwrap(),
message: RawMessage::Ping.into(),
});
}
#[test]
fn decode_ping_with_junk() {
test_decoding_hex("1300000003007b50a1b034b9224e8f9d708c394fab2d00",
PackageCodec,
Package {
authentication: None,
correlation_id:
Uuid::parse_str("7b50a1b0-34b9-224e-8f9d-708c394fab2d").unwrap(),
message: RawMessage::Ping.into(),
});
}
#[test]
fn encode_ping() {
test_encoding_hex("1200000003007b50a1b034b9224e8f9d708c394fab2d",
PackageCodec,
Package {
authentication: None,
correlation_id:
Uuid::parse_str("7b50a1b0-34b9-224e-8f9d-708c394fab2d").unwrap(),
message: RawMessage::Ping.into(),
});
}
#[test]
fn decode_unknown_discriminator() {
use std::borrow::Cow;
test_decoding_hex("12000000ff007b50a1b034b9224e8f9d708c394fab2d",
PackageCodec,
Package {
authentication: None,
correlation_id: Uuid::parse_str("7b50a1b0-34b9-224e-8f9d-708c394fab2d").unwrap(),
message: RawMessage::Unsupported(255, Cow::Owned(vec![])).into()
});
}
#[test]
fn decode_write_events_completed() {
let input = "2200000083009b59d8734e9fd84eb8a421f2666a3aa40800181e20272884d6bc563084d6bc56";
test_decoding_hex(input,
PackageCodec,
Package {
authentication: None,
correlation_id:
Uuid::parse_str("9b59d873-4e9f-d84e-b8a4-21f2666a3aa4").unwrap(),
message: RawMessage::WriteEventsCompleted(WriteEventsCompleted {
result: Some(OperationResult::Success),
message: None,
first_event_number: 30,
last_event_number: 39,
prepare_position: Some(181349124),
commit_position: Some(181349124)
}).into()
});
}
#[test]
fn encode_write_events_completed() {
test_encoding_hex("2200000083009b59d8734e9fd84eb8a421f2666a3aa40800181e20272884d6bc563084d6bc56",
PackageCodec,
Package {
authentication: None,
correlation_id:
Uuid::parse_str("9b59d873-4e9f-d84e-b8a4-21f2666a3aa4").unwrap(),
message: RawMessage::WriteEventsCompleted(WriteEventsCompleted {
result: Some(OperationResult::Success),
message: None,
first_event_number: 30,
last_event_number: 39,
prepare_position: Some(181349124),
commit_position: Some(181349124)
}).into()
});
}
#[test]
fn decode_authenticated_package() {
use bytes::BytesMut;
use auth::UsernamePassword;
let mut buf = BytesMut::with_capacity(1024);
let id = Uuid::new_v4();
let msg = Package {
correlation_id: id,
authentication: Some(UsernamePassword::new("foobar", "abbacd")),
message: RawMessage::Ping,
};
PackageCodec.encode(msg.clone(), &mut buf).unwrap();
let decoded = PackageCodec.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg, decoded);
}
fn test_decoding_hex<C: Decoder>(input: &str, codec: C, expected: C::Item)
where C::Item: Debug + PartialEq, C::Error: Debug
{
test_decoding(Vec::from_hex(input).unwrap(), codec, expected);
}
fn test_decoding<C: Decoder>(input: Vec<u8>, mut codec: C, expected: C::Item)
where C::Item: Debug + PartialEq, C::Error: Debug
{
// decode whole buffer
{
let mut buf = input.clone().into();
let item = codec.decode(&mut buf).unwrap().unwrap();
assert_eq!(item, expected);
assert_eq!(buf.len(), 0, "decoding correctly sized buffer left bytes");
}
// decoding partial buffer consumes no bytes
for len in 1..(input.len() - 1) {
let mut part = input.clone();
part.truncate(len);
let mut buf = part.into();
assert!(codec.decode(&mut buf).unwrap().is_none());
assert_eq!(buf.len(), len, "decoding partial buffer consumed bytes");
}
// decoding a too long buffer consumes no extra bytes
{
let mut input = input.clone();
let len = input.len();
input.extend(vec![0u8; len]);
let mut buf = input.into();
let item = codec.decode(&mut buf).unwrap().unwrap();
assert_eq!(item, expected);
assert_eq!(buf.len(), len, "decoding oversized buffer overused bytes");
}
}
fn test_encoding_hex<C: Encoder>(input: &str, codec: C, expected: C::Item)
where C::Item: Debug + PartialEq, C::Error: Debug
{
test_encoding(Vec::from_hex(input).unwrap(), codec, expected);
}
fn test_encoding<C: Encoder>(input: Vec<u8>, mut codec: C, expected: C::Item)
where C::Item: Debug + PartialEq, C::Error: Debug
{
use bytes::BytesMut;
let mut buf = BytesMut::with_capacity(input.len());
codec.encode(expected, &mut buf).unwrap();
assert_eq!(&buf[..],
&input[..],
"encoding did not yield same");
}
}<|fim▁end|> | } |
<|file_name|>test_life_1_05.rs<|end_file_name|><|fim▁begin|>extern crate game_of_life_parsers;
use std::fs::File;
use game_of_life_parsers::GameDescriptor;
use game_of_life_parsers::Coord;
use game_of_life_parsers::Parser;
use game_of_life_parsers::Life105Parser;
#[test]
fn parse_file() {<|fim▁hole|> let gd: Box<GameDescriptor> = parser.parse(Box::new(file)).unwrap();
assert_eq!(&[2, 3], gd.survival());
assert_eq!(&[1], gd.birth());
assert_eq!(&[
// block 1
Coord { x: 0, y: -1 },
Coord { x: 1, y: 0 },
Coord { x: -1, y: 1 }, Coord { x: 0, y: 1 }, Coord { x: 1, y: 1 },
// block 2
Coord { x: 3, y: 2 }, Coord { x: 4, y: 3 }, Coord { x: 5, y: 4 }
], gd.live_cells());
}<|fim▁end|> | let file = File::open("tests/life_1_05/glider.life").unwrap();
let mut parser = Life105Parser::new();
|
<|file_name|>renameSelfAndParameterAttribute.py<|end_file_name|><|fim▁begin|>class С:
def __init__(self, x=None):
if x is None:
self.foo = {
'A': {
'x': 0,
'y': 0,
},
}
else: # init was given the previous state
assert isinstance(x, С)
self.foo = {<|fim▁hole|> 'A': {
'x': x.f<caret>oo['A']['x'],
'y': x.foo['A']['y'],
},
}<|fim▁end|> | |
<|file_name|>test.py<|end_file_name|><|fim▁begin|># Python3
from solution1 import urlSimilarity as f
qa = [
('https://codesignal.com/home/test?param1=42¶m3=testing&login=admin',
'https://codesignal.com/home/secret/test?param3=fish¶m1=42&password=admin',
19),
('https://codesignal.com/home/test?param1=42¶m3=testing&login=admin',
'http://codesignal.org/about?42=param1&tesing=param3&admin=login',
0),
('https://www.google.com/search?q=codesignal',
'http://www.google.com/search?q=codesignal',
13),
('ftp://www.example.com/query?varName=value',<|fim▁hole|> 'http://anotherexample.com/www?ftp=http',
0),
('https://codesignal.com/home/test?param1=42¶m3=testing&login=admin¶m4=abc¶m5=codesignal',
'https://codesignal.com/home/secret/test?param3=fish¶m1=42&codesignal=admin¶m5=test',
20)
]
for *q, a in qa:
for i, e in enumerate(q):
print('input{0}: {1}'.format(i + 1, e))
ans = f(*q)
if ans != a:
print(' [failed]')
print(' output:', ans)
print(' expected:', a)
else:
print(' [ok]')
print(' output:', ans)
print()<|fim▁end|> | 'http://example.com/query?varName=value',
3),
('ftp://www', |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>__author__ = 'leif'<|fim▁hole|>admin.site.register(GameExperiment)
admin.site.register(UserProfile)
admin.site.register(MaxHighScore)<|fim▁end|> | from django.contrib import admin
from models import *
|
<|file_name|>dotfiles.py<|end_file_name|><|fim▁begin|>import logging
import tarfile
import tempfile
import os
import fabric.api
import fabric.operations
import cloudenvy.envy
class Dotfiles(cloudenvy.envy.Command):
def _build_subparser(self, subparsers):
help_str = 'Upload dotfiles from your local machine to an Envy.'
subparser = subparsers.add_parser('dotfiles', help=help_str,
description=help_str)
subparser.set_defaults(func=self.run)
subparser.add_argument('-n', '--name', action='store', default='',
help='Specify custom name for an Envy.')
subparser.add_argument('-f', '--files', action='store',
help='Limit operation to a specific list of '
'comma-separated files.')
return subparser
def run(self, config, args):
envy = cloudenvy.envy.Envy(config)
if envy.ip():
host_string = '%s@%s' % (envy.remote_user, envy.ip())
temp_tar = tempfile.NamedTemporaryFile(delete=True)
with fabric.api.settings(host_string=host_string):
if args.files:
dotfiles = args.files.split(',')<|fim▁hole|> dotfiles = config['defaults']['dotfiles'].split(',')
dotfiles = [dotfile.strip() for dotfile in dotfiles]
with tarfile.open(temp_tar.name, 'w') as archive:
for dotfile in dotfiles:
path = os.path.expanduser('~/%s' % dotfile)
if os.path.exists(path):
if not os.path.islink(path):
archive.add(path, arcname=dotfile)
fabric.operations.put(temp_tar, '~/dotfiles.tar')
fabric.operations.run('tar -xvf ~/dotfiles.tar')
else:
logging.error('Could not determine IP.')<|fim▁end|> | else: |
<|file_name|>native.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, ViewEncapsulation } from '@angular/core';
@Component({
selector: 'app-native',
templateUrl: './native.component.html',
styleUrls: ['./native.component.css'],
encapsulation: ViewEncapsulation.Native
})
export class NativeComponent implements OnInit {
constructor() { }
<|fim▁hole|>}<|fim▁end|> | ngOnInit() {
} |
<|file_name|>client.go<|end_file_name|><|fim▁begin|>package compilerapi
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"
"net/url"
"strings"
)
const (
GoogleEndpointUrl = "http://closure-compiler.appspot.com/compile"
)
// See https://developers.google.com/closure/compiler/docs/api-ref for details about the options
type Client struct {
// Possible values: ECMASCRIPT3, ECMASCRIPT5, ECMASCRIPT5_STRICT, default to ECMASCRIPT5_STRICT
Language string
// Possible values: WHITESPACE_ONLY, SIMPLE_OPTIMIZATIONS, ADVANCED_OPTIMIZATIONS, default to WHITESPACE_ONLY
CompilationLevel string
}
type OutputError struct {
Charno int `json:"charno"`
Error string `json:"error"`
Lineno int `json:"lineno"`
File string `json:"file"`
Type string `json:"type"`
Line string `json:"line"`
}
func (e *OutputError) AsLogline() string {
return fmt.Sprintf("\033[36;1m[%d, %d]\033[31m error: \033[0m%s\n\t%s\n",
e.Lineno,
e.Charno,
e.Error,
e.Line,
)
}
type OutputWarning struct {
Charno int `json:"charno"`
Warning string `json:"warning"`
Lineno int `json:"lineno"`
File string `json:"file"`
Type string `json:"type"`
Line string `json:"line"`
}
func (w *OutputWarning) AsLogline() string {
return fmt.Sprintf("\033[36;1m[%d, %d]\033[33m warning: \033[0m%s\n\t%s\n",
w.Lineno,
w.Charno,
w.Warning,
w.Line,
)
}
type OutputServerError struct {
Code int `json:"code"`
Error string `json:"error"`
}
type OutputStatistics struct {
OriginalSize int `json:"originalSize"`
CompressedSize int `json:"compressedSize"`
CompileTime int `json:"compileTime"`
}
type Output struct {
CompiledCode string `json:"compiledCode"`
Errors []OutputError `json:"errors"`
Warnings []OutputWarning `json:"warnings"`
ServerErrors *OutputServerError `json:"serverErrors"`
Statistics OutputStatistics `json:"statistics"`
}
func (client *Client) buildRequest(jsCode []byte) *http.Request {
values := url.Values{}
values.Set("js_code", string(jsCode[:]))
values.Set("output_format", "json")
values.Add("output_info", "compiled_code")
values.Add("output_info", "statistics")
values.Add("output_info", "warnings")
values.Add("output_info", "errors")
if client.Language != "" {
values.Set("language", client.Language)
} else {
values.Set("language", "ECMASCRIPT5_STRICT")
}
if client.CompilationLevel != "" {
values.Set("compilation_level", client.CompilationLevel)
} else {
values.Set("compilation_level", "WHITESPACE_ONLY")
}
req, err := http.NewRequest(
"POST",
GoogleEndpointUrl,<|fim▁hole|> }
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
return req
}
func (client *Client) Compile(jsCode []byte) *Output {
httpClient := http.Client{}
req := client.buildRequest(jsCode)
res, err := httpClient.Do(req)
if err != nil {
log.Fatalf(err.Error())
}
content, err := ioutil.ReadAll(res.Body)
res.Body.Close()
if err != nil {
log.Fatalf(err.Error())
}
output := Output{}
err = json.Unmarshal(content, &output)
if err != nil {
log.Fatalf(err.Error())
}
return &output
}<|fim▁end|> | strings.NewReader(values.Encode()),
)
if err != nil {
log.Fatalf(err.Error()) |
<|file_name|>types.go<|end_file_name|><|fim▁begin|>package api
import (
"errors"
"fmt"
"net/url"
"path/filepath"
"strings"
utilglog "github.com/openshift/source-to-image/pkg/util/glog"
"github.com/openshift/source-to-image/pkg/util/user"
)
var glog = utilglog.StderrLog
// Image label namespace constants
const (
DefaultNamespace = "io.openshift.s2i."
KubernetesNamespace = "io.k8s."
)
// invalidFilenameCharacters contains a list of character we consider malicious
// when injecting the directories into containers.
const invalidFilenameCharacters = `;*?"<>|%#$!+{}&[],"'` + "`"
const (
// PullAlways means that we always attempt to pull the latest image.
PullAlways PullPolicy = "always"
// PullNever means that we never pull an image, but only use a local image.
PullNever PullPolicy = "never"
// PullIfNotPresent means that we pull if the image isn't present on disk.
PullIfNotPresent PullPolicy = "if-not-present"
// DefaultBuilderPullPolicy specifies the default pull policy to use
DefaultBuilderPullPolicy = PullIfNotPresent
// DefaultRuntimeImagePullPolicy specifies the default pull policy to use.
DefaultRuntimeImagePullPolicy = PullIfNotPresent
// DefaultPreviousImagePullPolicy specifies policy for pulling the previously
// build Docker image when doing incremental build
DefaultPreviousImagePullPolicy = PullIfNotPresent
)
// Config contains essential fields for performing build.
type Config struct {
// DisplayName is a result image display-name label. This defaults to the
// output image name.
DisplayName string
// Description is a result image description label. The default is no
// description.
Description string
// BuilderImage describes which image is used for building the result images.
BuilderImage string
// BuilderImageVersion provides optional version information about the builder image.
BuilderImageVersion string
// BuilderBaseImageVersion provides optional version information about the builder base image.
BuilderBaseImageVersion string
// RuntimeImage specifies the image that will be a base for resulting image
// and will be used for running an application. By default, BuilderImage is
// used for building and running, but the latter may be overridden.
RuntimeImage string
// RuntimeImagePullPolicy specifies when to pull a runtime image.
RuntimeImagePullPolicy PullPolicy
// RuntimeAuthentication holds the authentication information for pulling the
// runtime Docker images from private repositories.
RuntimeAuthentication AuthConfig
// RuntimeArtifacts specifies a list of source/destination pairs that will
// be copied from builder to a runtime image. Source can be a file or
// directory. Destination must be a directory. Regardless whether it
// is an absolute or relative path, it will be placed into image's WORKDIR.
// Destination also can be empty or equals to ".", in this case it just
// refers to a root of WORKDIR.
// In case it's empty, S2I will try to get this list from
// io.openshift.s2i.assemble-input-files label on a RuntimeImage.
RuntimeArtifacts VolumeList
// DockerConfig describes how to access host docker daemon.
DockerConfig *DockerConfig
// DockerCfgPath provides the path to the .dockercfg file
DockerCfgPath string
// PullAuthentication holds the authentication information for pulling the
// Docker images from private repositories
PullAuthentication AuthConfig
// IncrementalAuthentication holds the authentication information for pulling the
// previous image from private repositories
IncrementalAuthentication AuthConfig
// DockerNetworkMode is used to set the docker network setting to --net=container:<id>
// when the builder is invoked from a container.
DockerNetworkMode DockerNetworkMode
// PreserveWorkingDir describes if working directory should be left after processing.
PreserveWorkingDir bool
// IgnoreSubmodules determines whether we will attempt to pull in submodules
// (via --recursive or submodule init)
IgnoreSubmodules bool
// Source URL describing the location of sources used to build the result image.
Source string
// Ref is a tag/branch to be used for build.
Ref string
// Tag is a result image tag name.
Tag string
// BuilderPullPolicy specifies when to pull the builder image
BuilderPullPolicy PullPolicy
// PreviousImagePullPolicy specifies when to pull the previously build image
// when doing incremental build
PreviousImagePullPolicy PullPolicy
// ForcePull defines if the builder image should be always pulled or not.
// This is now deprecated by BuilderPullPolicy and will be removed soon.
// Setting this to 'true' equals setting BuilderPullPolicy to 'PullAlways'.
// Setting this to 'false' equals setting BuilderPullPolicy to 'PullIfNotPresent'
ForcePull bool
// Incremental describes whether to try to perform incremental build.
Incremental bool
// IncrementalFromTag sets an alternative image tag to look for existing
// artifacts. Tag is used by default if this is not set.
IncrementalFromTag string
// RemovePreviousImage describes if previous image should be removed after successful build.
// This applies only to incremental builds.
RemovePreviousImage bool
// Environment is a map of environment variables to be passed to the image.
Environment EnvironmentList
// EnvironmentFile provides the path to a file with list of environment
// variables.
EnvironmentFile string
// LabelNamespace provides the namespace under which the labels will be generated.
LabelNamespace string
// CallbackURL is a URL which is called upon successful build to inform about that fact.
CallbackURL string
// ScriptsURL is a URL describing the localization of S2I scripts used during build process.
ScriptsURL string
// Destination specifies a location where the untar operation will place its artifacts.
Destination string
// WorkingDir describes temporary directory used for downloading sources, scripts and tar operations.
WorkingDir string
// WorkingSourceDir describes the subdirectory off of WorkingDir set up during the repo download
// that is later used as the root for ignore processing
WorkingSourceDir string
// LayeredBuild describes if this is build which layered scripts and sources on top of BuilderImage.
LayeredBuild bool
// Operate quietly. Progress and assemble script output are not reported, only fatal errors.
// (default: false).
Quiet bool
// ForceCopy results in only the file SCM plugin being used (i.e. no `git clone`); allows for empty directories to be included
// in resulting image (since git does not support that).
// (default: false).
ForceCopy bool
// Specify a relative directory inside the application repository that should
// be used as a root directory for the application.
ContextDir string
// AllowedUIDs is a list of user ranges of users allowed to run the builder image.
// If a range is specified and the builder (or runtime) image uses a non-numeric
// user or a user that is outside the specified range, then the build fails.
AllowedUIDs user.RangeList
// AssembleUser specifies the user to run the assemble script in container
AssembleUser string
// RunImage will trigger a "docker run ..." invocation of the produced image so the user
// can see if it operates as he would expect
RunImage bool
// Usage allows for properly shortcircuiting s2i logic when `s2i usage` is invoked
Usage bool
// Injections specifies a list source/destination folders that are injected to
// the container that runs assemble.
// All files we inject will be truncated after the assemble script finishes.
Injections VolumeList
// CGroupLimits describes the cgroups limits that will be applied to any containers
// run by s2i.
CGroupLimits *CGroupLimits
// DropCapabilities contains a list of capabilities to drop when executing containers
DropCapabilities []string
// ScriptDownloadProxyConfig optionally specifies the http and https proxy
// to use when downloading scripts
ScriptDownloadProxyConfig *ProxyConfig
// ExcludeRegExp contains a string representation of the regular expression desired for
// deciding which files to exclude from the tar stream
ExcludeRegExp string
// BlockOnBuild prevents s2i from performing a docker build operation
// if one is necessary to execute ONBUILD commands, or to layer source code into
// the container for images that don't have a tar binary available, if the
// image contains ONBUILD commands that would be executed.
BlockOnBuild bool
// HasOnBuild will be set to true if the builder image contains ONBUILD instructions
HasOnBuild bool
// BuildVolumes specifies a list of volumes to mount to container running the
// build.
BuildVolumes VolumeList
// Labels specify labels and their values to be applied to the resulting image. Label keys
// must have non-zero length. The labels defined here override generated labels in case
// they have the same name.
Labels map[string]string
// SourceInfo provides the info about the source to be built rather than relying
// on the Downloader to retrieve it.
SourceInfo *SourceInfo
}
// EnvironmentSpec specifies a single environment variable.
type EnvironmentSpec struct {
Name string
Value string
}
// EnvironmentList contains list of environment variables.
type EnvironmentList []EnvironmentSpec
// ProxyConfig holds proxy configuration.
type ProxyConfig struct {
HTTPProxy *url.URL
HTTPSProxy *url.URL
}
// CGroupLimits holds limits used to constrain container resources.
type CGroupLimits struct {
MemoryLimitBytes int64
CPUShares int64
CPUPeriod int64
CPUQuota int64
MemorySwap int64
}
// VolumeSpec represents a single volume mount point.
type VolumeSpec struct {
Source string
Destination string
}
// VolumeList contains list of VolumeSpec.
type VolumeList []VolumeSpec
// DockerConfig contains the configuration for a Docker connection.
type DockerConfig struct {
// Endpoint is the docker network endpoint or socket
Endpoint string
// CertFile is the certificate file path for a TLS connection
CertFile string
// KeyFile is the key file path for a TLS connection
KeyFile string
// CAFile is the certificate authority file path for a TLS connection
CAFile string
// UseTLS indicates if TLS must be used
UseTLS bool
// TLSVerify indicates if TLS peer must be verified
TLSVerify bool
}
// AuthConfig is our abstraction of the Registry authorization information for whatever
// docker client we happen to be based on
type AuthConfig struct {
Username string
Password string
Email string
ServerAddress string
}
// ContainerConfig is the abstraction of the docker client provider (formerly go-dockerclient, now either
// engine-api or kube docker client) container.Config type that is leveraged by s2i or origin
type ContainerConfig struct {
Labels map[string]string
Env []string
}
// Image is the abstraction of the docker client provider (formerly go-dockerclient, now either
// engine-api or kube docker client) Image type that is leveraged by s2i or origin
type Image struct {
ID string
*ContainerConfig
Config *ContainerConfig
}
// Result structure contains information from build process.
type Result struct {
// Success describes whether the build was successful.
Success bool
// Messages is a list of messages from build process.
Messages []string
// WorkingDir describes temporary directory used for downloading sources, scripts and tar operations.
WorkingDir string
// ImageID describes resulting image ID.
ImageID string
// BuildInfo holds information about the result of a build.
BuildInfo BuildInfo
}
// BuildInfo holds information about a particular step in the build process.
type BuildInfo struct {
// FailureReason is a camel case reason that is used by the machine to reply
// back to the OpenShift builder with information why any of the steps in the
// build, failed.
FailureReason FailureReason
}
// StepFailureReason holds the type of failure that occurred during the build
// process.
type StepFailureReason string
// StepFailureMessage holds the detailed message of a failure.
type StepFailureMessage string
// FailureReason holds the type of failure that occurred during the build
// process.
type FailureReason struct {
Reason StepFailureReason
Message StepFailureMessage
}
// InstallResult structure describes the result of install operation
type InstallResult struct {
// Script describes which script this result refers to
Script string
// URL describes from where the script was taken
URL string
// Downloaded describes if download operation happened, this will be true for
// external scripts, but false for scripts from inside the image
Downloaded bool
// Installed describes if script was installed to upload directory
Installed bool
// Error describes last error encountered during install operation
Error error
// FailedSources is a list of sources that were attempted but failed
// when downloading this script
FailedSources []string
}
// SourceInfo stores information about the source code
type SourceInfo struct {
// Ref represents a commit SHA-1, valid Git branch name or a Git tag
// The output image will contain this information as 'io.openshift.build.commit.ref' label.
Ref string
// CommitID represents an arbitrary extended object reference in Git as SHA-1
// The output image will contain this information as 'io.openshift.build.commit.id' label.
CommitID string
// Date contains a date when the committer created the commit.
// The output image will contain this information as 'io.openshift.build.commit.date' label.
Date string
// AuthorName contains the name of the author
// The output image will contain this information (along with AuthorEmail) as 'io.openshift.build.commit.author' label.
AuthorName string
// AuthorEmail contains the e-mail of the author
// The output image will contain this information (along with AuthorName) as 'io.openshift.build.commit.author' lablel.
AuthorEmail string
// CommitterName contains the name of the committer
CommitterName string
// CommitterEmail contains the e-mail of the committer
CommitterEmail string
// Message represents the first 80 characters from the commit message.
// The output image will contain this information as 'io.openshift.build.commit.message' label.
Message string
// Location contains a valid URL to the original repository.
// The output image will contain this information as 'io.openshift.build.source-location' label.
Location string
// ContextDir contains path inside the Location directory that
// contains the application source code.
// The output image will contain this information as 'io.openshift.build.source-context-dir'
// label.
ContextDir string
}
// CloneConfig specifies the options used when cloning the application source
// code.
type CloneConfig struct {
Recursive bool
Quiet bool
}
// DockerNetworkMode specifies the network mode setting for the docker container
type DockerNetworkMode string
const (
// DockerNetworkModeHost places the container in the default (host) network namespace.
DockerNetworkModeHost DockerNetworkMode = "host"
// DockerNetworkModeBridge instructs docker to create a network namespace for this container connected to the docker0 bridge via a veth-pair.
DockerNetworkModeBridge DockerNetworkMode = "bridge"
// DockerNetworkModeContainerPrefix is the string prefix used by NewDockerNetworkModeContainer.
DockerNetworkModeContainerPrefix string = "container:"
)
// NewDockerNetworkModeContainer creates a DockerNetworkMode value which instructs docker to place the container in the network namespace of an existing container.
// It can be used, for instance, to place the s2i container in the network namespace of the infrastructure container of a k8s pod.
func NewDockerNetworkModeContainer(id string) DockerNetworkMode {
return DockerNetworkMode(DockerNetworkModeContainerPrefix + id)
}
// PullPolicy specifies a type for the method used to retrieve the Docker image
type PullPolicy string
// String implements the String() function of pflags.Value so this can be used as
// command line parameter.
// This method is really used just to show the default value when printing help.
// It will not default the configuration.<|fim▁hole|> }
return string(*p)
}
// Type implements the Type() function of pflags.Value interface
func (p *PullPolicy) Type() string {
return "string"
}
// Set implements the Set() function of pflags.Value interface
// The valid options are "always", "never" or "if-not-present"
func (p *PullPolicy) Set(v string) error {
switch v {
case "always":
*p = PullAlways
case "never":
*p = PullNever
case "if-not-present":
*p = PullIfNotPresent
default:
return fmt.Errorf("invalid value %q, valid values are: always, never or if-not-present", v)
}
return nil
}
// IsInvalidFilename verifies if the provided filename contains malicious
// characters.
func IsInvalidFilename(name string) bool {
return strings.ContainsAny(name, invalidFilenameCharacters)
}
// Set implements the Set() function of pflags.Value interface.
// This function parses the string that contains source:destination pair.
// When the destination is not specified, the source get copied into current
// working directory in container.
func (l *VolumeList) Set(value string) error {
if len(value) == 0 {
return errors.New("invalid format, must be source:destination")
}
var mount []string
pos := strings.LastIndex(value, ":")
if pos == -1 {
mount = []string{value, ""}
} else {
mount = []string{value[:pos], value[pos+1:]}
}
mount[0] = strings.Trim(mount[0], `"'`)
mount[1] = strings.Trim(mount[1], `"'`)
s := VolumeSpec{Source: filepath.Clean(mount[0]), Destination: filepath.ToSlash(filepath.Clean(mount[1]))}
if IsInvalidFilename(s.Source) || IsInvalidFilename(s.Destination) {
return fmt.Errorf("invalid characters in filename: %q", value)
}
*l = append(*l, s)
return nil
}
// String implements the String() function of pflags.Value interface.
func (l *VolumeList) String() string {
result := []string{}
for _, i := range *l {
result = append(result, strings.Join([]string{i.Source, i.Destination}, ":"))
}
return strings.Join(result, ",")
}
// Type implements the Type() function of pflags.Value interface.
func (l *VolumeList) Type() string {
return "string"
}
// Set implements the Set() function of pflags.Value interface.
func (e *EnvironmentList) Set(value string) error {
parts := strings.SplitN(value, "=", 2)
if len(parts) != 2 || len(parts[0]) == 0 {
return fmt.Errorf("invalid environment format %q, must be NAME=VALUE", value)
}
if strings.Contains(parts[1], ",") && strings.Contains(parts[1], "=") {
glog.Warningf("DEPRECATED: Use multiple -e flags to specify multiple environment variables instead of comma (%q)", value)
}
*e = append(*e, EnvironmentSpec{
Name: strings.TrimSpace(parts[0]),
Value: strings.TrimSpace(parts[1]),
})
return nil
}
// String implements the String() function of pflags.Value interface.
func (e *EnvironmentList) String() string {
result := []string{}
for _, i := range *e {
result = append(result, strings.Join([]string{i.Name, i.Value}, "="))
}
return strings.Join(result, ",")
}
// Type implements the Type() function of pflags.Value interface.
func (e *EnvironmentList) Type() string {
return "string"
}
// AsBinds converts the list of volume definitions to go-dockerclient compatible
// list of bind mounts.
func (l *VolumeList) AsBinds() []string {
result := make([]string, len(*l))
for index, v := range *l {
result[index] = strings.Join([]string{v.Source, v.Destination}, ":")
}
return result
}<|fim▁end|> | func (p *PullPolicy) String() string {
if len(string(*p)) == 0 {
return string(DefaultBuilderPullPolicy) |
<|file_name|>provisioner.go<|end_file_name|><|fim▁begin|>// This package implements a provisioner for Packer that executes
// shell scripts within the remote machine.
package shell
import (
"bufio"
"errors"
"fmt"
"io"
"io/ioutil"
"log"
"math/rand"
"os"
"strings"
"time"
"github.com/mitchellh/packer/common"
"github.com/mitchellh/packer/helper/config"
"github.com/mitchellh/packer/packer"
"github.com/mitchellh/packer/template/interpolate"
)
type Config struct {
common.PackerConfig `mapstructure:",squash"`
// If true, the script contains binary and line endings will not be
// converted from Windows to Unix-style.
Binary bool
// An inline script to execute. Multiple strings are all executed
// in the context of a single shell.
Inline []string
// The shebang value used when running inline scripts.
InlineShebang string `mapstructure:"inline_shebang"`
// The local path of the shell script to upload and execute.
Script string
// An array of multiple scripts to run.
Scripts []string
// An array of environment variables that will be injected before
// your command(s) are executed.
Vars []string `mapstructure:"environment_vars"`
// The remote path where the local shell script will be uploaded to.
// This should be set to a writable file that is in a pre-existing directory.
RemotePath string `mapstructure:"remote_path"`
// The command used to execute the script. The '{{ .Path }}' variable
// should be used to specify where the script goes, {{ .Vars }}
// can be used to inject the environment_vars into the environment.
ExecuteCommand string `mapstructure:"execute_command"`
// The timeout for retrying to start the process. Until this timeout
// is reached, if the provisioner can't start a process, it retries.
// This can be set high to allow for reboots.
RawStartRetryTimeout string `mapstructure:"start_retry_timeout"`
startRetryTimeout time.Duration
ctx interpolate.Context
}
type Provisioner struct {
config Config
}
type ExecuteCommandTemplate struct {
Vars string
Path string
}
func (p *Provisioner) Prepare(raws ...interface{}) error {
err := config.Decode(&p.config, &config.DecodeOpts{
Interpolate: true,
InterpolateContext: &p.config.ctx,
InterpolateFilter: &interpolate.RenderFilter{
Exclude: []string{
"execute_command",
},
},
}, raws...)
if err != nil {
return err
}
if p.config.ExecuteCommand == "" {
p.config.ExecuteCommand = "chmod +x {{.Path}}; {{.Vars}} {{.Path}}"
}
if p.config.Inline != nil && len(p.config.Inline) == 0 {
p.config.Inline = nil
}
if p.config.InlineShebang == "" {
p.config.InlineShebang = "/bin/sh -e"
}
if p.config.RawStartRetryTimeout == "" {
p.config.RawStartRetryTimeout = "5m"
}
if p.config.RemotePath == "" {
p.config.RemotePath = randomScriptName()
}
if p.config.Scripts == nil {
p.config.Scripts = make([]string, 0)
}
if p.config.Vars == nil {
p.config.Vars = make([]string, 0)
}
var errs *packer.MultiError
if p.config.Script != "" && len(p.config.Scripts) > 0 {
errs = packer.MultiErrorAppend(errs,
errors.New("Only one of script or scripts can be specified."))
}
if p.config.Script != "" {
p.config.Scripts = []string{p.config.Script}
}
if len(p.config.Scripts) == 0 && p.config.Inline == nil {
errs = packer.MultiErrorAppend(errs,
errors.New("Either a script file or inline script must be specified."))
} else if len(p.config.Scripts) > 0 && p.config.Inline != nil {
errs = packer.MultiErrorAppend(errs,
errors.New("Only a script file or an inline script can be specified, not both."))
}
for _, path := range p.config.Scripts {
if _, err := os.Stat(path); err != nil {
errs = packer.MultiErrorAppend(errs,
fmt.Errorf("Bad script '%s': %s", path, err))
}
}
// Do a check for bad environment variables, such as '=foo', 'foobar'
for idx, kv := range p.config.Vars {
vs := strings.SplitN(kv, "=", 2)
if len(vs) != 2 || vs[0] == "" {
errs = packer.MultiErrorAppend(errs,
fmt.Errorf("Environment variable not in format 'key=value': %s", kv))
} else {
// Replace single quotes so they parse
vs[1] = strings.Replace(vs[1], "'", `'"'"'`, -1)
// Single quote env var values
p.config.Vars[idx] = fmt.Sprintf("%s='%s'", vs[0], vs[1])
}
}
if p.config.RawStartRetryTimeout != "" {
p.config.startRetryTimeout, err = time.ParseDuration(p.config.RawStartRetryTimeout)
if err != nil {
errs = packer.MultiErrorAppend(
errs, fmt.Errorf("Failed parsing start_retry_timeout: %s", err))
}
}
if errs != nil && len(errs.Errors) > 0 {
return errs
}
return nil
}
func (p *Provisioner) Provision(ui packer.Ui, comm packer.Communicator) error {
scripts := make([]string, len(p.config.Scripts))
copy(scripts, p.config.Scripts)
// If we have an inline script, then turn that into a temporary
// shell script and use that.
if p.config.Inline != nil {
tf, err := ioutil.TempFile("", "packer-shell")
if err != nil {
return fmt.Errorf("Error preparing shell script: %s", err)
}
defer os.Remove(tf.Name())
// Set the path to the temporary file
scripts = append(scripts, tf.Name())
// Write our contents to it
writer := bufio.NewWriter(tf)
writer.WriteString(fmt.Sprintf("#!%s\n", p.config.InlineShebang))
for _, command := range p.config.Inline {
if _, err := writer.WriteString(command + "\n"); err != nil {
return fmt.Errorf("Error preparing shell script: %s", err)
}
}
if err := writer.Flush(); err != nil {
return fmt.Errorf("Error preparing shell script: %s", err)
}
tf.Close()
}
// Build our variables up by adding in the build name and builder type
envVars := make([]string, len(p.config.Vars)+2)
envVars[0] = fmt.Sprintf("PACKER_BUILD_NAME='%s'", p.config.PackerBuildName)
envVars[1] = fmt.Sprintf("PACKER_BUILDER_TYPE='%s'", p.config.PackerBuilderType)
copy(envVars[2:], p.config.Vars)
for _, path := range scripts {
ui.Say(fmt.Sprintf("Provisioning with shell script: %s", path))
log.Printf("Opening %s for reading", path)
f, err := os.Open(path)
if err != nil {
return fmt.Errorf("Error opening shell script: %s", err)
}
defer f.Close()
// Flatten the environment variables
flattendVars := strings.Join(envVars, " ")
// Compile the command
p.config.ctx.Data = &ExecuteCommandTemplate{
Vars: flattendVars,
Path: p.config.RemotePath,
}
command, err := interpolate.Render(p.config.ExecuteCommand, &p.config.ctx)
if err != nil {
return fmt.Errorf("Error processing command: %s", err)
}
// Upload the file and run the command. Do this in the context of
// a single retryable function so that we don't end up with
// the case that the upload succeeded, a restart is initiated,
// and then the command is executed but the file doesn't exist
// any longer.
var cmd *packer.RemoteCmd
err = p.retryable(func() error {
if _, err := f.Seek(0, 0); err != nil {
return err
}
var r io.Reader = f
if !p.config.Binary {
r = &UnixReader{Reader: r}
}
if err := comm.Upload(p.config.RemotePath, r, nil); err != nil {
return fmt.Errorf("Error uploading script: %s", err)
}
cmd = &packer.RemoteCmd{
Command: fmt.Sprintf("chmod 0755 %s", p.config.RemotePath),
}
if err := comm.Start(cmd); err != nil {
return fmt.Errorf(
"Error chmodding script file to 0755 in remote "+
"machine: %s", err)
}
cmd.Wait()
cmd = &packer.RemoteCmd{Command: command}
return cmd.StartWithUi(comm, ui)
})
if err != nil {
return err
}
if cmd.ExitStatus != 0 {
return fmt.Errorf("Script exited with non-zero exit status: %d", cmd.ExitStatus)
}
// Delete the temporary file we created. We retry this a few times
// since if the above rebooted we have to wait until the reboot
// completes.
err = p.retryable(func() error {
cmd = &packer.RemoteCmd{
Command: fmt.Sprintf("rm -f %s", p.config.RemotePath),
}
if err := comm.Start(cmd); err != nil {
return fmt.Errorf(
"Error removing temporary script at %s: %s",
p.config.RemotePath, err)<|fim▁hole|> cmd.Wait()
return nil
})
if err != nil {
return err
}
if cmd.ExitStatus != 0 {
return fmt.Errorf(
"Error removing temporary script at %s!",
p.config.RemotePath)
}
}
return nil
}
func (p *Provisioner) Cancel() {
// Just hard quit. It isn't a big deal if what we're doing keeps
// running on the other side.
os.Exit(0)
}
// retryable will retry the given function over and over until a
// non-error is returned.
func (p *Provisioner) retryable(f func() error) error {
startTimeout := time.After(p.config.startRetryTimeout)
for {
var err error
if err = f(); err == nil {
return nil
}
// Create an error and log it
err = fmt.Errorf("Retryable error: %s", err)
log.Printf(err.Error())
// Check if we timed out, otherwise we retry. It is safe to
// retry since the only error case above is if the command
// failed to START.
select {
case <-startTimeout:
return err
default:
time.Sleep(2 * time.Second)
}
}
}
func init() {
rand.Seed(time.Now().UnixNano())
}
func randomScriptName() string {
return fmt.Sprintf("/tmp/script_%d.sh", rand.Intn(9999999))
}<|fim▁end|> | } |
<|file_name|>graphics.cpp<|end_file_name|><|fim▁begin|>#include <string>
#include <vector>
#include <stdio.h>
#include "graphics.h"
#include "engine.h"
#include "Lib/BufferedFile.h"
#include "Lib/getvar.h"
using namespace std;
using namespace sf;
int GraphicResources::GetGraphicCount()
{
return Graphics.size() - 1;
}
GraphicData& GraphicResources::GetGraphic(int Index)
{
return Graphics[Index];
}
void GraphicResources::LoadAll()
{
LoadGraphicData();
LoadTextures();
LoadAnimations();
}
void GraphicResources::LoadTextures()
{
int GraphicCount = GetGraphicCount();
for (int I = 1; I <= GraphicCount; I++)
{
GraphicData& Graphic = GetGraphic(I);
if (Textures.count(Graphic.FileID) == 0)
{
Textures[Graphic.FileID] = Texture();
Textures[Graphic.FileID].loadFromFile("Resources/Graphics/" + to_string(Graphic.FileID) + ".png");
}
}
}
int GraphicResources::GetAnimation(int BodyPart, int AnimIndex, int Heading)
{
return Animations[BodyPart][AnimIndex][Heading];
}
void GraphicResources::LoadGraphicData()
{
BufferedFile File = BufferedFile("Resources/Graficos.ind", "rb");
int FileVersion = File.Read(&FileVersion);
unsigned int GraphicCount = File.Read(&GraphicCount);
printf("GrhCount %u\n", GraphicCount);
Graphics.resize(GraphicCount + 1);
while(true)
{
GraphicData Graphic;
unsigned int Index = File.Read(&Index);
if (Index != 0)
{
Graphic.FrameCount = File.Read(&Graphic.FrameCount);
if (Graphic.FrameCount > 1)
{
for (int I = 0; I < Graphic.FrameCount; I++)
{
Graphic.Frames[I] = File.Read(&Graphic.Frames[I]);
if (Graphic.Frames[I] <= 0 || Graphic.Frames[I] > GraphicCount)
return;
}
Graphic.Speed = File.Read(&Graphic.Speed);
if (Graphic.Speed <= 0)
return;
Graphic.StartX = Graphics[Graphic.Frames[0]].StartX;
if (Graphic.StartX < 0)
return;
Graphic.StartY = Graphics[Graphic.Frames[0]].StartY;
if (Graphic.StartY < 0)
return;
Graphic.TileWidth = Graphics[Graphic.Frames[0]].TileWidth;
Graphic.TileHeight = Graphics[Graphic.Frames[0]].TileHeight;
}
else
{
Graphic.FileID = File.Read(&Graphic.FileID);
if (Graphic.FileID <= 0)
return;
Graphic.StartX = File.Read(&Graphic.StartX);
if (Graphic.StartX < 0)
return;
Graphic.StartY = File.Read(&Graphic.StartY);
if (Graphic.StartY < 0)
return;
Graphic.PixelWidth = File.Read(&Graphic.PixelWidth);
if (Graphic.PixelWidth <= 0)
return;
Graphic.PixelHeight = File.Read(&Graphic.PixelHeight);
if (Graphic.PixelHeight <= 0)
return;
Graphic.TileWidth = Graphic.PixelWidth / 32;
Graphic.TileHeight = Graphic.PixelHeight / 32;
Graphic.Frames[0] = Index;
}
Graphics[Index] = Graphic;
}<|fim▁hole|> }
}
void GraphicResources::LoadAnimations()
{
printf("Loading animations\n");
Animations.resize(5);
int Count = GetVarInt("Resources/Cabezas.ini", "INIT", "NumHeads");
printf("Head count: %u\n", Count);
for (int I = 0; I < Count; ++I)
{
vector<int> Anim;
Anim.push_back(GetVarInt("Resources/Cabezas.ini", "HEAD" + to_string(I), "Head1"));
Anim.push_back(GetVarInt("Resources/Cabezas.ini", "HEAD" + to_string(I), "Head2"));
Anim.push_back(GetVarInt("Resources/Cabezas.ini", "HEAD" + to_string(I), "Head3"));
Anim.push_back(GetVarInt("Resources/Cabezas.ini", "HEAD" + to_string(I), "Head4"));
Animations[0].push_back(Anim);
}
Count = GetVarInt("Resources/Cuerpos.ini", "INIT", "NumBodies");
printf("Torso count: %u\n", Count);
for (int I = 0; I < Count; ++I)
{
vector<int> Anim;
Anim.push_back(GetVarInt("Resources/Cuerpos.ini", "BODY" + to_string(I), "WALK1"));
Anim.push_back(GetVarInt("Resources/Cuerpos.ini", "BODY" + to_string(I), "WALK2"));
Anim.push_back(GetVarInt("Resources/Cuerpos.ini", "BODY" + to_string(I), "WALK3"));
Anim.push_back(GetVarInt("Resources/Cuerpos.ini", "BODY" + to_string(I), "WALK4"));
Animations[1].push_back(Anim);
}
Count = GetVarInt("Resources/Cascos.ini", "INIT", "NumCascos");
printf("Helmet count: %u\n", Count);
for (int I = 0; I < Count; ++I)
{
vector<int> Anim;
Anim.push_back(GetVarInt("Resources/Cascos.ini", "CASCO" + to_string(I), "Head1"));
Anim.push_back(GetVarInt("Resources/Cascos.ini", "CASCO" + to_string(I), "Head2"));
Anim.push_back(GetVarInt("Resources/Cascos.ini", "CASCO" + to_string(I), "Head3"));
Anim.push_back(GetVarInt("Resources/Cascos.ini", "CASCO" + to_string(I), "Head4"));
Animations[2].push_back(Anim);
}
Count = GetVarInt("Resources/Armas.ini", "INIT", "NumArmas");
printf("Weapon count: %u\n", Count);
for (int I = 0; I < Count; ++I)
{
vector<int> Anim;
Anim.push_back(GetVarInt("Resources/Armas.ini", "Arma" + to_string(I), "Dir1"));
Anim.push_back(GetVarInt("Resources/Armas.ini", "Arma" + to_string(I), "Dir2"));
Anim.push_back(GetVarInt("Resources/Armas.ini", "Arma" + to_string(I), "Dir3"));
Anim.push_back(GetVarInt("Resources/Armas.ini", "Arma" + to_string(I), "Dir4"));
Animations[3].push_back(Anim);
}
Count = GetVarInt("Resources/Escudos.ini", "INIT", "NumEscudos");
printf("Shield count: %u\n", Count);
for (int I = 0; I < Count; ++I)
{
vector<int> Anim;
Anim.push_back(GetVarInt("Resources/Escudos.ini", "ESC" + to_string(I), "Dir1"));
Anim.push_back(GetVarInt("Resources/Escudos.ini", "ESC" + to_string(I), "Dir2"));
Anim.push_back(GetVarInt("Resources/Escudos.ini", "ESC" + to_string(I), "Dir3"));
Anim.push_back(GetVarInt("Resources/Escudos.ini", "ESC" + to_string(I), "Dir4"));
Animations[4].push_back(Anim);
}
}
Sprite GraphicResources::GenerateSpriteFromGrhIndex(int GrhIndex, bool Centered)
{
Sprite Sprite;
GraphicData& Frame = GetGraphic(GetGraphic(GrhIndex).Frames[0]);
Sprite.setTexture(Textures[Frame.FileID]);
Sprite.setTextureRect(IntRect(Frame.StartX, Frame.StartY, Frame.PixelWidth, Frame.PixelHeight));
if (Centered)
{
int OriginX = (int)(((int)Frame.TileWidth) * 16) + 16;
int OriginY = (int)((((int)Frame.TileHeight) * 32) + 32);
Sprite.setOrigin(OriginX, OriginY);
}
return Sprite;
}<|fim▁end|> |
if (Index == GraphicCount)
break; |
<|file_name|>engine.py<|end_file_name|><|fim▁begin|>## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2014, 2015 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import print_function
"""BibMatch - tool to match records with database content of an Invenio instance,
either locally or remotely through invenio_connector."""
__revision__ = "$Id$"
import sys
if sys.hexversion < 0x2040000:
# pylint: disable=W0622
from sets import Set as set #for "&" intersection
# pylint: enable=W0622
import string
import os
import getopt
import re
import getpass
from six import iteritems
from tempfile import mkstemp
from time import sleep
from invenio.config import CFG_SITE_SECURE_URL, CFG_BIBMATCH_FUZZY_WORDLIMITS, \
CFG_BIBMATCH_QUERY_TEMPLATES, \
CFG_BIBMATCH_FUZZY_EMPTY_RESULT_LIMIT, \
CFG_BIBMATCH_LOCAL_SLEEPTIME, \
CFG_BIBMATCH_REMOTE_SLEEPTIME, \
CFG_SITE_RECORD, \
CFG_BIBMATCH_SEARCH_RESULT_MATCH_LIMIT
from invenio.legacy.bibmatch.config import CFG_BIBMATCH_LOGGER, \
CFG_LOGFILE
from invenio_client import InvenioConnector, \
InvenioConnectorAuthError
from invenio.legacy.bibrecord import create_records, \
record_get_field_values, record_xml_output, record_modify_controlfield, \
record_has_field, record_add_field
from invenio.legacy.bibconvert import api as bibconvert
from invenio.legacy.search_engine import get_fieldcodes, \
re_pattern_single_quotes, \
re_pattern_double_quotes, \
re_pattern_regexp_quotes, \
re_pattern_spaces_after_colon
from invenio.legacy.search_engine.query_parser import SearchQueryParenthesisedParser
from invenio.legacy.dbquery import run_sql
from invenio.legacy.bibrecord.textmarc2xmlmarc import transform_file
from invenio.legacy.bibmatch.validator import validate_matches, transform_record_to_marc, \
validate_tag, BibMatchValidationError
from invenio.utils.text import translate_to_ascii, xml_entities_to_utf8
try:
from six import StringIO
except ImportError:
from StringIO import StringIO
re_querystring = re.compile("\s?([^\s$]*)\[(.+?)\]([^\s$]*).*?", re.DOTALL)
def usage():
"""Print help"""
print(""" BibMatch - match bibliographic data against database, either locally or remotely
Usage: %s [options] [QUERY]
Options:
Output:
-0 --print-new (default) print unmatched in stdout
-1 --print-match print matched records in stdout
-2 --print-ambiguous print records that match more than 1 existing records
-3 --print-fuzzy print records that match the longest words in existing records
-b --batch-output=(filename). filename.new will be new records, filename.matched will be matched,
filename.ambiguous will be ambiguous, filename.fuzzy will be fuzzy match
-t --text-marc-output transform the output to text-marc format instead of the default MARCXML
Simple query:
-q --query-string=(search-query/predefined-query) See "Querystring"-section below.
-f --field=(field)
General options:
-n --noprocess Do not print records in stdout.
-i, --input use a named file instead of stdin for input
-v, --verbose=LEVEL verbose level (from 0 to 9, default 1)
-r, --remote=URL match against a remote Invenio installation (Full URL, no trailing '/')
Beware: Only searches public records attached to home collection
-a, --alter-recid The recid (controlfield 001) of matched or fuzzy matched records in
output will be replaced by the 001 value of the matched record.
Note: Useful if you want to replace matched records using BibUpload.
-z, --clean clean queries before searching
--no-validation do not perform post-match validation
-h, --help print this help and exit
-V, --version print version information and exit
Advanced options:
-m --mode=(a|e|o|p|r) perform an advanced search using special search mode.
Where mode is:
"a" all of the words,
"o" any of the words,
"e" exact phrase,
"p" partial phrase,
"r" regular expression.
-o --operator(a|o) used to concatenate identical fields in search query (i.e. several report-numbers)
Where operator is:
"a" boolean AND (default)
"o" boolean OR
-c --config=filename load querystrings from a config file. Each line starting with QRYSTR will
be added as a query. i.e. QRYSTR --- [title] [author]
-x --collection only perform queries in certain collection(s).
Note: matching against restricted collections requires authentication.
--user=USERNAME username to use when connecting to Invenio instance. Useful when searching
restricted collections. You will be prompted for password.
QUERYSTRINGS
Querystrings determine which type of query/strategy to use when searching for the
matching records in the database.
Predefined querystrings:
There are some predefined querystrings available:
title - standard title search. (i.e. "this is a title") (default)
title-author - title and author search (i.e. "this is a title AND Lastname, F")
reportnumber - reportnumber search (i.e. reportnumber:REP-NO-123).
You can also add your own predefined querystrings inside invenio.conf file.
You can structure your query in different ways:
* Old-style: fieldnames separated by '||' (conforms with earlier BibMatch versions):
-q "773__p||100__a"
* New-style: Invenio query syntax with "bracket syntax":
-q "773__p:\"[773__p]\" 100__a:[100__a]"
Depending on the structure of the query, it will fetch associated values from each record and put it into
the final search query. i.e in the above example it will put journal-title from 773__p.
When more then one value/datafield is found, i.e. when looking for 700__a (additional authors),
several queries will be put together to make sure all combinations of values are accounted for.
The queries are separated with given operator (-o, --operator) value.
Note: You can add more then one query to a search, just give more (-q, --query-string) arguments.
The results of all queries will be combined when matching.
BibConvert formats:
Another option to further improve your matching strategy is to use BibConvert formats. By using the formats
available by BibConvert you can change the values from the retrieved record-fields.
i.e. using WORDS(1,R) will only return the first (1) word from the right (R). This can be very useful when
adjusting your matching parameters to better match the content. For example only getting authors last-name
instead of full-name.
You can use these formats directly in the querystrings (indicated by '::'):
* Old-style: -q "100__a::WORDS(1,R)::DOWN()"
This query will take first word from the right from 100__a and also convert it to lower-case.
* New-style: -q "100__a:[100__a::WORDS(1,R)::DOWN()]"
See BibConvert documentation for a more detailed explanation of formats.
Predefined fields:
In addition to specifying distinct MARC fields in the querystrings you can use predefined
fields as configured in the LOCAL(!) Invenio system. These fields will then be mapped to one
or more fieldtags to be retrieved from input records.
Common predefined fields used in querystrings: (for Invenio demo site, your fields may vary!)
'abstract', 'affiliation', 'anyfield', 'author', 'coden', 'collaboration',
'collection', 'datecreated', 'datemodified', 'division', 'exactauthor', 'exactfirstauthor',
'experiment', 'fulltext', 'isbn', 'issn', 'journal', 'keyword', 'recid',
'reference', 'reportnumber', 'subject', 'title', 'year'
Examples:
$ bibmatch [options] < input.xml > unmatched.xml
$ bibmatch -b out -n < input.xml
$ bibmatch -a -1 < input.xml > modified_match.xml
$ bibmatch --field=title < input.xml
$ bibmatch --field=245__a --mode=a < input.xml
$ bibmatch --print-ambiguous -q title-author < input.xml > ambigmatched.xml
$ bibmatch -q "980:Thesis 773__p:\"[773__p]\" 100__a:[100__a]" -r "http://inspirebeta.net" < input.xml
$ bibmatch --collection 'Books,Articles' < input.xml
$ bibmatch --collection 'Theses' --user admin < input.xml
""" % (sys.argv[0],), file=sys.stderr)
sys.exit(1)
return
class Querystring:
"""
Holds the information about a querystring.
The object contains lists of fields, formats and queries which generates search queries.
self.fields is a dict of found field-data {"tag": [list of found record data]}
self.formats is a dict of found BibConvert formats {"tag": [list of found format-values]}
self.pattern contains the original search string
self.query contains the generated query
self.operator holds the current active operator, upper-case (OR/AND)
To populate the Querystring instance with values and search string structure,
call create_query(..) with BibRecord structure and a query-string to populate with retrieved values.
Example: The template "title:[245__a]" will retrieve the value from subfield 245__a in
given record. If any BibConvert formats are specified for this field, these will
be applied.
"""
def __init__(self, operator="AND", clean=False, ascii_mode=False):
"""
Creates Querystring instance.
@param operator: operator used to concatenate several queries
@type operator: str
@param clean: indicates if queries should be sanitized
@type clean: bool
"""
self.fields = {}
self.operator = operator.upper()
self.pattern = ""
self.query = ""
self.clean = clean
self.ascii_mode = ascii_mode
self.formats = {}
def create_query(self, record, qrystr="[title]"):
"""
Main method that parses and generates a search query from
given query-string structure and record data. Returns the
resulting query-string and completeness determination as a tuple.
A query is 'complete' when all found field references has a value
in the passed record. Should a value be missing, the query is
incomplete.
@param record: bibrecord to retrive field-values from
@type record: dict
@param qrystr: proper query string template. (i.e. title:[245__a])
defaults to: [title]
@type qrystr: str
@return: (query-string, complete flag)
@rtype: tuple
"""
if qrystr == "":
qrystr = "[title]"
if "||" in qrystr or not "[" in qrystr:
# Assume old style query-strings
qrystr = self._convert_qrystr(qrystr)
# FIXME: Convert to lower case, since fuzzy_parser
# which treats everything lower-case, and will cause problems when
# retrieving data from the self.fields dict.
# Also BibConvert formats are currently case sensitive, so we cannot
# force lower-case yet.
self.pattern = qrystr.lower()
self.fields = {}
# Extract referenced field-values from given record
complete, fieldtags_found = self._extract_fieldvalues(record, qrystr)
# If no field references are found, we exit as empty query.
if len(self.fields) == 0:
self.query = ""
return self.query, False
# Now we assemble the found values into a proper search query
all_queries = []
operator_delimiter = " %s " % (self.operator,)
if self.operator == "AND":
# We gather all the values from the self.fields and put them
# in a list together with any prefix/suffix associated with the field.
new_query = self.pattern
for (field_prefix, field_reference, field_suffix), value_list in iteritems(self.fields):
new_values = []
for value in value_list:
new_values.append("%s%s%s" % (field_prefix, value, field_suffix))
new_query = new_query.replace("%s[%s]%s" % (field_prefix, field_reference, field_suffix), \
operator_delimiter.join(set(new_values)))
all_queries = [new_query]
else:
# operator is OR, which means a more elaborate approach to multi-value fields
field_tuples = []
for key, values in iteritems(self.fields):
field_list = []
for value in values:
# We add key here to be able to associate the value later
field_list.append((key, value))
field_tuples.append(field_list)
# Grab all combinations of queries
query_tuples = cproduct(field_tuples)
for query in query_tuples:
new_query = self.pattern
for (field_prefix, field_reference, field_suffix), value in query:
new_query = new_query.replace("%s[%s]%s" % (field_prefix, field_reference, field_suffix), \
"%s%s%s" % (field_prefix, value, field_suffix))
all_queries.append(new_query)
# Finally we concatenate all unique queries into one, delimited by chosen operator
self.query = operator_delimiter.join(set(all_queries))
if not complete:
# Clean away any leftover field-name references from query
for fieldtag in fieldtags_found:
self.query = self.query.replace("%s" % (fieldtag,), "")
# Clean query?
if self.clean:
self._clean_query()
return self.query, complete
def fuzzy_queries(self):
"""
Returns a list of queries that are built more 'fuzzily' using the main query as base.
The list returned also contains the current operator in context, so each query is a tuple
of (operator, query).
@return: list of tuples [(operator, query), ..]
@rtype: list [(str, str), ..]
"""
fuzzy_query_list = []
operator_delimiter = " %s " % (self.operator,)
parser = SearchQueryParenthesisedParser()
query_parts = parser.parse_query(self.pattern)
author_query = []
author_operator = None
# Go through every expression in the query and generate fuzzy searches
for i in xrange(0, len(query_parts) - 1, 2):
current_operator = query_parts[i]
current_pattern = query_parts[i + 1]
fieldname_list = re_querystring.findall(current_pattern)
if fieldname_list == []:
# No reference to record value, add query 'as is'
fuzzy_query_list.append((current_operator, current_pattern))
else:
# Each reference will be split into prefix, field-ref and suffix.
# Example:
# 773__p:"[773__p]" 100__a:/.*[100__a].*/ =>
# [('773__p:"', '773__p', '"'), ('100__a:/.*', '100__a', '.*/')]
for field_prefix, field_reference, field_suffix in fieldname_list:
if field_reference == '245__a':
new_query = []
for value in self.fields.get((field_prefix, field_reference, field_suffix), []):
# Grab the x+1 longest words in the string and perform boolean OR
# for all combinations of x words (boolean AND)
# x is determined by the configuration dict and is tag-based. Defaults to 3 words
word_list = get_longest_words(value, limit=CFG_BIBMATCH_FUZZY_WORDLIMITS.get(field_reference, 3)+1)
for i in range(len(word_list)):
words = list(word_list)
words.pop(i)
new_query.append("(" + current_pattern.replace("[%s]" % (field_reference,), " ".join(words)) + ")")
fuzzy_query_list.append((current_operator, " OR ".join(new_query)))
elif field_reference == '100__a':
for value in self.fields.get((field_prefix, field_reference, field_suffix), []):
author_query.append(current_pattern.replace("[%s]" % (field_reference,), value))
author_operator = current_operator
elif field_reference == '700__a':
for value in self.fields.get((field_prefix, field_reference, field_suffix), []):
# take only the first 2nd author
author_query.append(current_pattern.replace("[%s]" % (field_reference,), value))
if not author_operator:
author_operator = current_operator
break
# for unique idenifier (DOI, repno) fuzzy search makes no sense
elif field_reference == '037__a':
continue
elif field_reference == '0247_a':
continue
else:
new_query = []
for value in self.fields.get((field_prefix, field_reference, field_suffix), []):
# Grab the x longest words in the string and perform boolean AND for each word
# x is determined by the configuration dict and is tag-based. Defaults to 3 words
# AND can be overwritten by command line argument -o o
word_list = get_longest_words(value, limit=CFG_BIBMATCH_FUZZY_WORDLIMITS.get(field_reference, 3))
for word in word_list:
# Create fuzzy query with key + word, including any surrounding elements like quotes, regexp etc.
new_query.append(current_pattern.replace("[%s]" % (field_reference,), word))
fuzzy_query_list.append((current_operator, operator_delimiter.join(new_query)))
if author_query:
fuzzy_query_list.append((author_operator, " OR ".join(author_query)))
# Return a list of unique queries
return list(set(fuzzy_query_list))
def _clean_query(self):
"""
This function will remove erroneous characters and combinations from
a the generated search query that might cause problems when searching.
@return: cleaned query
@rtype: str
"""
#FIXME: Extend cleaning to account for encodings and LaTeX symbols
query = self.query.replace("''", "")
query = query.replace('""', "")
return query
def _convert_qrystr(self, qrystr):
"""
Converts old-style query-strings into new-style.
"""
fields = qrystr.split("||")
converted_query = []
for field in fields:
converted_query.append("[%s]" % (field,))
return self.operator.join(converted_query)
def _extract_fieldvalues(self, record, qrystr):<|fim▁hole|> is complete, and a list of all field references found.
Field references is checked to be valid MARC tag references and all values
found are added to self.fields as a list, hashed by the full reference including
prefix and suffix.
If ascii_mode is enabled, the record values will be translated to its ascii
representation.
e.g. for the query-string: 700__a:"[700__a]"
{ ('700__a:"', '700__a', '"') : ["Ellis, J.", "Olive, K. A."]}
Should no values be found for a field references, the query will be flagged
as incomplete.
@param record: bibrecord to retrive field-values from
@type record: dict
@param qrystr: proper query string template. (i.e. title:[245__a])
defaults to: [title]
@type qrystr: str
@return: complete flag, [field references found]
@rtype: tuple
"""
complete = True
fieldtags_found = []
# Find all potential references to record tag values and
# add to fields-dict as a list of values using field-name tuple as key.
#
# Each reference will be split into prefix, field-ref and suffix.
# Example:
# 773__p:"[773__p]" 100__a:/.*[100__a].*/ =>
# [('773__p:"', '773__p', '"'), ('100__a:/.*', '100__a', '.*/')]
for field_prefix, field_reference, field_suffix in re_querystring.findall(qrystr):
# First we see if there is any special formats for this field_reference
# The returned value from _extract_formats is the field-name stripped from formats.
# e.g. 245__a::SUP(NUM) => 245__a
fieldname = self._extract_formats(field_reference)
# We need everything in lower-case
field_prefix = field_prefix.lower()
field_suffix = field_suffix.lower()
# Find proper MARC tag(s) for the stripped field-name, if fieldname is used.
# e.g. author -> [100__a, 700__a]
# FIXME: Local instance only!
tag_list = get_field_tags_from_fieldname(fieldname)
if len(tag_list) == 0:
tag_list = [fieldname]
for field in tag_list:
# Check if it is really a reference to a tag to not confuse with e.g. regex syntax
tag_structure = validate_tag(field)
if tag_structure != None:
tag, ind1, ind2, code = tag_structure
value_list = record_get_field_values(record, tag, ind1, ind2, code)
if len(value_list) > 0:
# Apply any BibConvert formatting functions to each value
updated_value_list = self._apply_formats(fieldname, value_list)
# Also remove any errornous XML entities. I.e. & -> &
updated_value_list = [xml_entities_to_utf8(v, skip=[]) \
for v in updated_value_list]
if self.ascii_mode:
updated_value_list = translate_to_ascii(updated_value_list)
# Store found values linked to full field reference tuple including
# (prefix, field, suffix)
self.fields[(field_prefix,
fieldname,
field_suffix)] = updated_value_list
else:
# No values found. The query is deemed incomplete
complete = False
fieldtags_found.append("%s[%s]%s" % (field_prefix, fieldname, field_suffix))
return complete, fieldtags_found
def _extract_formats(self, field_reference):
"""
Looks for BibConvert formats within query-strings and adds to
the instance. Formats are defined by one or more '::' followed
by a format keyword which is defined in BibConvert FormatField()
method.
The function also removes the references to formatting functions
in the query (self.pattern)
Returns the field_reference reference, with formats stripped.
"""
field_parts = field_reference.split("::")
if len(field_parts) > 1:
# Remove any references to BibConvert functions in pattern. e.g. 245__a::SUP(PUNCT, ) -> 245__a
# self.pattern is lower cased. Returned value is field-name stripped from formats.
for aformat in field_parts[1:]:
self.formats.setdefault(field_parts[0], []).append(aformat)
self.pattern = self.pattern.replace("[%s]" % (field_reference.lower(),), "[%s]" % (field_parts[0],))
return field_parts[0]
def _apply_formats(self, fieldname, value_list):
"""
Apply the current stored BibConvert formating operations for a
field-name to the given list of strings. The list is then returned.
@param fieldname: name of field - used as key in the formats dict
@type fieldname: string
@param value_list: list of strings to apply formats to
@type value_list: list
@return: list of values with formatting functions applied
@rtype: list
"""
if fieldname in self.formats:
new_list = []
for value in value_list:
if value.strip() != "":
# Apply BibConvert formats if applicable
for aformat in self.formats[fieldname]:
value = bibconvert.FormatField(value, aformat)
new_list.append(value)
return new_list
else:
return value_list
def get_field_tags_from_fieldname(field):
"""
Gets list of field 'field' for the record with 'sysno' system number from the database.
"""
query = "select tag.value from tag left join field_tag on tag.id=field_tag.id_tag " \
+ "left join field on field_tag.id_field=field.id where field.code='%s'" % (field,)
out = []
res = run_sql(query)
for row in res:
out.append(row[0])
return out
def cproduct(args):
"""
Returns the Cartesian product of passed arguments as a list of tuples.
'12','34' -> ('1', '3'), ('1', '4'), ('2', '3'), ('2', '4')
@param args: iterable with elements to compute
@type args: iterable
@return list containing tuples for each computed combination
@rtype list of tuples
Based on http://docs.python.org/library/itertools.html#itertools.product
"""
values = map(tuple, args)
result = [[]]
for value in values:
result = [x + [y] for x in result for y in value]
return [tuple(res) for res in result]
def bylen(word1, word2):
""" Sort comparison method that compares by length """
return len(word1) - len(word2)
def get_longest_words(wstr, limit=5):
"""
Select the longest words for matching. It selects the longest words from
the string, according to a given limit of words. By default the 5 longest word are selected
@param wstr: string to extract the longest words from
@type wstr: str
@param limit: maximum number of words extracted
@type limit: int
@return: list of long words
@rtype: list
"""
words = []
if wstr:
# Protect spaces within quotes
wstr = re_pattern_single_quotes.sub(
lambda x: "'" + string.replace(x.group(1), ' ', '__SPACE__') + "'",
wstr)
wstr = re_pattern_double_quotes.sub(
lambda x: "\"" + string.replace(x.group(1), ' ', '__SPACE__') + "\"",
wstr)
wstr = re_pattern_regexp_quotes.sub(
lambda x: "/" + string.replace(x.group(1), ' ', '__SPACE__') + "/",
wstr)
# and spaces after colon as well:
wstr = re_pattern_spaces_after_colon.sub(
lambda x: string.replace(x.group(1), ' ', '__SPACE__'),
wstr)
words = wstr.split()
for i in range(len(words)):
words[i] = words[i].replace('__SPACE__', ' ')
words.sort(cmp=bylen)
words.reverse()
words = words[:limit]
return words
def add_recid(record, recid):
"""
Add a given record-id to the record as $$001 controlfield. If an 001 field already
exists it will be replaced.
@param record: the record to retrive field-values from
@type record: a bibrecord instance
@param recid: record-id to be added
@type recid: int
"""
if record_has_field(record, '001'):
record_modify_controlfield(record, '001', \
controlfield_value=str(recid), \
field_position_global=1)
else:
record_add_field(record, '001', controlfield_value=str(recid))
def match_result_output(bibmatch_recid, recID_list, server_url, query, matchmode="no match"):
"""
Generates result as XML comments from passed record and matching parameters.
@param bibmatch_recid: BibMatch record identifier
@type bibmatch_recid: int
@param recID_list: record matched with record
@type recID_list: list
@param server_url: url to the server the matching has been performed
@type server_url: str
@param query: matching query
@type query: str
@param matchmode: matching type
@type matchmode: str
@rtype str
@return XML result string
"""
result = ["<!-- BibMatch-Matching-Results: -->", \
"<!-- BibMatch-Matching-Record-Identifier: %s -->" % (bibmatch_recid,)]
for recID in recID_list:
result.append("<!-- BibMatch-Matching-Found: %s/%s/%s -->" \
% (server_url, CFG_SITE_RECORD, recID))
result.append("<!-- BibMatch-Matching-Mode: %s -->" \
% (matchmode,))
result.append("<!-- BibMatch-Matching-Criteria: %s -->" \
% (query,))
return "\n".join(result)
def match_records(records, qrystrs=None, search_mode=None, operator="and", \
verbose=1, server_url=CFG_SITE_SECURE_URL, modify=0, \
sleeptime=CFG_BIBMATCH_LOCAL_SLEEPTIME, \
clean=False, collections=[], user="", password="", \
fuzzy=True, validate=True, ascii_mode=False,
insecure_login=False):
"""
Match passed records with existing records on a local or remote Invenio
installation. Returns which records are new (no match), which are matched,
which are ambiguous and which are fuzzy-matched. A formatted result of each
records matching are appended to each record tuple:
(record, status_code, list_of_errors, result)
@param records: records to analyze
@type records: list of records
@param qrystrs: list of tuples (field, querystring)
@type qrystrs: list
@param search_mode: if mode is given, the search will perform an advanced
query using the desired mode. Otherwise 'simple search'
is used.
@type search_mode: str
@param operator: operator used to concatenate values of fields occurring more then once.
Valid types are: AND, OR. Defaults to AND.
@type operator: str
@param verbose: be loud
@type verbose: int
@param server_url: which server to search on. Local installation by default
@type server_url: str
@param modify: output modified records of matches
@type modify: int
@param sleeptime: amount of time to wait between each query
@type sleeptime: float
@param clean: should the search queries be cleaned before passed them along?
@type clean: bool
@param collections: list of collections to search, if specified
@type collections: list
@param user: username in case of authenticated search requests
@type user: string
@param password: password in case of authenticated search requests
@type password: string
@param fuzzy: True to activate fuzzy query matching step
@type fuzzy: bool
@param validate: True to activate match validation
@type validate: bool
@param ascii_mode: True to transform values to its ascii representation
@type ascii_mode: bool
@rtype: list of lists
@return an array of arrays of records, like this [newrecs,matchedrecs,
ambiguousrecs,fuzzyrecs]
"""
newrecs = []
matchedrecs = []
ambiguousrecs = []
fuzzyrecs = []
CFG_BIBMATCH_LOGGER.info("-- BibMatch starting match of %d records --" % (len(records),))
try:
server = InvenioConnector(server_url, user=user, password=password,
insecure_login=insecure_login)
except InvenioConnectorAuthError as error:
if verbose > 0:
sys.stderr.write("Authentication error when connecting to server: %s" \
% (str(error),))
CFG_BIBMATCH_LOGGER.info("-- BibMatch ending match with errors (AuthError) --")
return [newrecs, matchedrecs, ambiguousrecs, fuzzyrecs]
## Go through each record and try to find matches using defined querystrings
record_counter = 0
for record in records:
record_counter += 1
if (verbose > 1):
sys.stderr.write("\n Processing record: #%d .." % (record_counter,))
# At least one (field, querystring) tuple is needed for default search query
if not qrystrs:
qrystrs = [("", "")]
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Started" % (record_counter,))
[matched_results, ambiguous_results, fuzzy_results] = match_record(bibmatch_recid=record_counter,
record=record[0],
server=server,
qrystrs=qrystrs,
search_mode=search_mode,
operator=operator,
verbose=verbose,
sleeptime=sleeptime,
clean=clean,
collections=collections,
fuzzy=fuzzy,
validate=validate,
ascii_mode=ascii_mode)
## Evaluate final results for record
# Add matched record iff number found is equal to one, otherwise return fuzzy,
# ambiguous or no match
if len(matched_results) == 1:
results, query = matched_results[0]
# If one match, add it as exact match, otherwise ambiguous
if len(results) == 1:
if modify:
add_recid(record[0], results[0])
matchedrecs.append((record[0], match_result_output(record_counter, results, server_url, \
query, "exact-matched")))
if (verbose > 1):
sys.stderr.write("Final result: match - %s/record/%s\n" % (server_url, str(results[0])))
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Completed as 'match'" % (record_counter,))
else:
ambiguousrecs.append((record[0], match_result_output(record_counter, results, server_url, \
query, "ambiguous-matched")))
if (verbose > 1):
sys.stderr.write("Final result: ambiguous\n")
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Completed as 'ambiguous'" % (record_counter,))
else:
if len(fuzzy_results) > 0:
# Find common record-id for all fuzzy results and grab first query
# as "representative" query
query = fuzzy_results[0][1]
result_lists = []
for res, dummy in fuzzy_results:
result_lists.extend(res)
results = set([res for res in result_lists])
if len(results) == 1:
fuzzyrecs.append((record[0], match_result_output(record_counter, results, server_url, \
query, "fuzzy-matched")))
if (verbose > 1):
sys.stderr.write("Final result: fuzzy\n")
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Completed as 'fuzzy'" % (record_counter,))
else:
ambiguousrecs.append((record[0], match_result_output(record_counter, results, server_url, \
query, "ambiguous-matched")))
if (verbose > 1):
sys.stderr.write("Final result: ambiguous\n")
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Completed as 'ambiguous'" % (record_counter,))
elif len(ambiguous_results) > 0:
# Find common record-id for all ambiguous results and grab first query
# as "representative" query
query = ambiguous_results[0][1]
result_lists = []
for res, dummy in ambiguous_results:
result_lists.extend(res)
results = set([res for res in result_lists])
ambiguousrecs.append((record[0], match_result_output(record_counter, results, server_url, \
query, "ambiguous-matched")))
if (verbose > 1):
sys.stderr.write("Final result: ambiguous\n")
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Completed as 'ambiguous'" % (record_counter,))
else:
newrecs.append((record[0], match_result_output(record_counter, [], server_url, str(qrystrs))))
if (verbose > 1):
sys.stderr.write("Final result: new\n")
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Completed as 'new'" % (record_counter,))
CFG_BIBMATCH_LOGGER.info("-- BibMatch ending match: New(%d), Matched(%d), Ambiguous(%d), Fuzzy(%d) --" % \
(len(newrecs), len(matchedrecs), len(ambiguousrecs), len(fuzzyrecs)))
return [newrecs, matchedrecs, ambiguousrecs, fuzzyrecs]
def match_record(bibmatch_recid, record, server, qrystrs=None, search_mode=None, operator="and", \
verbose=1, sleeptime=CFG_BIBMATCH_LOCAL_SLEEPTIME, \
clean=False, collections=[], fuzzy=True, validate=True, \
ascii_mode=False):
"""
Matches a single record.
@param bibmatch_recid: Current record number. Used for logging.
@type bibmatch_recid: int
@param record: record to match in BibRecord structure
@type record: dict
@param server: InvenioConnector server object
@type server: object
@param qrystrs: list of tuples (field, querystring)
@type qrystrs: list
@param search_mode: if mode is given, the search will perform an advanced
query using the desired mode. Otherwise 'simple search'
is used.
@type search_mode: str
@param operator: operator used to concatenate values of fields occurring more then once.
Valid types are: AND, OR. Defaults to AND.
@type operator: str
@param verbose: be loud
@type verbose: int
@param server_url: which server to search on. Local installation by default
@type server_url: str
@param sleeptime: amount of time to wait between each query
@type sleeptime: float
@param clean: should the search queries be cleaned before passed them along?
@type clean: bool
@param collections: list of collections to search, if specified
@type collections: list
@param fuzzy: True to activate fuzzy query matching step
@type fuzzy: bool
@param validate: True to activate match validation
@type validate: bool
@param ascii_mode: True to transform values to its ascii representation
@type ascii_mode: bool
"""
matched_results = []
ambiguous_results = []
fuzzy_results = []
# Keep a list of generated querystring objects for later use in fuzzy match
query_list = []
# Go through each querystring, trying to find a matching record
# Stops on first valid match, if no exact-match we continue with fuzzy match
for field, qrystr in qrystrs:
querystring = Querystring(operator, clean=clean, ascii_mode=ascii_mode)
query, complete = querystring.create_query(record, qrystr)
if query == "":
if (verbose > 1):
sys.stderr.write("\nEmpty query. Skipping...\n")
# Empty query, no point searching database
continue
query_list.append((querystring, complete, field))
if not complete:
if (verbose > 1):
sys.stderr.write("\nQuery not complete. Flagged as uncertain/ambiguous...\n")
# Determine proper search parameters
if search_mode != None:
search_params = dict(p1=query, f1=field, m1=search_mode, of='id', c=collections)
else:
search_params = dict(p=query, f=field, of='id', c=collections)
if (verbose > 8):
sys.stderr.write("\nSearching with values %s\n" %
(search_params,))
CFG_BIBMATCH_LOGGER.info("Searching with values %s" % (search_params,))
## Perform the search with retries
try:
result_recids = server.search_with_retry(**search_params)
except InvenioConnectorAuthError as error:
if verbose > 0:
sys.stderr.write("Authentication error when searching: %s" \
% (str(error),))
break
sleep(sleeptime)
## Check results:
if len(result_recids) > 0:
# Matches detected
CFG_BIBMATCH_LOGGER.info("Results: %s" % (result_recids[:15],))
if len(result_recids) > CFG_BIBMATCH_SEARCH_RESULT_MATCH_LIMIT:
# Too many matches, treat as non-match
if (verbose > 8):
sys.stderr.write("result=More then %d results...\n" % \
(CFG_BIBMATCH_SEARCH_RESULT_MATCH_LIMIT,))
continue
if (verbose > 8):
sys.stderr.write("result=%s\n" % (result_recids,))
if validate:
# Validation can be run
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Query (%s) found %d records: %s" % \
(bibmatch_recid,
query,
len(result_recids),
str(result_recids)))
exact_matches = []
fuzzy_matches = []
try:
exact_matches, fuzzy_matches = validate_matches(bibmatch_recid=bibmatch_recid, \
record=record, \
server=server, \
result_recids=result_recids, \
collections=collections, \
verbose=verbose, \
ascii_mode=ascii_mode)
except BibMatchValidationError, e:
sys.stderr.write("ERROR: %s\n" % (str(e),))
if len(exact_matches) > 0:
if (verbose > 8):
sys.stderr.write("Match validated\n")
matched_results.append((exact_matches, query))
break
elif len(fuzzy_matches) > 0:
if (verbose > 8):
sys.stderr.write("Match validated fuzzily\n")
fuzzy_results.append((fuzzy_matches, query))
continue
else:
if (verbose > 8):
sys.stderr.write("Match could not be validated\n")
else:
# No validation
# Ambiguous match
if len(result_recids) > 1:
ambiguous_results.append((result_recids, query))
if (verbose > 8):
sys.stderr.write("Ambiguous\n")
continue
# Match
elif len(result_recids) == 1:
if complete:
matched_results.append((result_recids, query))
if (verbose > 8):
sys.stderr.write("Match\n")
# This was a complete match, so let's break out to avoid more searching
break
else:
# We treat the result as ambiguous (uncertain) when query is not complete
# and we are not validating it.
ambiguous_results.append((result_recids, query))
if (verbose > 8):
sys.stderr.write("Ambiguous\n")
continue
# No match
if (verbose > 8):
sys.stderr.write("result=No matches\n")
# No complete matches, lets try fuzzy matching of all the queries
else:
if fuzzy:
if (verbose > 8):
sys.stderr.write("\nFuzzy query mode...\n")
## Fuzzy matching: Analyze all queries and perform individual searches, then intersect results.
for querystring, complete, field in query_list:
result_hitset = None
if (verbose > 8):
sys.stderr.write("\n Start new search ------------ \n")
fuzzy_query_list = querystring.fuzzy_queries()
empty_results = 0
# Go through every expression in the query and generate fuzzy searches
for current_operator, qry in fuzzy_query_list:
current_resultset = None
if qry == "":
if (verbose > 1):
sys.stderr.write("\nEmpty query. Skipping...\n")
# Empty query, no point searching database
continue
search_params = dict(p=qry, f=field, of='id', c=collections)
CFG_BIBMATCH_LOGGER.info("Fuzzy searching with values %s" % (search_params,))
try:
current_resultset = server.search_with_retry(**search_params)
except InvenioConnectorAuthError as error:
if (verbose > 0):
sys.stderr.write("Authentication error when searching: %s" \
% (str(error),))
break
CFG_BIBMATCH_LOGGER.info("Results: %s" % (current_resultset[:15],))
if (verbose > 8):
if len(current_resultset) > CFG_BIBMATCH_SEARCH_RESULT_MATCH_LIMIT:
sys.stderr.write("\nSearching with values %s result=%s\n" %
(search_params, "More then %d results..." % \
(CFG_BIBMATCH_SEARCH_RESULT_MATCH_LIMIT,)))
else:
sys.stderr.write("\nSearching with values %s result=%s\n" %
(search_params, current_resultset))
sleep(sleeptime)
if current_resultset == None:
continue
if current_resultset == [] and empty_results < CFG_BIBMATCH_FUZZY_EMPTY_RESULT_LIMIT:
# Allows some empty results
empty_results += 1
else:
# Intersect results with previous results depending on current operator
if result_hitset == None:
result_hitset = current_resultset
if current_operator == '+':
result_hitset = list(set(result_hitset) & set(current_resultset))
elif current_operator == '-':
result_hitset = list(set(result_hitset) - set(current_resultset))
elif current_operator == '|':
result_hitset = list(set(result_hitset) | set(current_resultset))
else:
# We did not hit a break in the for-loop: we were allowed to search.
if result_hitset and len(result_hitset) > CFG_BIBMATCH_SEARCH_RESULT_MATCH_LIMIT:
if (verbose > 1):
sys.stderr.write("\nToo many results... %d " % (len(result_hitset)))
elif result_hitset:
# This was a fuzzy match
query_out = " ".join(["%s %s" % (op, qu) for op, qu in fuzzy_query_list])
if validate:
# We can run validation
CFG_BIBMATCH_LOGGER.info("Matching of record %d: Fuzzy query (%s) found %d records: %s" % \
(bibmatch_recid,
query_out,
len(result_hitset),
str(result_hitset)))
exact_matches = []
fuzzy_matches = []
try:
exact_matches, fuzzy_matches = validate_matches(bibmatch_recid=bibmatch_recid, \
record=record, \
server=server, \
result_recids=result_hitset, \
collections=collections, \
verbose=verbose, \
ascii_mode=ascii_mode)
except BibMatchValidationError, e:
sys.stderr.write("ERROR: %s\n" % (str(e),))
if len(exact_matches) > 0:
if (verbose > 8):
sys.stderr.write("Match validated\n")
matched_results.append((exact_matches, query_out))
break
elif len(fuzzy_matches) > 0:
if (verbose > 8):
sys.stderr.write("Match validated fuzzily\n")
fuzzy_results.append((fuzzy_matches, query_out))
else:
if (verbose > 8):
sys.stderr.write("Match could not be validated\n")
else:
# No validation
if len(result_hitset) == 1 and complete:
fuzzy_results.append((result_hitset, query_out))
if (verbose > 8):
sys.stderr.write("Fuzzy: %s\n" % (result_hitset,))
else:
# We treat the result as ambiguous (uncertain) when:
# - query is not complete
# - more then one result
ambiguous_results.append((result_hitset, query_out))
if (verbose > 8):
sys.stderr.write("Ambiguous\n")
return [matched_results, ambiguous_results, fuzzy_results]
def transform_input_to_marcxml(filename=None, file_input=""):
"""
Takes the filename or input of text-marc and transforms it
to MARCXML.
"""
if not filename:
# Create temporary file to read from
tmp_fd, filename = mkstemp()
os.write(tmp_fd, file_input)
os.close(tmp_fd)
try:
# Redirect output, transform, restore old references
old_stdout = sys.stdout
new_stdout = StringIO()
sys.stdout = new_stdout
transform_file(filename)
finally:
sys.stdout = old_stdout
return new_stdout.getvalue()
def bibrecs_has_errors(bibrecs):
"""
Utility function to check a list of parsed BibRec objects, directly
from the output of bibrecord.create_records(), for any
badly parsed records.
If an error-code is present in the result the function will return True,
otherwise False.
"""
return 0 in [err_code for dummy, err_code, dummy2 in bibrecs]
def main():
"""
Record matches database content when defined search gives
exactly one record in the result set. By default the match is
done on the title field.
"""
try:
opts, args = getopt.getopt(sys.argv[1:], "0123hVm:fq:c:nv:o:b:i:r:tazx:",
[
"print-new",
"print-match",
"print-ambiguous",
"print-fuzzy",
"help",
"version",
"mode=",
"field=",
"query-string=",
"config=",
"no-process",
"verbose=",
"operator=",
"batch-output=",
"input=",
"remote=",
"text-marc-output",
"alter-recid",
"clean",
"collection=",
"user=",
"no-fuzzy",
"no-validation",
"ascii"
])
except getopt.GetoptError as e:
usage()
match_results = []
qrystrs = [] # list of query strings
print_mode = 0 # default match mode to print new records
noprocess = 0 # dump result in stdout?
operator = "and"
verbose = 1 # 0..be quiet
records = []
batch_output = "" # print stuff in files
f_input = "" # read from where, if param "i"
server_url = CFG_SITE_SECURE_URL # url to server performing search, local by default
modify = 0 # alter output with matched record identifiers
textmarc_output = 0 # output in MARC instead of MARCXML
field = ""
search_mode = None # activates a mode, uses advanced search instead of simple
sleeptime = CFG_BIBMATCH_LOCAL_SLEEPTIME # the amount of time to sleep between queries, changes on remote queries
clean = False # should queries be sanitized?
collections = [] # only search certain collections?
user = ""
password = ""
validate = True # should matches be validate?
fuzzy = True # Activate fuzzy-mode if no matches found for a record
ascii_mode = False # Should values be turned into ascii mode
for opt, opt_value in opts:
if opt in ["-0", "--print-new"]:
print_mode = 0
if opt in ["-1", "--print-match"]:
print_mode = 1
if opt in ["-2", "--print-ambiguous"]:
print_mode = 2
if opt in ["-3", "--print-fuzzy"]:
print_mode = 3
if opt in ["-n", "--no-process"]:
noprocess = 1
if opt in ["-h", "--help"]:
usage()
sys.exit(0)
if opt in ["-V", "--version"]:
print(__revision__)
sys.exit(0)
if opt in ["-t", "--text-marc-output"]:
textmarc_output = 1
if opt in ["-v", "--verbose"]:
verbose = int(opt_value)
if opt in ["-f", "--field"]:
if opt_value in get_fieldcodes():
field = opt_value
if opt in ["-q", "--query-string"]:
try:
template = CFG_BIBMATCH_QUERY_TEMPLATES[opt_value]
qrystrs.append((field, template))
except KeyError:
qrystrs.append((field, opt_value))
if opt in ["-m", "--mode"]:
search_mode = opt_value
if opt in ["-o", "--operator"]:
if opt_value.lower() in ["o", "or", "|"]:
operator = "or"
elif opt_value.lower() in ["a", "and", "&"]:
operator = "and"
if opt in ["-b", "--batch-output"]:
batch_output = opt_value
if opt in ["-i", "--input"]:
f_input = opt_value
if opt in ["-r", "--remote"]:
server_url = opt_value
sleeptime = CFG_BIBMATCH_REMOTE_SLEEPTIME
if opt in ["-a", "--alter-recid"]:
modify = 1
if opt in ["-z", "--clean"]:
clean = True
if opt in ["-c", "--config"]:
config_file = opt_value
config_file_read = bibconvert.read_file(config_file, 0)
for line in config_file_read:
tmp = line.split("---")
if(tmp[0] == "QRYSTR"):
qrystrs.append((field, tmp[1]))
if opt in ["-x", "--collection"]:
colls = opt_value.split(',')
for collection in colls:
if collection not in collections:
collections.append(collection)
if opt in ["--user"]:
user = opt_value
password = getpass.getpass()
if opt == "--no-fuzzy":
fuzzy = False
if opt == "--no-validation":
validate = False
if opt == "--ascii":
ascii_mode = True
if verbose:
sys.stderr.write("\nBibMatch: Parsing input file %s..." % (f_input,))
read_list = []
if not f_input:
for line_in in sys.stdin:
read_list.append(line_in)
else:
f = open(f_input)
for line_in in f:
read_list.append(line_in)
f.close()
file_read = "".join(read_list)
# Detect input type
if not file_read.strip().startswith('<'):
# Not xml, assume type textmarc
file_read = transform_input_to_marcxml(f_input, file_read)
records = create_records(file_read)
if len(records) == 0:
if verbose:
sys.stderr.write("\nBibMatch: Input file contains no records.\n")
sys.exit(1)
# Check for any parsing errors in records
if bibrecs_has_errors(records):
# Errors found. Let's try to remove any XML entities
if verbose > 8:
sys.stderr.write("\nBibMatch: Parsing error. Trying removal of XML entities..\n")
file_read = xml_entities_to_utf8(file_read)
records = create_records(file_read)
if bibrecs_has_errors(records):
# Still problems.. alert the user and exit
if verbose:
errors = "\n".join([str(err_msg) for dummy, err_code, err_msg in records \
if err_code == 0])
sys.stderr.write("\nBibMatch: Errors during record parsing:\n%s\n" % \
(errors,))
sys.exit(1)
if verbose:
sys.stderr.write("read %d records" % (len(records),))
sys.stderr.write("\nBibMatch: Matching ...")
if not validate:
if verbose:
sys.stderr.write("\nWARNING: Skipping match validation.\n")
match_results = match_records(records=records,
qrystrs=qrystrs,
search_mode=search_mode,
operator=operator,
verbose=verbose,
server_url=server_url,
modify=modify,
sleeptime=sleeptime,
clean=clean,
collections=collections,
user=user,
password=password,
fuzzy=fuzzy,
validate=validate,
ascii_mode=ascii_mode)
# set the output according to print..
# 0-newrecs 1-matchedrecs 2-ambiguousrecs 3-fuzzyrecs
recs_out = match_results[print_mode]
if verbose:
sys.stderr.write("\n\n Bibmatch report\n")
sys.stderr.write("=" * 35)
sys.stderr.write("\n New records : %d" % (len(match_results[0]),))
sys.stderr.write("\n Matched records : %d" % (len(match_results[1]),))
sys.stderr.write("\n Ambiguous records : %d" % (len(match_results[2]),))
sys.stderr.write("\n Fuzzy records : %d\n" % (len(match_results[3]),))
sys.stderr.write("=" * 35)
sys.stderr.write("\n Total records : %d\n" % (len(records),))
sys.stderr.write("\n See detailed log at %s\n" % (CFG_LOGFILE,))
if not noprocess and recs_out:
print('<collection xmlns="http://www.loc.gov/MARC21/slim">')
for record, results in recs_out:
if textmarc_output:
# FIXME: textmarc output does not print matching results
print(transform_record_to_marc(record))
else:
print(results)
print(record_xml_output(record))
print("</collection>")
if batch_output:
i = 0
outputs = ['new', 'matched', 'ambiguous', 'fuzzy']
for result in match_results:
out = []
out.append('<collection xmlns="http://www.loc.gov/MARC21/slim">')
for record, results in result:
if textmarc_output:
# FIXME: textmarc output does not print matching results
out.append(transform_record_to_marc(record))
else:
out.append(results)
out.append(record_xml_output(record))
out.append("</collection>")
filename = "%s.%s.xml" % (batch_output, outputs[i])
file_fd = open(filename, "w")
file_fd.write("\n".join(out))
file_fd.close()
i += 1<|fim▁end|> | """
Extract all the values in the given record referenced in the given query-string
and attach them to self.fields as a list. Return boolean indicating if a query |
<|file_name|>minimax_test.rs<|end_file_name|><|fim▁begin|>use crate::minimax::{Minimax, MinimaxConfig, MinimaxMovesSorting, MinimaxType};
use env_logger;
use oppai_field::construct_field::construct_field;
use oppai_field::player::Player;
use oppai_test_images::*;
use rand::SeedableRng;
use rand_xorshift::XorShiftRng;
const SEED: [u8; 16] = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53];
const MINIMAX_CONFIG_NEGASCOUT: MinimaxConfig = MinimaxConfig {
threads_count: 1,
minimax_type: MinimaxType::NegaScout,
minimax_moves_sorting: MinimaxMovesSorting::TrajectoriesCount,
hash_table_size: 10_000,
rebuild_trajectories: false,
};
const MINIMAX_CONFIG_MTDF: MinimaxConfig = MinimaxConfig {
threads_count: 1,
minimax_type: MinimaxType::MTDF,
minimax_moves_sorting: MinimaxMovesSorting::TrajectoriesCount,
hash_table_size: 10_000,
rebuild_trajectories: false,
};
macro_rules! minimax_test {
($(#[$($attr:meta),+])* $name:ident, $config:ident, $image:ident, $depth:expr) => {
#[test]
$(#[$($attr),+])*
fn $name() {
env_logger::try_init().ok();
let mut rng = XorShiftRng::from_seed(SEED);
let mut field = construct_field(&mut rng, $image.image);
let minimax = Minimax::new($config);
let pos = minimax.minimax(&mut field, Player::Red, &mut rng, $depth);
assert_eq!(pos, Some(field.to_pos($image.solution.0, $image.solution.1)));
}
}
}
minimax_test!(negascout_1, MINIMAX_CONFIG_NEGASCOUT, IMAGE_1, 8);
minimax_test!(negascout_2, MINIMAX_CONFIG_NEGASCOUT, IMAGE_2, 8);
minimax_test!(negascout_3, MINIMAX_CONFIG_NEGASCOUT, IMAGE_3, 8);
minimax_test!(negascout_4, MINIMAX_CONFIG_NEGASCOUT, IMAGE_4, 8);
minimax_test!(negascout_5, MINIMAX_CONFIG_NEGASCOUT, IMAGE_5, 8);
minimax_test!(negascout_6, MINIMAX_CONFIG_NEGASCOUT, IMAGE_6, 8);
minimax_test!(
#[ignore]
negascout_7,
MINIMAX_CONFIG_NEGASCOUT,
IMAGE_7,
10
);
minimax_test!(negascout_8, MINIMAX_CONFIG_NEGASCOUT, IMAGE_8, 8);
minimax_test!(
#[ignore]
negascout_9,
MINIMAX_CONFIG_NEGASCOUT,
IMAGE_9,
10
);
minimax_test!(negascout_10, MINIMAX_CONFIG_NEGASCOUT, IMAGE_10, 8);
minimax_test!(
#[ignore]
negascout_11,
MINIMAX_CONFIG_NEGASCOUT,
IMAGE_11,
12
);
minimax_test!(negascout_12, MINIMAX_CONFIG_NEGASCOUT, IMAGE_12, 8);
minimax_test!(negascout_13, MINIMAX_CONFIG_NEGASCOUT, IMAGE_13, 8);
minimax_test!(negascout_14, MINIMAX_CONFIG_NEGASCOUT, IMAGE_14, 8);
minimax_test!(mtdf_1, MINIMAX_CONFIG_MTDF, IMAGE_1, 8);
minimax_test!(mtdf_2, MINIMAX_CONFIG_MTDF, IMAGE_2, 8);
minimax_test!(mtdf_3, MINIMAX_CONFIG_MTDF, IMAGE_3, 8);
minimax_test!(mtdf_4, MINIMAX_CONFIG_MTDF, IMAGE_4, 8);
minimax_test!(mtdf_5, MINIMAX_CONFIG_MTDF, IMAGE_5, 8);
minimax_test!(mtdf_6, MINIMAX_CONFIG_MTDF, IMAGE_6, 8);
minimax_test!(
#[ignore]
mtdf_7,
MINIMAX_CONFIG_MTDF,
IMAGE_7,
10
);
minimax_test!(mtdf_8, MINIMAX_CONFIG_MTDF, IMAGE_8, 8);
minimax_test!(
#[ignore]
mtdf_9,
MINIMAX_CONFIG_MTDF,
IMAGE_9,
10
);
minimax_test!(mtdf_10, MINIMAX_CONFIG_MTDF, IMAGE_10, 8);
minimax_test!(
#[ignore]
mtdf_11,
MINIMAX_CONFIG_MTDF,
IMAGE_11,
12
);
minimax_test!(mtdf_12, MINIMAX_CONFIG_MTDF, IMAGE_12, 8);<|fim▁hole|><|fim▁end|> | minimax_test!(mtdf_13, MINIMAX_CONFIG_MTDF, IMAGE_13, 8);
minimax_test!(mtdf_14, MINIMAX_CONFIG_MTDF, IMAGE_14, 8); |
<|file_name|>cacheprovider.py<|end_file_name|><|fim▁begin|>"""
merged implementation of the cache provider
the name cache was not chosen to ensure pluggy automatically
ignores the external pytest-cache
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from collections import OrderedDict
import attr
import py
import six
import pytest
from .compat import _PY2 as PY2
from .pathlib import Path
from .pathlib import resolve_from_str
from .pathlib import rmtree
README_CONTENT = u"""\
# pytest cache directory #
This directory contains data from the pytest's cache plugin,
which provides the `--lf` and `--ff` options, as well as the `cache` fixture.
**Do not** commit this to version control.
See [the docs](https://docs.pytest.org/en/latest/cache.html) for more information.
"""
CACHEDIR_TAG_CONTENT = b"""\
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by pytest.
# For information about cache directory tags, see:
# http://www.bford.info/cachedir/spec.html
"""
@attr.s
class Cache(object):
_cachedir = attr.ib(repr=False)
_config = attr.ib(repr=False)
@classmethod
def for_config(cls, config):
cachedir = cls.cache_dir_from_config(config)
if config.getoption("cacheclear") and cachedir.exists():
rmtree(cachedir, force=True)
cachedir.mkdir()
return cls(cachedir, config)
@staticmethod
def cache_dir_from_config(config):
return resolve_from_str(config.getini("cache_dir"), config.rootdir)
def warn(self, fmt, **args):
from _pytest.warnings import _issue_warning_captured
from _pytest.warning_types import PytestWarning
_issue_warning_captured(
PytestWarning(fmt.format(**args) if args else fmt),
self._config.hook,
stacklevel=3,
)
def makedir(self, name):
""" return a directory path object with the given name. If the
directory does not yet exist, it will be created. You can use it
to manage files likes e. g. store/retrieve database
dumps across test sessions.
:param name: must be a string not containing a ``/`` separator.
Make sure the name contains your plugin or application
identifiers to prevent clashes with other cache users.
"""
name = Path(name)
if len(name.parts) > 1:
raise ValueError("name is not allowed to contain path separators")
res = self._cachedir.joinpath("d", name)
res.mkdir(exist_ok=True, parents=True)
return py.path.local(res)
def _getvaluepath(self, key):
return self._cachedir.joinpath("v", Path(key))
def get(self, key, default):
""" return cached value for the given key. If no value
was yet cached or the value cannot be read, the specified
default is returned.
:param key: must be a ``/`` separated value. Usually the first
name is the name of your plugin or your application.
:param default: must be provided in case of a cache-miss or
invalid cache values.
"""
path = self._getvaluepath(key)
try:
with path.open("r") as f:
return json.load(f)
except (ValueError, IOError, OSError):
return default
def set(self, key, value):
""" save value for the given key.
:param key: must be a ``/`` separated value. Usually the first
name is the name of your plugin or your application.
:param value: must be of any combination of basic
python types, including nested types
like e. g. lists of dictionaries.
"""
path = self._getvaluepath(key)
try:
if path.parent.is_dir():
cache_dir_exists_already = True
else:
cache_dir_exists_already = self._cachedir.exists()
path.parent.mkdir(exist_ok=True, parents=True)
except (IOError, OSError):
self.warn("could not create cache path {path}", path=path)
return
try:
f = path.open("wb" if PY2 else "w")
except (IOError, OSError):
self.warn("cache could not write path {path}", path=path)
else:
with f:
json.dump(value, f, indent=2, sort_keys=True)
if not cache_dir_exists_already:
self._ensure_supporting_files()
def _ensure_supporting_files(self):
"""Create supporting files in the cache dir that are not really part of the cache."""
if self._cachedir.is_dir():
readme_path = self._cachedir / "README.md"
if not readme_path.is_file():
readme_path.write_text(README_CONTENT)
gitignore_path = self._cachedir.joinpath(".gitignore")
if not gitignore_path.is_file():
msg = u"# Created by pytest automatically.\n*"
gitignore_path.write_text(msg, encoding="UTF-8")
cachedir_tag_path = self._cachedir.joinpath("CACHEDIR.TAG")
if not cachedir_tag_path.is_file():
cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT)
class LFPlugin(object):
""" Plugin which implements the --lf (run last-failing) option """
def __init__(self, config):
self.config = config
active_keys = "lf", "failedfirst"
self.active = any(config.getoption(key) for key in active_keys)
self.lastfailed = config.cache.get("cache/lastfailed", {})
self._previously_failed_count = None
self._no_failures_behavior = self.config.getoption("last_failed_no_failures")
def pytest_report_collectionfinish(self):
if self.active and self.config.getoption("verbose") >= 0:
if not self._previously_failed_count:
return None
noun = "failure" if self._previously_failed_count == 1 else "failures"
suffix = " first" if self.config.getoption("failedfirst") else ""
mode = "rerun previous {count} {noun}{suffix}".format(
count=self._previously_failed_count, suffix=suffix, noun=noun
)
return "run-last-failure: %s" % mode
def pytest_runtest_logreport(self, report):
if (report.when == "call" and report.passed) or report.skipped:
self.lastfailed.pop(report.nodeid, None)
elif report.failed:
self.lastfailed[report.nodeid] = True
def pytest_collectreport(self, report):
passed = report.outcome in ("passed", "skipped")
if passed:
if report.nodeid in self.lastfailed:<|fim▁hole|>
def pytest_collection_modifyitems(self, session, config, items):
if self.active:
if self.lastfailed:
previously_failed = []
previously_passed = []
for item in items:
if item.nodeid in self.lastfailed:
previously_failed.append(item)
else:
previously_passed.append(item)
self._previously_failed_count = len(previously_failed)
if not previously_failed:
# running a subset of all tests with recorded failures outside
# of the set of tests currently executing
return
if self.config.getoption("lf"):
items[:] = previously_failed
config.hook.pytest_deselected(items=previously_passed)
else:
items[:] = previously_failed + previously_passed
elif self._no_failures_behavior == "none":
config.hook.pytest_deselected(items=items)
items[:] = []
def pytest_sessionfinish(self, session):
config = self.config
if config.getoption("cacheshow") or hasattr(config, "slaveinput"):
return
saved_lastfailed = config.cache.get("cache/lastfailed", {})
if saved_lastfailed != self.lastfailed:
config.cache.set("cache/lastfailed", self.lastfailed)
class NFPlugin(object):
""" Plugin which implements the --nf (run new-first) option """
def __init__(self, config):
self.config = config
self.active = config.option.newfirst
self.cached_nodeids = config.cache.get("cache/nodeids", [])
def pytest_collection_modifyitems(self, session, config, items):
if self.active:
new_items = OrderedDict()
other_items = OrderedDict()
for item in items:
if item.nodeid not in self.cached_nodeids:
new_items[item.nodeid] = item
else:
other_items[item.nodeid] = item
items[:] = self._get_increasing_order(
six.itervalues(new_items)
) + self._get_increasing_order(six.itervalues(other_items))
self.cached_nodeids = [x.nodeid for x in items if isinstance(x, pytest.Item)]
def _get_increasing_order(self, items):
return sorted(items, key=lambda item: item.fspath.mtime(), reverse=True)
def pytest_sessionfinish(self, session):
config = self.config
if config.getoption("cacheshow") or hasattr(config, "slaveinput"):
return
config.cache.set("cache/nodeids", self.cached_nodeids)
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption(
"--lf",
"--last-failed",
action="store_true",
dest="lf",
help="rerun only the tests that failed "
"at the last run (or all if none failed)",
)
group.addoption(
"--ff",
"--failed-first",
action="store_true",
dest="failedfirst",
help="run all tests but run the last failures first. "
"This may re-order tests and thus lead to "
"repeated fixture setup/teardown",
)
group.addoption(
"--nf",
"--new-first",
action="store_true",
dest="newfirst",
help="run tests from new files first, then the rest of the tests "
"sorted by file mtime",
)
group.addoption(
"--cache-show",
action="store_true",
dest="cacheshow",
help="show cache contents, don't perform collection or tests",
)
group.addoption(
"--cache-clear",
action="store_true",
dest="cacheclear",
help="remove all cache contents at start of test run.",
)
cache_dir_default = ".pytest_cache"
if "TOX_ENV_DIR" in os.environ:
cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default)
parser.addini("cache_dir", default=cache_dir_default, help="cache directory path.")
group.addoption(
"--lfnf",
"--last-failed-no-failures",
action="store",
dest="last_failed_no_failures",
choices=("all", "none"),
default="all",
help="change the behavior when no test failed in the last run or no "
"information about the last failures was found in the cache",
)
def pytest_cmdline_main(config):
if config.option.cacheshow:
from _pytest.main import wrap_session
return wrap_session(config, cacheshow)
@pytest.hookimpl(tryfirst=True)
def pytest_configure(config):
config.cache = Cache.for_config(config)
config.pluginmanager.register(LFPlugin(config), "lfplugin")
config.pluginmanager.register(NFPlugin(config), "nfplugin")
@pytest.fixture
def cache(request):
"""
Return a cache object that can persist state between testing sessions.
cache.get(key, default)
cache.set(key, value)
Keys must be a ``/`` separated value, where the first part is usually the
name of your plugin or application to avoid clashes with other cache users.
Values can be any object handled by the json stdlib module.
"""
return request.config.cache
def pytest_report_header(config):
"""Display cachedir with --cache-show and if non-default."""
if config.option.verbose or config.getini("cache_dir") != ".pytest_cache":
cachedir = config.cache._cachedir
# TODO: evaluate generating upward relative paths
# starting with .., ../.. if sensible
try:
displaypath = cachedir.relative_to(config.rootdir)
except ValueError:
displaypath = cachedir
return "cachedir: {}".format(displaypath)
def cacheshow(config, session):
from pprint import pformat
tw = py.io.TerminalWriter()
tw.line("cachedir: " + str(config.cache._cachedir))
if not config.cache._cachedir.is_dir():
tw.line("cache is empty")
return 0
dummy = object()
basedir = config.cache._cachedir
vdir = basedir / "v"
tw.sep("-", "cache values")
for valpath in sorted(x for x in vdir.rglob("*") if x.is_file()):
key = valpath.relative_to(vdir)
val = config.cache.get(key, dummy)
if val is dummy:
tw.line("%s contains unreadable content, will be ignored" % key)
else:
tw.line("%s contains:" % key)
for line in pformat(val).splitlines():
tw.line(" " + line)
ddir = basedir / "d"
if ddir.is_dir():
contents = sorted(ddir.rglob("*"))
tw.sep("-", "cache directories")
for p in contents:
# if p.check(dir=1):
# print("%s/" % p.relto(basedir))
if p.is_file():
key = p.relative_to(basedir)
tw.line("{} is a file of length {:d}".format(key, p.stat().st_size))
return 0<|fim▁end|> | self.lastfailed.pop(report.nodeid)
self.lastfailed.update((item.nodeid, True) for item in report.result)
else:
self.lastfailed[report.nodeid] = True |
<|file_name|>thinkstats2.py<|end_file_name|><|fim▁begin|>"""This file contains code for use with "Think Stats" and
"Think Bayes", both by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
"""This file contains class definitions for:
Hist: represents a histogram (map from values to integer frequencies).
Pmf: represents a probability mass function (map from values to probs).
_DictWrapper: private parent class for Hist and Pmf.
Cdf: represents a discrete cumulative distribution function
Pdf: represents a continuous probability density function
"""
import bisect
import copy
import logging
import math
import random
import re
from collections import Counter
from operator import itemgetter
import thinkplot
import numpy as np
import pandas
import scipy
from scipy import stats
from scipy import special
from scipy import ndimage
from scipy.special import gamma
from io import open
ROOT2 = math.sqrt(2)
def RandomSeed(x):
"""Initialize the random and np.random generators.
x: int seed
"""
random.seed(x)
np.random.seed(x)
def Odds(p):
"""Computes odds for a given probability.
Example: p=0.75 means 75 for and 25 against, or 3:1 odds in favor.
Note: when p=1, the formula for odds divides by zero, which is
normally undefined. But I think it is reasonable to define Odds(1)
to be infinity, so that's what this function does.
p: float 0-1
Returns: float odds
"""
if p == 1:
return float('inf')
return p / (1 - p)
def Probability(o):
"""Computes the probability corresponding to given odds.
Example: o=2 means 2:1 odds in favor, or 2/3 probability
o: float odds, strictly positive
Returns: float probability
"""
return o / (o + 1)
def Probability2(yes, no):
"""Computes the probability corresponding to given odds.
Example: yes=2, no=1 means 2:1 odds in favor, or 2/3 probability.
yes, no: int or float odds in favor
"""
return yes / (yes + no)
class Interpolator(object):
"""Represents a mapping between sorted sequences; performs linear interp.
Attributes:
xs: sorted list
ys: sorted list
"""
def __init__(self, xs, ys):
self.xs = xs
self.ys = ys
def Lookup(self, x):
"""Looks up x and returns the corresponding value of y."""
return self._Bisect(x, self.xs, self.ys)
def Reverse(self, y):
"""Looks up y and returns the corresponding value of x."""
return self._Bisect(y, self.ys, self.xs)
def _Bisect(self, x, xs, ys):
"""Helper function."""
if x <= xs[0]:
return ys[0]
if x >= xs[-1]:
return ys[-1]
i = bisect.bisect(xs, x)
frac = 1.0 * (x - xs[i - 1]) / (xs[i] - xs[i - 1])
y = ys[i - 1] + frac * 1.0 * (ys[i] - ys[i - 1])
return y
# When we plot Hist, Pmf and Cdf objects, they don't appear in
# the legend unless we override the default label.
DEFAULT_LABEL = '_nolegend_'
class _DictWrapper(object):
"""An object that contains a dictionary."""
def __init__(self, obj=None, label=None):
"""Initializes the distribution.
obj: Hist, Pmf, Cdf, Pdf, dict, pandas Series, list of pairs
label: string label
"""
self.label = label if label is not None else DEFAULT_LABEL
self.d = {}
# flag whether the distribution is under a log transform
self.log = False
if obj is None:
return
if isinstance(obj, (_DictWrapper, Cdf, Pdf)):
self.label = label if label is not None else obj.label
if isinstance(obj, dict):
self.d.update(obj.items())
elif isinstance(obj, (_DictWrapper, Cdf, Pdf)):
self.d.update(obj.Items())
elif isinstance(obj, pandas.Series):
self.d.update(obj.value_counts().iteritems())
else:
# finally, treat it like a list
self.d.update(Counter(obj))
if len(self) > 0 and isinstance(self, Pmf):
self.Normalize()
def __hash__(self):
return id(self)
def __str__(self):
cls = self.__class__.__name__
if self.label == DEFAULT_LABEL:
return '%s(%s)' % (cls, str(self.d))
else:
return self.label
def __repr__(self):
cls = self.__class__.__name__
if self.label == DEFAULT_LABEL:
return '%s(%s)' % (cls, repr(self.d))
else:
return '%s(%s, %s)' % (cls, repr(self.d), repr(self.label))
def __eq__(self, other):
try:
return self.d == other.d
except AttributeError:
return False
def __len__(self):
return len(self.d)
def __iter__(self):
return iter(self.d)
def iterkeys(self):
"""Returns an iterator over keys."""
return iter(self.d)
def __contains__(self, value):
return value in self.d
def __getitem__(self, value):
return self.d.get(value, 0)
def __setitem__(self, value, prob):
self.d[value] = prob
def __delitem__(self, value):
del self.d[value]
def Copy(self, label=None):
"""Returns a copy.
Make a shallow copy of d. If you want a deep copy of d,
use copy.deepcopy on the whole object.
label: string label for the new Hist
returns: new _DictWrapper with the same type
"""
new = copy.copy(self)
new.d = copy.copy(self.d)
new.label = label if label is not None else self.label
return new
def Scale(self, factor):
"""Multiplies the values by a factor.
factor: what to multiply by
Returns: new object
"""
new = self.Copy()
new.d.clear()
for val, prob in self.Items():
new.Set(val * factor, prob)
return new
def Log(self, m=None):
"""Log transforms the probabilities.
Removes values with probability 0.
Normalizes so that the largest logprob is 0.
"""
if self.log:
raise ValueError("Pmf/Hist already under a log transform")
self.log = True
if m is None:
m = self.MaxLike()
for x, p in self.d.items():
if p:
self.Set(x, math.log(p / m))
else:
self.Remove(x)
def Exp(self, m=None):
"""Exponentiates the probabilities.
m: how much to shift the ps before exponentiating
If m is None, normalizes so that the largest prob is 1.
"""
if not self.log:
raise ValueError("Pmf/Hist not under a log transform")
self.log = False
if m is None:
m = self.MaxLike()
for x, p in self.d.items():
self.Set(x, math.exp(p - m))
def GetDict(self):
"""Gets the dictionary."""
return self.d
def SetDict(self, d):
"""Sets the dictionary."""
self.d = d
def Values(self):
"""Gets an unsorted sequence of values.
Note: one source of confusion is that the keys of this
dictionary are the values of the Hist/Pmf, and the
values of the dictionary are frequencies/probabilities.
"""
return self.d.keys()
def Items(self):
"""Gets an unsorted sequence of (value, freq/prob) pairs."""
return self.d.items()
def SortedItems(self):
"""Gets a sorted sequence of (value, freq/prob) pairs.
It items are unsortable, the result is unsorted.
"""
def isnan(x):
try:
return math.isnan(x)
except TypeError:
return False
if any([isnan(x) for x in self.Values()]):
msg = 'Keys contain NaN, may not sort correctly.'
logging.warning(msg)
try:
return sorted(self.d.items())
except TypeError:
return self.d.items()
def Render(self, **options):
"""Generates a sequence of points suitable for plotting.
Note: options are ignored
Returns:
tuple of (sorted value sequence, freq/prob sequence)
"""
return zip(*self.SortedItems())
def MakeCdf(self, label=None):
"""Makes a Cdf."""
label = label if label is not None else self.label
return Cdf(self, label=label)
def Print(self):
"""Prints the values and freqs/probs in ascending order."""
for val, prob in self.SortedItems():
print(val, prob)
def Set(self, x, y=0):
"""Sets the freq/prob associated with the value x.
Args:
x: number value
y: number freq or prob
"""
self.d[x] = y
def Incr(self, x, term=1):
"""Increments the freq/prob associated with the value x.
Args:
x: number value
term: how much to increment by
"""
self.d[x] = self.d.get(x, 0) + term
def Mult(self, x, factor):
"""Scales the freq/prob associated with the value x.
Args:
x: number value
factor: how much to multiply by
"""
self.d[x] = self.d.get(x, 0) * factor
def Remove(self, x):
"""Removes a value.
Throws an exception if the value is not there.
Args:
x: value to remove
"""
del self.d[x]
def Total(self):
"""Returns the total of the frequencies/probabilities in the map."""
total = sum(self.d.values())
return total
def MaxLike(self):
"""Returns the largest frequency/probability in the map."""
return max(self.d.values())
def Largest(self, n=10):
"""Returns the largest n values, with frequency/probability.
n: number of items to return
"""
return sorted(self.d.items(), reverse=True)[:n]
def Smallest(self, n=10):
"""Returns the smallest n values, with frequency/probability.
n: number of items to return
"""
return sorted(self.d.items(), reverse=False)[:n]
class Hist(_DictWrapper):
"""Represents a histogram, which is a map from values to frequencies.
Values can be any hashable type; frequencies are integer counters.
"""
def Freq(self, x):
"""Gets the frequency associated with the value x.
Args:
x: number value
Returns:
int frequency
"""
return self.d.get(x, 0)
def Freqs(self, xs):
"""Gets frequencies for a sequence of values."""
return [self.Freq(x) for x in xs]
def IsSubset(self, other):
"""Checks whether the values in this histogram are a subset of
the values in the given histogram."""
for val, freq in self.Items():
if freq > other.Freq(val):
return False
return True
def Subtract(self, other):
"""Subtracts the values in the given histogram from this histogram."""
for val, freq in other.Items():
self.Incr(val, -freq)
class Pmf(_DictWrapper):
"""Represents a probability mass function.
Values can be any hashable type; probabilities are floating-point.
Pmfs are not necessarily normalized.
"""
def Prob(self, x, default=0):
"""Gets the probability associated with the value x.
Args:
x: number value
default: value to return if the key is not there
Returns:
float probability
"""
return self.d.get(x, default)
def Probs(self, xs):
"""Gets probabilities for a sequence of values."""
return [self.Prob(x) for x in xs]
def Percentile(self, percentage):
"""Computes a percentile of a given Pmf.
Note: this is not super efficient. If you are planning
to compute more than a few percentiles, compute the Cdf.
percentage: float 0-100
returns: value from the Pmf
"""
p = percentage / 100
total = 0
for val, prob in sorted(self.Items()):
total += prob
if total >= p:
return val
def ProbGreater(self, x):
"""Probability that a sample from this Pmf exceeds x.
x: number
returns: float probability
"""
if isinstance(x, _DictWrapper):
return PmfProbGreater(self, x)
else:
t = [prob for (val, prob) in self.d.items() if val > x]
return sum(t)
def ProbLess(self, x):
"""Probability that a sample from this Pmf is less than x.
x: number
returns: float probability
"""
if isinstance(x, _DictWrapper):
return PmfProbLess(self, x)
else:
t = [prob for (val, prob) in self.d.items() if val < x]
return sum(t)
def ProbEqual(self, x):
"""Probability that a sample from this Pmf is exactly x.
x: number
returns: float probability
"""
if isinstance(x, _DictWrapper):
return PmfProbEqual(self, x)
else:
return self[x]
# NOTE: I've decided to remove the magic comparators because they
# have the side-effect of making Pmf sortable, but in fact they
# don't support sorting.
def Normalize(self, fraction=1):
"""Normalizes this PMF so the sum of all probs is fraction.
Args:
fraction: what the total should be after normalization
Returns: the total probability before normalizing
"""
if self.log:
raise ValueError("Normalize: Pmf is under a log transform")
total = self.Total()
if total == 0:
raise ValueError('Normalize: total probability is zero.')
factor = fraction / total
for x in self.d:
self.d[x] *= factor
return total
def Random(self):
"""Chooses a random element from this PMF.
Note: this is not very efficient. If you plan to call
this more than a few times, consider converting to a CDF.
Returns:
float value from the Pmf
"""
target = random.random()
total = 0
for x, p in self.d.items():
total += p
if total >= target:
return x
# we shouldn't get here
raise ValueError('Random: Pmf might not be normalized.')
def Sample(self, n):
"""Generates a random sample from this distribution.
n: int length of the sample
returns: NumPy array
"""
return self.MakeCdf().Sample(n)
def Mean(self):
"""Computes the mean of a PMF.
Returns:
float mean
"""
return sum(p * x for x, p in self.Items())
def Median(self):
"""Computes the median of a PMF.
Returns:
float median
"""
return self.MakeCdf().Percentile(50)
def Var(self, mu=None):
"""Computes the variance of a PMF.
mu: the point around which the variance is computed;
if omitted, computes the mean
returns: float variance
"""
if mu is None:
mu = self.Mean()
return sum(p * (x-mu)**2 for x, p in self.Items())
def Expect(self, func):
"""Computes the expectation of func(x).
Returns:
expectation
"""
return np.sum(p * func(x) for x, p in self.Items())
def Std(self, mu=None):
"""Computes the standard deviation of a PMF.
mu: the point around which the variance is computed;
if omitted, computes the mean
returns: float standard deviation
"""
var = self.Var(mu)
return math.sqrt(var)
def Mode(self):
"""Returns the value with the highest probability.
Returns: float probability
"""
_, val = max((prob, val) for val, prob in self.Items())
return val
# The mode of a posterior is the maximum aposteori probability (MAP)
MAP = Mode
# If the distribution contains likelihoods only, the peak is the
# maximum likelihood estimator.
MaximumLikelihood = Mode
def CredibleInterval(self, percentage=90):
"""Computes the central credible interval.
If percentage=90, computes the 90% CI.
Args:
percentage: float between 0 and 100
Returns:
sequence of two floats, low and high
"""
cdf = self.MakeCdf()
return cdf.CredibleInterval(percentage)
def __add__(self, other):
"""Computes the Pmf of the sum of values drawn from self and other.
other: another Pmf or a scalar
returns: new Pmf
"""
try:
return self.AddPmf(other)
except AttributeError:
return self.AddConstant(other)
__radd__ = __add__
def AddPmf(self, other):
"""Computes the Pmf of the sum of values drawn from self and other.
other: another Pmf
returns: new Pmf
"""
pmf = Pmf()
for v1, p1 in self.Items():
for v2, p2 in other.Items():
pmf[v1 + v2] += p1 * p2
return pmf
def AddConstant(self, other):
"""Computes the Pmf of the sum a constant and values from self.
other: a number
returns: new Pmf
"""
if other == 0:
return self.Copy()
pmf = Pmf()
for v1, p1 in self.Items():
pmf.Set(v1 + other, p1)
return pmf
def __sub__(self, other):
"""Computes the Pmf of the diff of values drawn from self and other.
other: another Pmf
returns: new Pmf
"""
try:
return self.SubPmf(other)
except AttributeError:
return self.AddConstant(-other)
def SubPmf(self, other):
"""Computes the Pmf of the diff of values drawn from self and other.
other: another Pmf
returns: new Pmf
"""
pmf = Pmf()
for v1, p1 in self.Items():
for v2, p2 in other.Items():
pmf.Incr(v1 - v2, p1 * p2)
return pmf
def __mul__(self, other):
"""Computes the Pmf of the product of values drawn from self and other.
other: another Pmf
returns: new Pmf
"""
try:
return self.MulPmf(other)
except AttributeError:
return self.MulConstant(other)
def MulPmf(self, other):
"""Computes the Pmf of the diff of values drawn from self and other.
other: another Pmf
returns: new Pmf
"""
pmf = Pmf()
for v1, p1 in self.Items():
for v2, p2 in other.Items():
pmf.Incr(v1 * v2, p1 * p2)
return pmf
def MulConstant(self, other):
"""Computes the Pmf of the product of a constant and values from self.
other: a number
returns: new Pmf
"""
pmf = Pmf()
for v1, p1 in self.Items():
pmf.Set(v1 * other, p1)
return pmf
def __div__(self, other):
"""Computes the Pmf of the ratio of values drawn from self and other.
other: another Pmf
returns: new Pmf
"""
try:
return self.DivPmf(other)
except AttributeError:
return self.MulConstant(1/other)
__truediv__ = __div__
def DivPmf(self, other):
"""Computes the Pmf of the ratio of values drawn from self and other.
other: another Pmf
returns: new Pmf
"""
pmf = Pmf()
for v1, p1 in self.Items():
for v2, p2 in other.Items():
pmf.Incr(v1 / v2, p1 * p2)
return pmf
def Max(self, k):
"""Computes the CDF of the maximum of k selections from this dist.
k: int
returns: new Cdf
"""
cdf = self.MakeCdf()
cdf.ps **= k
return cdf
class Joint(Pmf):
"""Represents a joint distribution.
The values are sequences (usually tuples)
"""
def Marginal(self, i, label=None):
"""Gets the marginal distribution of the indicated variable.
i: index of the variable we want
Returns: Pmf
"""
pmf = Pmf(label=label)
for vs, prob in self.Items():
pmf.Incr(vs[i], prob)
return pmf
def Conditional(self, i, j, val, label=None):
"""Gets the conditional distribution of the indicated variable.
Distribution of vs[i], conditioned on vs[j] = val.
i: index of the variable we want
j: which variable is conditioned on
val: the value the jth variable has to have
Returns: Pmf
"""
pmf = Pmf(label=label)
for vs, prob in self.Items():
if vs[j] != val:
continue
pmf.Incr(vs[i], prob)
pmf.Normalize()
return pmf
def MaxLikeInterval(self, percentage=90):
"""Returns the maximum-likelihood credible interval.
If percentage=90, computes a 90% CI containing the values
with the highest likelihoods.
percentage: float between 0 and 100
Returns: list of values from the suite
"""
interval = []
total = 0
t = [(prob, val) for val, prob in self.Items()]
t.sort(reverse=True)
for prob, val in t:
interval.append(val)
total += prob
if total >= percentage / 100:
break
return interval
def MakeJoint(pmf1, pmf2):
"""Joint distribution of values from pmf1 and pmf2.
Assumes that the PMFs represent independent random variables.
Args:
pmf1: Pmf object
pmf2: Pmf object
Returns:
Joint pmf of value pairs
"""
joint = Joint()
for v1, p1 in pmf1.Items():
for v2, p2 in pmf2.Items():
joint.Set((v1, v2), p1 * p2)
return joint
def MakeHistFromList(t, label=None):
"""Makes a histogram from an unsorted sequence of values.
Args:
t: sequence of numbers
label: string label for this histogram
Returns:
Hist object
"""
return Hist(t, label=label)
def MakeHistFromDict(d, label=None):
"""Makes a histogram from a map from values to frequencies.
Args:
d: dictionary that maps values to frequencies
label: string label for this histogram
Returns:
Hist object
"""
return Hist(d, label)
def MakePmfFromList(t, label=None):
"""Makes a PMF from an unsorted sequence of values.
Args:
t: sequence of numbers
label: string label for this PMF
Returns:
Pmf object
"""
return Pmf(t, label=label)
def MakePmfFromDict(d, label=None):
"""Makes a PMF from a map from values to probabilities.
Args:
d: dictionary that maps values to probabilities
label: string label for this PMF
Returns:
Pmf object
"""
return Pmf(d, label=label)
def MakePmfFromItems(t, label=None):
"""Makes a PMF from a sequence of value-probability pairs
Args:
t: sequence of value-probability pairs
label: string label for this PMF
Returns:
Pmf object
"""
return Pmf(dict(t), label=label)
def MakePmfFromHist(hist, label=None):
"""Makes a normalized PMF from a Hist object.
Args:
hist: Hist object
label: string label
Returns:
Pmf object
"""
if label is None:
label = hist.label
return Pmf(hist, label=label)
def MakeMixture(metapmf, label='mix'):
"""Make a mixture distribution.
Args:
metapmf: Pmf that maps from Pmfs to probs.
label: string label for the new Pmf.
Returns: Pmf object.
"""
mix = Pmf(label=label)
for pmf, p1 in metapmf.Items():
for x, p2 in pmf.Items():
mix[x] += p1 * p2
return mix
def MakeUniformPmf(low, high, n):
"""Make a uniform Pmf.
low: lowest value (inclusive)
high: highest value (inclusize)
n: number of values
"""
pmf = Pmf()
for x in np.linspace(low, high, n):
pmf.Set(x, 1)
pmf.Normalize()
return pmf
class Cdf:
"""Represents a cumulative distribution function.
Attributes:
xs: sequence of values
ps: sequence of probabilities
label: string used as a graph label.
"""
def __init__(self, obj=None, ps=None, label=None):
"""Initializes.
If ps is provided, obj must be the corresponding list of values.
obj: Hist, Pmf, Cdf, Pdf, dict, pandas Series, list of pairs
ps: list of cumulative probabilities
label: string label
"""
self.label = label if label is not None else DEFAULT_LABEL
if isinstance(obj, (_DictWrapper, Cdf, Pdf)):
if not label:
self.label = label if label is not None else obj.label
if obj is None:
# caller does not provide obj, make an empty Cdf
self.xs = np.asarray([])
self.ps = np.asarray([])
if ps is not None:
logging.warning("Cdf: can't pass ps without also passing xs.")
return
else:
# if the caller provides xs and ps, just store them
if ps is not None:
if isinstance(ps, str):
logging.warning("Cdf: ps can't be a string")
self.xs = np.asarray(obj)
self.ps = np.asarray(ps)
return
# caller has provided just obj, not ps
if isinstance(obj, Cdf):
self.xs = copy.copy(obj.xs)
self.ps = copy.copy(obj.ps)
return
if isinstance(obj, _DictWrapper):
dw = obj
else:
dw = Hist(obj)
if len(dw) == 0:
self.xs = np.asarray([])
self.ps = np.asarray([])
return
xs, freqs = zip(*sorted(dw.Items()))
self.xs = np.asarray(xs)
self.ps = np.cumsum(freqs, dtype=np.float)
self.ps /= self.ps[-1]
def __str__(self):
cls = self.__class__.__name__
if self.label == DEFAULT_LABEL:
return '%s(%s, %s)' % (cls, str(self.xs), str(self.ps))
else:
return self.label
def __repr__(self):
cls = self.__class__.__name__
if self.label == DEFAULT_LABEL:
return '%s(%s, %s)' % (cls, str(self.xs), str(self.ps))
else:
return '%s(%s, %s, %s)' % (cls, str(self.xs), str(self.ps),
repr(self.label))
def __len__(self):
return len(self.xs)
def __getitem__(self, x):
return self.Prob(x)
def __setitem__(self):
raise UnimplementedMethodException()
def __delitem__(self):
raise UnimplementedMethodException()
def __eq__(self, other):
return np.all(self.xs == other.xs) and np.all(self.ps == other.ps)
def Print(self):
"""Prints the values and freqs/probs in ascending order."""
for val, prob in zip(self.xs, self.ps):
print(val, prob)
def Copy(self, label=None):
"""Returns a copy of this Cdf.
label: string label for the new Cdf
"""
if label is None:
label = self.label
return Cdf(list(self.xs), list(self.ps), label=label)
def MakePmf(self, label=None):
"""Makes a Pmf."""
if label is None:
label = self.label
return Pmf(self, label=label)
def Items(self):
"""Returns a sorted sequence of (value, probability) pairs.
Note: in Python3, returns an iterator.
"""
a = self.ps
b = np.roll(a, 1)
b[0] = 0
return zip(self.xs, a-b)
def Shift(self, term):
"""Adds a term to the xs.
term: how much to add
"""
new = self.Copy()
# don't use +=, or else an int array + float yields int array
new.xs = new.xs + term
return new
def Scale(self, factor):
"""Multiplies the xs by a factor.
factor: what to multiply by
"""
new = self.Copy()
# don't use *=, or else an int array * float yields int array
new.xs = new.xs * factor
return new
def Prob(self, x):
"""Returns CDF(x), the probability that corresponds to value x.
Args:
x: number
Returns:
float probability
"""
if x < self.xs[0]:
return 0
index = bisect.bisect(self.xs, x)
p = self.ps[index-1]
return p
def Probs(self, xs):
"""Gets probabilities for a sequence of values.
xs: any sequence that can be converted to NumPy array
returns: NumPy array of cumulative probabilities
"""
xs = np.asarray(xs)
index = np.searchsorted(self.xs, xs, side='right')
ps = self.ps[index-1]
ps[xs < self.xs[0]] = 0
return ps
ProbArray = Probs
def Value(self, p):
"""Returns InverseCDF(p), the value that corresponds to probability p.
Args:
p: number in the range [0, 1]
Returns:
number value
"""
if p < 0 or p > 1:
raise ValueError('Probability p must be in range [0, 1]')
index = bisect.bisect_left(self.ps, p)
return self.xs[index]
def Values(self, ps=None):
"""Returns InverseCDF(p), the value that corresponds to probability p.
If ps is not provided, returns all values.
Args:
ps: NumPy array of numbers in the range [0, 1]
Returns:
NumPy array of values
"""
if ps is None:
return self.xs
ps = np.asarray(ps)
if np.any(ps < 0) or np.any(ps > 1):
raise ValueError('Probability p must be in range [0, 1]')
index = np.searchsorted(self.ps, ps, side='left')
return self.xs[index]
ValueArray = Values
def Percentile(self, p):
"""Returns the value that corresponds to percentile p.
Args:
p: number in the range [0, 100]
Returns:
number value
"""
return self.Value(p / 100)
def Percentiles(self, ps):
"""Returns the value that corresponds to percentiles ps.
Args:
ps: numbers in the range [0, 100]
Returns:
array of values
"""
ps = np.asarray(ps)
return self.Values(ps / 100)
def PercentileRank(self, x):
"""Returns the percentile rank of the value x.
x: potential value in the CDF
returns: percentile rank in the range 0 to 100
"""
return self.Prob(x) * 100
def PercentileRanks(self, xs):
"""Returns the percentile ranks of the values in xs.
xs: potential value in the CDF
returns: array of percentile ranks in the range 0 to 100
"""
return self.Probs(x) * 100
def Random(self):
"""Chooses a random value from this distribution."""
return self.Value(random.random())
def Sample(self, n):
"""Generates a random sample from this distribution.
n: int length of the sample
returns: NumPy array
"""
ps = np.random.random(n)
return self.ValueArray(ps)
def Mean(self):
"""Computes the mean of a CDF.
Returns:
float mean
"""
old_p = 0
total = 0
for x, new_p in zip(self.xs, self.ps):
p = new_p - old_p
total += p * x
old_p = new_p
return total
def CredibleInterval(self, percentage=90):
"""Computes the central credible interval.
If percentage=90, computes the 90% CI.
Args:
percentage: float between 0 and 100
Returns:
sequence of two floats, low and high
"""
prob = (1 - percentage / 100) / 2
interval = self.Value(prob), self.Value(1 - prob)
return interval
ConfidenceInterval = CredibleInterval
def _Round(self, multiplier=1000):
"""
An entry is added to the cdf only if the percentile differs
from the previous value in a significant digit, where the number
of significant digits is determined by multiplier. The
default is 1000, which keeps log10(1000) = 3 significant digits.
"""
# TODO(write this method)
raise UnimplementedMethodException()
def Render(self, **options):
"""Generates a sequence of points suitable for plotting.
An empirical CDF is a step function; linear interpolation
can be misleading.
Note: options are ignored
Returns:
tuple of (xs, ps)
"""
def interleave(a, b):
c = np.empty(a.shape[0] + b.shape[0])
c[::2] = a
c[1::2] = b
return c
a = np.array(self.xs)
xs = interleave(a, a)
shift_ps = np.roll(self.ps, 1)
shift_ps[0] = 0
ps = interleave(shift_ps, self.ps)
return xs, ps
def Max(self, k):
"""Computes the CDF of the maximum of k selections from this dist.
k: int
returns: new Cdf
"""
cdf = self.Copy()
cdf.ps **= k
return cdf
def MakeCdfFromItems(items, label=None):
"""Makes a cdf from an unsorted sequence of (value, frequency) pairs.
Args:
items: unsorted sequence of (value, frequency) pairs
label: string label for this CDF
Returns:
cdf: list of (value, fraction) pairs
"""
return Cdf(dict(items), label=label)
def MakeCdfFromDict(d, label=None):
"""Makes a CDF from a dictionary that maps values to frequencies.
Args:
d: dictionary that maps values to frequencies.
label: string label for the data.
Returns:
Cdf object
"""
return Cdf(d, label=label)
def MakeCdfFromList(seq, label=None):
"""Creates a CDF from an unsorted sequence.
Args:
seq: unsorted sequence of sortable values
label: string label for the cdf
Returns:
Cdf object
"""
return Cdf(seq, label=label)
def MakeCdfFromHist(hist, label=None):
"""Makes a CDF from a Hist object.
Args:
hist: Pmf.Hist object
label: string label for the data.
Returns:
Cdf object
"""
if label is None:
label = hist.label
return Cdf(hist, label=label)
def MakeCdfFromPmf(pmf, label=None):
"""Makes a CDF from a Pmf object.
Args:
pmf: Pmf.Pmf object
label: string label for the data.
Returns:
Cdf object
"""
if label is None:
label = pmf.label
return Cdf(pmf, label=label)
class UnimplementedMethodException(Exception):
"""Exception if someone calls a method that should be overridden."""
class Suite(Pmf):
"""Represents a suite of hypotheses and their probabilities."""
def Update(self, data):
"""Updates each hypothesis based on the data.
data: any representation of the data
returns: the normalizing constant
"""
for hypo in self.Values():
like = self.Likelihood(data, hypo)
self.Mult(hypo, like)
return self.Normalize()
def LogUpdate(self, data):
"""Updates a suite of hypotheses based on new data.
Modifies the suite directly; if you want to keep the original, make
a copy.
Note: unlike Update, LogUpdate does not normalize.
Args:
data: any representation of the data
"""
for hypo in self.Values():
like = self.LogLikelihood(data, hypo)
self.Incr(hypo, like)
def UpdateSet(self, dataset):
"""Updates each hypothesis based on the dataset.
This is more efficient than calling Update repeatedly because
it waits until the end to Normalize.
Modifies the suite directly; if you want to keep the original, make
a copy.
dataset: a sequence of data
returns: the normalizing constant
"""
for data in dataset:
for hypo in self.Values():
like = self.Likelihood(data, hypo)
self.Mult(hypo, like)
return self.Normalize()
def LogUpdateSet(self, dataset):
"""Updates each hypothesis based on the dataset.
Modifies the suite directly; if you want to keep the original, make
a copy.
dataset: a sequence of data
returns: None
"""
for data in dataset:
self.LogUpdate(data)
def Likelihood(self, data, hypo):
"""Computes the likelihood of the data under the hypothesis.
hypo: some representation of the hypothesis
data: some representation of the data
"""
raise UnimplementedMethodException()
def LogLikelihood(self, data, hypo):
"""Computes the log likelihood of the data under the hypothesis.
hypo: some representation of the hypothesis
data: some representation of the data
"""
raise UnimplementedMethodException()
def Print(self):
"""Prints the hypotheses and their probabilities."""
for hypo, prob in sorted(self.Items()):
print(hypo, prob)
def MakeOdds(self):
"""Transforms from probabilities to odds.
Values with prob=0 are removed.
"""
for hypo, prob in self.Items():
if prob:
self.Set(hypo, Odds(prob))
else:
self.Remove(hypo)
def MakeProbs(self):
"""Transforms from odds to probabilities."""
for hypo, odds in self.Items():
self.Set(hypo, Probability(odds))
def MakeSuiteFromList(t, label=None):
"""Makes a suite from an unsorted sequence of values.
Args:
t: sequence of numbers
label: string label for this suite
Returns:
Suite object
"""
hist = MakeHistFromList(t, label=label)
d = hist.GetDict()
return MakeSuiteFromDict(d)
def MakeSuiteFromHist(hist, label=None):
"""Makes a normalized suite from a Hist object.
Args:
hist: Hist object
label: string label
Returns:
Suite object
"""
if label is None:
label = hist.label
# make a copy of the dictionary
d = dict(hist.GetDict())
return MakeSuiteFromDict(d, label)
def MakeSuiteFromDict(d, label=None):
"""Makes a suite from a map from values to probabilities.
Args:
d: dictionary that maps values to probabilities
label: string label for this suite
Returns:
Suite object
"""
suite = Suite(label=label)
suite.SetDict(d)
suite.Normalize()
return suite
class Pdf(object):
"""Represents a probability density function (PDF)."""
def Density(self, x):
"""Evaluates this Pdf at x.
Returns: float or NumPy array of probability density
"""
raise UnimplementedMethodException()
def GetLinspace(self):
"""Get a linspace for plotting.
Not all subclasses of Pdf implement this.
Returns: numpy array
"""
raise UnimplementedMethodException()
def MakePmf(self, **options):
"""Makes a discrete version of this Pdf.
options can include
label: string
low: low end of range
high: high end of range
n: number of places to evaluate
Returns: new Pmf
"""
label = options.pop('label', '')
xs, ds = self.Render(**options)
return Pmf(dict(zip(xs, ds)), label=label)
def Render(self, **options):
"""Generates a sequence of points suitable for plotting.
If options includes low and high, it must also include n;
in that case the density is evaluated an n locations between
low and high, including both.
If options includes xs, the density is evaluate at those location.
Otherwise, self.GetLinspace is invoked to provide the locations.
Returns:
tuple of (xs, densities)
"""
low, high = options.pop('low', None), options.pop('high', None)
if low is not None and high is not None:
n = options.pop('n', 101)
xs = np.linspace(low, high, n)
else:
xs = options.pop('xs', None)
if xs is None:
xs = self.GetLinspace()
ds = self.Density(xs)
return xs, ds
def Items(self):
"""Generates a sequence of (value, probability) pairs.
"""
return zip(*self.Render())
class NormalPdf(Pdf):
"""Represents the PDF of a Normal distribution."""
def __init__(self, mu=0, sigma=1, label=None):
"""Constructs a Normal Pdf with given mu and sigma.
mu: mean
sigma: standard deviation
label: string
"""
self.mu = mu
self.sigma = sigma
self.label = label if label is not None else '_nolegend_'
def __str__(self):
return 'NormalPdf(%f, %f)' % (self.mu, self.sigma)
def GetLinspace(self):
"""Get a linspace for plotting.
Returns: numpy array
"""
low, high = self.mu-3*self.sigma, self.mu+3*self.sigma
return np.linspace(low, high, 101)
def Density(self, xs):
"""Evaluates this Pdf at xs.
xs: scalar or sequence of floats
returns: float or NumPy array of probability density
"""
return stats.norm.pdf(xs, self.mu, self.sigma)
class ExponentialPdf(Pdf):
"""Represents the PDF of an exponential distribution."""
def __init__(self, lam=1, label=None):
"""Constructs an exponential Pdf with given parameter.
lam: rate parameter
label: string
"""
self.lam = lam
self.label = label if label is not None else '_nolegend_'
def __str__(self):
return 'ExponentialPdf(%f)' % (self.lam)
def GetLinspace(self):
"""Get a linspace for plotting.
Returns: numpy array
"""
low, high = 0, 5.0/self.lam
return np.linspace(low, high, 101)
def Density(self, xs):
"""Evaluates this Pdf at xs.
xs: scalar or sequence of floats
returns: float or NumPy array of probability density
"""
return stats.expon.pdf(xs, scale=1.0/self.lam)
class EstimatedPdf(Pdf):
"""Represents a PDF estimated by KDE."""
def __init__(self, sample, label=None):
"""Estimates the density function based on a sample.
sample: sequence of data
label: string
"""
self.label = label if label is not None else '_nolegend_'
self.kde = stats.gaussian_kde(sample)
low = min(sample)
high = max(sample)
self.linspace = np.linspace(low, high, 101)
def __str__(self):
return 'EstimatedPdf(label=%s)' % str(self.label)
def GetLinspace(self):
"""Get a linspace for plotting.
Returns: numpy array
"""
return self.linspace
def Density(self, xs):
"""Evaluates this Pdf at xs.
returns: float or NumPy array of probability density
"""
return self.kde.evaluate(xs)
def Sample(self, n):
"""Generates a random sample from the estimated Pdf.
n: size of sample
"""
# NOTE: we have to flatten because resample returns a 2-D
# array for some reason.
return self.kde.resample(n).flatten()
def CredibleInterval(pmf, percentage=90):
"""Computes a credible interval for a given distribution.
If percentage=90, computes the 90% CI.
Args:
pmf: Pmf object representing a posterior distribution
percentage: float between 0 and 100
Returns:
sequence of two floats, low and high
"""
cdf = pmf.MakeCdf()
prob = (1 - percentage / 100) / 2
interval = cdf.Value(prob), cdf.Value(1 - prob)
return interval
def PmfProbLess(pmf1, pmf2):
"""Probability that a value from pmf1 is less than a value from pmf2.
Args:
pmf1: Pmf object
pmf2: Pmf object
Returns:
float probability
"""
total = 0
for v1, p1 in pmf1.Items():
for v2, p2 in pmf2.Items():
if v1 < v2:
total += p1 * p2
return total
def PmfProbGreater(pmf1, pmf2):
"""Probability that a value from pmf1 is less than a value from pmf2.
Args:
pmf1: Pmf object
pmf2: Pmf object
Returns:
float probability
"""
total = 0
for v1, p1 in pmf1.Items():
for v2, p2 in pmf2.Items():
if v1 > v2:
total += p1 * p2
return total
def PmfProbEqual(pmf1, pmf2):
"""Probability that a value from pmf1 equals a value from pmf2.
Args:
pmf1: Pmf object
pmf2: Pmf object
Returns:
float probability
"""
total = 0
for v1, p1 in pmf1.Items():
for v2, p2 in pmf2.Items():
if v1 == v2:
total += p1 * p2
return total
def RandomSum(dists):
"""Chooses a random value from each dist and returns the sum.
dists: sequence of Pmf or Cdf objects
returns: numerical sum
"""
total = sum(dist.Random() for dist in dists)
return total
def SampleSum(dists, n):
"""Draws a sample of sums from a list of distributions.
dists: sequence of Pmf or Cdf objects
n: sample size
returns: new Pmf of sums
"""
pmf = Pmf(RandomSum(dists) for i in range(n))
return pmf
def EvalNormalPdf(x, mu, sigma):
"""Computes the unnormalized PDF of the normal distribution.
x: value
mu: mean
sigma: standard deviation
returns: float probability density
"""
return stats.norm.pdf(x, mu, sigma)
def MakeNormalPmf(mu, sigma, num_sigmas, n=201):
"""Makes a PMF discrete approx to a Normal distribution.
mu: float mean
sigma: float standard deviation
num_sigmas: how many sigmas to extend in each direction
n: number of values in the Pmf
returns: normalized Pmf
"""
pmf = Pmf()
low = mu - num_sigmas * sigma
high = mu + num_sigmas * sigma
for x in np.linspace(low, high, n):
p = EvalNormalPdf(x, mu, sigma)
pmf.Set(x, p)
pmf.Normalize()
return pmf
def EvalBinomialPmf(k, n, p):
"""Evaluates the binomial PMF.
Returns the probabily of k successes in n trials with probability p.
"""
return stats.binom.pmf(k, n, p)
def MakeBinomialPmf(n, p):
"""Evaluates the binomial PMF.
Returns the distribution of successes in n trials with probability p.
"""
pmf = Pmf()
for k in range(n+1):
pmf[k] = stats.binom.pmf(k, n, p)
return pmf
def EvalGammaPdf(x, a):
"""Computes the Gamma PDF.
x: where to evaluate the PDF
a: parameter of the gamma distribution
returns: float probability
"""
return x**(a-1) * np.exp(-x) / gamma(a)
def MakeGammaPmf(xs, a):
"""Makes a PMF discrete approx to a Gamma distribution.
lam: parameter lambda in events per unit time
xs: upper bound of the Pmf
returns: normalized Pmf
"""
xs = np.asarray(xs)
ps = EvalGammaPdf(xs, a)
pmf = Pmf(dict(zip(xs, ps)))
pmf.Normalize()
return pmf
def EvalGeometricPmf(k, p, loc=0):
"""Evaluates the geometric PMF.
With loc=0: Probability of `k` trials to get one success.
With loc=-1: Probability of `k` trials before first success.
k: number of trials
p: probability of success on each trial
"""
return stats.geom.pmf(k, p, loc=loc)
def MakeGeometricPmf(p, loc=0, high=10):
"""Evaluates the binomial PMF.
With loc=0: PMF of trials to get one success.
With loc=-1: PMF of trials before first success.
p: probability of success
high: upper bound where PMF is truncated
"""
pmf = Pmf()
for k in range(high):
pmf[k] = stats.geom.pmf(k, p, loc=loc)
pmf.Normalize()
return pmf
def EvalHypergeomPmf(k, N, K, n):
"""Evaluates the hypergeometric PMF.
Returns the probabily of k successes in n trials from a population
N with K successes in it.
"""
return stats.hypergeom.pmf(k, N, K, n)
def EvalPoissonPmf(k, lam):
"""Computes the Poisson PMF.
k: number of events
lam: parameter lambda in events per unit time
returns: float probability
"""
return stats.poisson.pmf(k, lam)
def MakePoissonPmf(lam, high, step=1):
"""Makes a PMF discrete approx to a Poisson distribution.
lam: parameter lambda in events per unit time
high: upper bound of the Pmf
returns: normalized Pmf
"""
pmf = Pmf()
for k in range(0, high + 1, step):
p = stats.poisson.pmf(k, lam)
pmf.Set(k, p)
pmf.Normalize()
return pmf
def EvalExponentialPdf(x, lam):
"""Computes the exponential PDF.
x: value
lam: parameter lambda in events per unit time
returns: float probability density
"""
return lam * math.exp(-lam * x)
def EvalExponentialCdf(x, lam):
"""Evaluates CDF of the exponential distribution with parameter lam."""
return 1 - math.exp(-lam * x)
def MakeExponentialPmf(lam, high, n=200):
"""Makes a PMF discrete approx to an exponential distribution.
lam: parameter lambda in events per unit time
high: upper bound
n: number of values in the Pmf
returns: normalized Pmf
"""
pmf = Pmf()
for x in np.linspace(0, high, n):
p = EvalExponentialPdf(x, lam)
pmf.Set(x, p)
pmf.Normalize()
return pmf
def EvalWeibullPdf(x, lam, k):
"""Computes the Weibull PDF.
x: value
lam: parameter lambda in events per unit time
k: parameter
returns: float probability density
"""
arg = (x / lam)
return k / lam * arg**(k-1) * np.exp(-arg**k)
def EvalWeibullCdf(x, lam, k):
"""Evaluates CDF of the Weibull distribution."""
arg = (x / lam)
return 1 - np.exp(-arg**k)
def MakeWeibullPmf(lam, k, high, n=200):
"""Makes a PMF discrete approx to a Weibull distribution.
lam: parameter lambda in events per unit time
k: parameter
high: upper bound
n: number of values in the Pmf
returns: normalized Pmf
"""
xs = np.linspace(0, high, n)
ps = EvalWeibullPdf(xs, lam, k)
ps[np.isinf(ps)] = 0
return Pmf(dict(zip(xs, ps)))
def EvalParetoPdf(x, xm, alpha):
"""Computes the Pareto.
xm: minimum value (scale parameter)
alpha: shape parameter
returns: float probability density
"""
return stats.pareto.pdf(x, alpha, scale=xm)
def MakeParetoPmf(xm, alpha, high, num=101):
"""Makes a PMF discrete approx to a Pareto distribution.
xm: minimum value (scale parameter)
alpha: shape parameter
high: upper bound value
num: number of values
returns: normalized Pmf
"""
xs = np.linspace(xm, high, num)
ps = stats.pareto.pdf(xs, alpha, scale=xm)
pmf = Pmf(dict(zip(xs, ps)))
return pmf
def StandardNormalCdf(x):
"""Evaluates the CDF of the standard Normal distribution.
See http://en.wikipedia.org/wiki/Normal_distribution
#Cumulative_distribution_function
Args:
x: float
Returns:
float
"""
return (math.erf(x / ROOT2) + 1) / 2
def EvalNormalCdf(x, mu=0, sigma=1):
"""Evaluates the CDF of the normal distribution.
Args:
x: float
mu: mean parameter
sigma: standard deviation parameter
Returns:
float
"""
return stats.norm.cdf(x, loc=mu, scale=sigma)
def EvalNormalCdfInverse(p, mu=0, sigma=1):
"""Evaluates the inverse CDF of the normal distribution.
See http://en.wikipedia.org/wiki/Normal_distribution#Quantile_function
Args:
p: float
mu: mean parameter
sigma: standard deviation parameter
Returns:
float
"""
return stats.norm.ppf(p, loc=mu, scale=sigma)
def EvalLognormalCdf(x, mu=0, sigma=1):
"""Evaluates the CDF of the lognormal distribution.
x: float or sequence
mu: mean parameter
sigma: standard deviation parameter
Returns: float or sequence
"""
return stats.lognorm.cdf(x, loc=mu, scale=sigma)
def RenderExpoCdf(lam, low, high, n=101):
"""Generates sequences of xs and ps for an exponential CDF.
lam: parameter
low: float
high: float
n: number of points to render
<|fim▁hole|> returns: numpy arrays (xs, ps)
"""
xs = np.linspace(low, high, n)
ps = 1 - np.exp(-lam * xs)
#ps = stats.expon.cdf(xs, scale=1.0/lam)
return xs, ps
def RenderNormalCdf(mu, sigma, low, high, n=101):
"""Generates sequences of xs and ps for a Normal CDF.
mu: parameter
sigma: parameter
low: float
high: float
n: number of points to render
returns: numpy arrays (xs, ps)
"""
xs = np.linspace(low, high, n)
ps = stats.norm.cdf(xs, mu, sigma)
return xs, ps
def RenderParetoCdf(xmin, alpha, low, high, n=50):
"""Generates sequences of xs and ps for a Pareto CDF.
xmin: parameter
alpha: parameter
low: float
high: float
n: number of points to render
returns: numpy arrays (xs, ps)
"""
if low < xmin:
low = xmin
xs = np.linspace(low, high, n)
ps = 1 - (xs / xmin) ** -alpha
#ps = stats.pareto.cdf(xs, scale=xmin, b=alpha)
return xs, ps
class Beta:
"""Represents a Beta distribution.
See http://en.wikipedia.org/wiki/Beta_distribution
"""
def __init__(self, alpha=1, beta=1, label=None):
"""Initializes a Beta distribution."""
self.alpha = alpha
self.beta = beta
self.label = label if label is not None else '_nolegend_'
def Update(self, data):
"""Updates a Beta distribution.
data: pair of int (heads, tails)
"""
heads, tails = data
self.alpha += heads
self.beta += tails
def Mean(self):
"""Computes the mean of this distribution."""
return self.alpha / (self.alpha + self.beta)
def MAP(self):
"""Computes the value with maximum a posteori probability."""
a = self.alpha - 1
b = self.beta - 1
return a / (a + b)
def Random(self):
"""Generates a random variate from this distribution."""
return random.betavariate(self.alpha, self.beta)
def Sample(self, n):
"""Generates a random sample from this distribution.
n: int sample size
"""
size = n,
return np.random.beta(self.alpha, self.beta, size)
def EvalPdf(self, x):
"""Evaluates the PDF at x."""
return x ** (self.alpha - 1) * (1 - x) ** (self.beta - 1)
def MakePmf(self, steps=101, label=None):
"""Returns a Pmf of this distribution.
Note: Normally, we just evaluate the PDF at a sequence
of points and treat the probability density as a probability
mass.
But if alpha or beta is less than one, we have to be
more careful because the PDF goes to infinity at x=0
and x=1. In that case we evaluate the CDF and compute
differences.
The result is a little funny, because the values at 0 and 1
are not symmetric. Nevertheless, it is a reasonable discrete
model of the continuous distribution, and behaves well as
the number of values increases.
"""
if label is None and self.label is not None:
label = self.label
if self.alpha < 1 or self.beta < 1:
cdf = self.MakeCdf()
pmf = cdf.MakePmf()
return pmf
xs = [i / (steps - 1.0) for i in range(steps)]
probs = [self.EvalPdf(x) for x in xs]
pmf = Pmf(dict(zip(xs, probs)), label=label)
return pmf
def MakeCdf(self, steps=101):
"""Returns the CDF of this distribution."""
xs = [i / (steps - 1.0) for i in range(steps)]
ps = special.betainc(self.alpha, self.beta, xs)
cdf = Cdf(xs, ps)
return cdf
def Percentile(self, ps):
"""Returns the given percentiles from this distribution.
ps: scalar, array, or list of [0-100]
"""
ps = np.asarray(ps) / 100
xs = special.betaincinv(self.alpha, self.beta, ps)
return xs
class Dirichlet(object):
"""Represents a Dirichlet distribution.
See http://en.wikipedia.org/wiki/Dirichlet_distribution
"""
def __init__(self, n, conc=1, label=None):
"""Initializes a Dirichlet distribution.
n: number of dimensions
conc: concentration parameter (smaller yields more concentration)
label: string label
"""
if n < 2:
raise ValueError('A Dirichlet distribution with '
'n<2 makes no sense')
self.n = n
self.params = np.ones(n, dtype=np.float) * conc
self.label = label if label is not None else '_nolegend_'
def Update(self, data):
"""Updates a Dirichlet distribution.
data: sequence of observations, in order corresponding to params
"""
m = len(data)
self.params[:m] += data
def Random(self):
"""Generates a random variate from this distribution.
Returns: normalized vector of fractions
"""
p = np.random.gamma(self.params)
return p / p.sum()
def Likelihood(self, data):
"""Computes the likelihood of the data.
Selects a random vector of probabilities from this distribution.
Returns: float probability
"""
m = len(data)
if self.n < m:
return 0
x = data
p = self.Random()
q = p[:m] ** x
return q.prod()
def LogLikelihood(self, data):
"""Computes the log likelihood of the data.
Selects a random vector of probabilities from this distribution.
Returns: float log probability
"""
m = len(data)
if self.n < m:
return float('-inf')
x = self.Random()
y = np.log(x[:m]) * data
return y.sum()
def MarginalBeta(self, i):
"""Computes the marginal distribution of the ith element.
See http://en.wikipedia.org/wiki/Dirichlet_distribution
#Marginal_distributions
i: int
Returns: Beta object
"""
alpha0 = self.params.sum()
alpha = self.params[i]
return Beta(alpha, alpha0 - alpha)
def PredictivePmf(self, xs, label=None):
"""Makes a predictive distribution.
xs: values to go into the Pmf
Returns: Pmf that maps from x to the mean prevalence of x
"""
alpha0 = self.params.sum()
ps = self.params / alpha0
return Pmf(zip(xs, ps), label=label)
def BinomialCoef(n, k):
"""Compute the binomial coefficient "n choose k".
n: number of trials
k: number of successes
Returns: float
"""
return scipy.misc.comb(n, k)
def LogBinomialCoef(n, k):
"""Computes the log of the binomial coefficient.
http://math.stackexchange.com/questions/64716/
approximating-the-logarithm-of-the-binomial-coefficient
n: number of trials
k: number of successes
Returns: float
"""
return n * math.log(n) - k * math.log(k) - (n - k) * math.log(n - k)
def NormalProbability(ys, jitter=0):
"""Generates data for a normal probability plot.
ys: sequence of values
jitter: float magnitude of jitter added to the ys
returns: numpy arrays xs, ys
"""
n = len(ys)
xs = np.random.normal(0, 1, n)
xs.sort()
if jitter:
ys = Jitter(ys, jitter)
else:
ys = np.array(ys)
ys.sort()
return xs, ys
def Jitter(values, jitter=0.5):
"""Jitters the values by adding a uniform variate in (-jitter, jitter).
values: sequence
jitter: scalar magnitude of jitter
returns: new numpy array
"""
n = len(values)
return np.random.normal(0, jitter, n) + values
def NormalProbabilityPlot(sample, fit_color='0.8', **options):
"""Makes a normal probability plot with a fitted line.
sample: sequence of numbers
fit_color: color string for the fitted line
options: passed along to Plot
"""
xs, ys = NormalProbability(sample)
mean, var = MeanVar(sample)
std = math.sqrt(var)
fit = FitLine(xs, mean, std)
thinkplot.Plot(*fit, color=fit_color, label='model')
xs, ys = NormalProbability(sample)
thinkplot.Plot(xs, ys, **options)
def Mean(xs):
"""Computes mean.
xs: sequence of values
returns: float mean
"""
return np.mean(xs)
def Var(xs, mu=None, ddof=0):
"""Computes variance.
xs: sequence of values
mu: option known mean
ddof: delta degrees of freedom
returns: float
"""
xs = np.asarray(xs)
if mu is None:
mu = xs.mean()
ds = xs - mu
return np.dot(ds, ds) / (len(xs) - ddof)
def Std(xs, mu=None, ddof=0):
"""Computes standard deviation.
xs: sequence of values
mu: option known mean
ddof: delta degrees of freedom
returns: float
"""
var = Var(xs, mu, ddof)
return math.sqrt(var)
def MeanVar(xs, ddof=0):
"""Computes mean and variance.
Based on http://stackoverflow.com/questions/19391149/
numpy-mean-and-variance-from-single-function
xs: sequence of values
ddof: delta degrees of freedom
returns: pair of float, mean and var
"""
xs = np.asarray(xs)
mean = xs.mean()
s2 = Var(xs, mean, ddof)
return mean, s2
def Trim(t, p=0.01):
"""Trims the largest and smallest elements of t.
Args:
t: sequence of numbers
p: fraction of values to trim off each end
Returns:
sequence of values
"""
n = int(p * len(t))
t = sorted(t)[n:-n]
return t
def TrimmedMean(t, p=0.01):
"""Computes the trimmed mean of a sequence of numbers.
Args:
t: sequence of numbers
p: fraction of values to trim off each end
Returns:
float
"""
t = Trim(t, p)
return Mean(t)
def TrimmedMeanVar(t, p=0.01):
"""Computes the trimmed mean and variance of a sequence of numbers.
Side effect: sorts the list.
Args:
t: sequence of numbers
p: fraction of values to trim off each end
Returns:
float
"""
t = Trim(t, p)
mu, var = MeanVar(t)
return mu, var
def CohenEffectSize(group1, group2):
"""Compute Cohen's d.
group1: Series or NumPy array
group2: Series or NumPy array
returns: float
"""
diff = group1.mean() - group2.mean()
n1, n2 = len(group1), len(group2)
var1 = group1.var()
var2 = group2.var()
pooled_var = (n1 * var1 + n2 * var2) / (n1 + n2)
d = diff / math.sqrt(pooled_var)
return d
def Cov(xs, ys, meanx=None, meany=None):
"""Computes Cov(X, Y).
Args:
xs: sequence of values
ys: sequence of values
meanx: optional float mean of xs
meany: optional float mean of ys
Returns:
Cov(X, Y)
"""
xs = np.asarray(xs)
ys = np.asarray(ys)
if meanx is None:
meanx = np.mean(xs)
if meany is None:
meany = np.mean(ys)
cov = np.dot(xs-meanx, ys-meany) / len(xs)
return cov
def Corr(xs, ys):
"""Computes Corr(X, Y).
Args:
xs: sequence of values
ys: sequence of values
Returns:
Corr(X, Y)
"""
xs = np.asarray(xs)
ys = np.asarray(ys)
meanx, varx = MeanVar(xs)
meany, vary = MeanVar(ys)
corr = Cov(xs, ys, meanx, meany) / math.sqrt(varx * vary)
return corr
def SerialCorr(series, lag=1):
"""Computes the serial correlation of a series.
series: Series
lag: integer number of intervals to shift
returns: float correlation
"""
xs = series[lag:]
ys = series.shift(lag)[lag:]
corr = Corr(xs, ys)
return corr
def SpearmanCorr(xs, ys):
"""Computes Spearman's rank correlation.
Args:
xs: sequence of values
ys: sequence of values
Returns:
float Spearman's correlation
"""
xranks = pandas.Series(xs).rank()
yranks = pandas.Series(ys).rank()
return Corr(xranks, yranks)
def MapToRanks(t):
"""Returns a list of ranks corresponding to the elements in t.
Args:
t: sequence of numbers
Returns:
list of integer ranks, starting at 1
"""
# pair up each value with its index
pairs = enumerate(t)
# sort by value
sorted_pairs = sorted(pairs, key=itemgetter(1))
# pair up each pair with its rank
ranked = enumerate(sorted_pairs)
# sort by index
resorted = sorted(ranked, key=lambda trip: trip[1][0])
# extract the ranks
ranks = [trip[0]+1 for trip in resorted]
return ranks
def LeastSquares(xs, ys):
"""Computes a linear least squares fit for ys as a function of xs.
Args:
xs: sequence of values
ys: sequence of values
Returns:
tuple of (intercept, slope)
"""
meanx, varx = MeanVar(xs)
meany = Mean(ys)
slope = Cov(xs, ys, meanx, meany) / varx
inter = meany - slope * meanx
return inter, slope
def FitLine(xs, inter, slope):
"""Fits a line to the given data.
xs: sequence of x
returns: tuple of numpy arrays (sorted xs, fit ys)
"""
fit_xs = np.sort(xs)
fit_ys = inter + slope * fit_xs
return fit_xs, fit_ys
def Residuals(xs, ys, inter, slope):
"""Computes residuals for a linear fit with parameters inter and slope.
Args:
xs: independent variable
ys: dependent variable
inter: float intercept
slope: float slope
Returns:
list of residuals
"""
xs = np.asarray(xs)
ys = np.asarray(ys)
res = ys - (inter + slope * xs)
return res
def CoefDetermination(ys, res):
"""Computes the coefficient of determination (R^2) for given residuals.
Args:
ys: dependent variable
res: residuals
Returns:
float coefficient of determination
"""
return 1 - Var(res) / Var(ys)
def CorrelatedGenerator(rho):
"""Generates standard normal variates with serial correlation.
rho: target coefficient of correlation
Returns: iterable
"""
x = random.gauss(0, 1)
yield x
sigma = math.sqrt(1 - rho**2)
while True:
x = random.gauss(x * rho, sigma)
yield x
def CorrelatedNormalGenerator(mu, sigma, rho):
"""Generates normal variates with serial correlation.
mu: mean of variate
sigma: standard deviation of variate
rho: target coefficient of correlation
Returns: iterable
"""
for x in CorrelatedGenerator(rho):
yield x * sigma + mu
def RawMoment(xs, k):
"""Computes the kth raw moment of xs.
"""
return sum(x**k for x in xs) / len(xs)
def CentralMoment(xs, k):
"""Computes the kth central moment of xs.
"""
mean = RawMoment(xs, 1)
return sum((x - mean)**k for x in xs) / len(xs)
def StandardizedMoment(xs, k):
"""Computes the kth standardized moment of xs.
"""
var = CentralMoment(xs, 2)
std = math.sqrt(var)
return CentralMoment(xs, k) / std**k
def Skewness(xs):
"""Computes skewness.
"""
return StandardizedMoment(xs, 3)
def Median(xs):
"""Computes the median (50th percentile) of a sequence.
xs: sequence or anything else that can initialize a Cdf
returns: float
"""
cdf = Cdf(xs)
return cdf.Value(0.5)
def IQR(xs):
"""Computes the interquartile of a sequence.
xs: sequence or anything else that can initialize a Cdf
returns: pair of floats
"""
cdf = Cdf(xs)
return cdf.Value(0.25), cdf.Value(0.75)
def PearsonMedianSkewness(xs):
"""Computes the Pearson median skewness.
"""
median = Median(xs)
mean = RawMoment(xs, 1)
var = CentralMoment(xs, 2)
std = math.sqrt(var)
gp = 3 * (mean - median) / std
return gp
class FixedWidthVariables(object):
"""Represents a set of variables in a fixed width file."""
def __init__(self, variables, index_base=0):
"""Initializes.
variables: DataFrame
index_base: are the indices 0 or 1 based?
Attributes:
colspecs: list of (start, end) index tuples
names: list of string variable names
"""
self.variables = variables
# note: by default, subtract 1 from colspecs
self.colspecs = variables[['start', 'end']] - index_base
# convert colspecs to a list of pair of int
self.colspecs = self.colspecs.astype(np.int).values.tolist()
self.names = variables['name']
def ReadFixedWidth(self, filename, **options):
"""Reads a fixed width ASCII file.
filename: string filename
returns: DataFrame
"""
df = pandas.read_fwf(filename,
colspecs=self.colspecs,
names=self.names,
**options)
return df
def ReadStataDct(dct_file, **options):
"""Reads a Stata dictionary file.
dct_file: string filename
options: dict of options passed to open()
returns: FixedWidthVariables object
"""
type_map = dict(byte=int, int=int, long=int, float=float,
double=float, numeric=float)
var_info = []
with open(dct_file, **options) as f:
for line in f:
match = re.search( r'_column\(([^)]*)\)', line)
if not match:
continue
start = int(match.group(1))
t = line.split()
vtype, name, fstring = t[1:4]
name = name.lower()
if vtype.startswith('str'):
vtype = str
else:
vtype = type_map[vtype]
long_desc = ' '.join(t[4:]).strip('"')
var_info.append((start, vtype, name, fstring, long_desc))
columns = ['start', 'type', 'name', 'fstring', 'desc']
variables = pandas.DataFrame(var_info, columns=columns)
# fill in the end column by shifting the start column
variables['end'] = variables.start.shift(-1)
variables.loc[len(variables)-1, 'end'] = 0
dct = FixedWidthVariables(variables, index_base=1)
return dct
def Resample(xs, n=None):
"""Draw a sample from xs with the same length as xs.
xs: sequence
n: sample size (default: len(xs))
returns: NumPy array
"""
if n is None:
n = len(xs)
return np.random.choice(xs, n, replace=True)
def SampleRows(df, nrows, replace=False):
"""Choose a sample of rows from a DataFrame.
df: DataFrame
nrows: number of rows
replace: whether to sample with replacement
returns: DataDf
"""
indices = np.random.choice(df.index, nrows, replace=replace)
sample = df.loc[indices]
return sample
def ResampleRows(df):
"""Resamples rows from a DataFrame.
df: DataFrame
returns: DataFrame
"""
return SampleRows(df, len(df), replace=True)
def ResampleRowsWeighted(df, column='finalwgt'):
"""Resamples a DataFrame using probabilities proportional to given column.
df: DataFrame
column: string column name to use as weights
returns: DataFrame
"""
weights = df[column].copy()
weights /= sum(weights)
indices = np.random.choice(df.index, len(df), replace=True, p=weights)
sample = df.loc[indices]
return sample
def PercentileRow(array, p):
"""Selects the row from a sorted array that maps to percentile p.
p: float 0--100
returns: NumPy array (one row)
"""
rows, cols = array.shape
index = int(rows * p / 100)
return array[index,]
def PercentileRows(ys_seq, percents):
"""Given a collection of lines, selects percentiles along vertical axis.
For example, if ys_seq contains simulation results like ys as a
function of time, and percents contains (5, 95), the result would
be a 90% CI for each vertical slice of the simulation results.
ys_seq: sequence of lines (y values)
percents: list of percentiles (0-100) to select
returns: list of NumPy arrays, one for each percentile
"""
nrows = len(ys_seq)
ncols = len(ys_seq[0])
array = np.zeros((nrows, ncols))
for i, ys in enumerate(ys_seq):
array[i,] = ys
array = np.sort(array, axis=0)
rows = [PercentileRow(array, p) for p in percents]
return rows
def Smooth(xs, sigma=2, **options):
"""Smooths a NumPy array with a Gaussian filter.
xs: sequence
sigma: standard deviation of the filter
"""
return ndimage.filters.gaussian_filter1d(xs, sigma, **options)
class HypothesisTest(object):
"""Represents a hypothesis test."""
def __init__(self, data):
"""Initializes.
data: data in whatever form is relevant
"""
self.data = data
self.MakeModel()
self.actual = self.TestStatistic(data)
self.test_stats = None
self.test_cdf = None
def PValue(self, iters=1000):
"""Computes the distribution of the test statistic and p-value.
iters: number of iterations
returns: float p-value
"""
self.test_stats = [self.TestStatistic(self.RunModel())
for _ in range(iters)]
self.test_cdf = Cdf(self.test_stats)
count = sum(1 for x in self.test_stats if x >= self.actual)
return count / iters
def MaxTestStat(self):
"""Returns the largest test statistic seen during simulations.
"""
return max(self.test_stats)
def PlotCdf(self, label=None):
"""Draws a Cdf with vertical lines at the observed test stat.
"""
def VertLine(x):
"""Draws a vertical line at x."""
thinkplot.Plot([x, x], [0, 1], color='0.8')
VertLine(self.actual)
thinkplot.Cdf(self.test_cdf, label=label)
def TestStatistic(self, data):
"""Computes the test statistic.
data: data in whatever form is relevant
"""
raise UnimplementedMethodException()
def MakeModel(self):
"""Build a model of the null hypothesis.
"""
pass
def RunModel(self):
"""Run the model of the null hypothesis.
returns: simulated data
"""
raise UnimplementedMethodException()
def main():
pass
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>sync.rs<|end_file_name|><|fim▁begin|>// * This file is part of the uutils coreutils package.
// *
// * (c) Alexander Fomin <[email protected]>
// *
// * For the full copyright and license information, please view the LICENSE
// * file that was distributed with this source code.
/* Last synced with: sync (GNU coreutils) 8.13 */
extern crate libc;
use clap::{crate_version, App, AppSettings, Arg};
use std::path::Path;
use uucore::display::Quotable;
use uucore::error::{UResult, USimpleError};
use uucore::format_usage;
static ABOUT: &str = "Synchronize cached writes to persistent storage";
const USAGE: &str = "{} [OPTION]... FILE...";
pub mod options {
pub static FILE_SYSTEM: &str = "file-system";
pub static DATA: &str = "data";
}
static ARG_FILES: &str = "files";
#[cfg(unix)]
mod platform {
use super::libc;
#[cfg(target_os = "linux")]
use std::fs::File;
#[cfg(target_os = "linux")]
use std::os::unix::io::AsRawFd;
pub unsafe fn do_sync() -> isize {
libc::sync();
0
}
#[cfg(target_os = "linux")]
pub unsafe fn do_syncfs(files: Vec<String>) -> isize {
for path in files {
let f = File::open(&path).unwrap();
let fd = f.as_raw_fd();
libc::syscall(libc::SYS_syncfs, fd);
}
0
}
#[cfg(target_os = "linux")]
pub unsafe fn do_fdatasync(files: Vec<String>) -> isize {
for path in files {
let f = File::open(&path).unwrap();
let fd = f.as_raw_fd();
libc::syscall(libc::SYS_fdatasync, fd);
}
0
}
}
#[cfg(windows)]
mod platform {
extern crate winapi;
use self::winapi::shared::minwindef;
use self::winapi::shared::winerror;
use self::winapi::um::handleapi;
use self::winapi::um::winbase;
use self::winapi::um::winnt;
use std::fs::OpenOptions;
use std::mem;
use std::os::windows::prelude::*;
use std::path::Path;
use uucore::crash;
use uucore::wide::{FromWide, ToWide};
unsafe fn flush_volume(name: &str) {
let name_wide = name.to_wide_null();
if winapi::um::fileapi::GetDriveTypeW(name_wide.as_ptr()) == winbase::DRIVE_FIXED {
let sliced_name = &name[..name.len() - 1]; // eliminate trailing backslash
match OpenOptions::new().write(true).open(sliced_name) {
Ok(file) => {
if winapi::um::fileapi::FlushFileBuffers(file.as_raw_handle()) == 0 {
crash!(
winapi::um::errhandlingapi::GetLastError() as i32,
"failed to flush file buffer"
);
}
}
Err(e) => crash!(
e.raw_os_error().unwrap_or(1),
"failed to create volume handle"
),
}
}
}
unsafe fn find_first_volume() -> (String, winnt::HANDLE) {
#[allow(deprecated)]
let mut name: [winnt::WCHAR; minwindef::MAX_PATH] = mem::uninitialized();
let handle = winapi::um::fileapi::FindFirstVolumeW(
name.as_mut_ptr(),
name.len() as minwindef::DWORD,
);
if handle == handleapi::INVALID_HANDLE_VALUE {
crash!(
winapi::um::errhandlingapi::GetLastError() as i32,
"failed to find first volume"
);
}
(String::from_wide_null(&name), handle)
}
unsafe fn find_all_volumes() -> Vec<String> {
let (first_volume, next_volume_handle) = find_first_volume();
let mut volumes = vec![first_volume];
loop {
#[allow(deprecated)]
let mut name: [winnt::WCHAR; minwindef::MAX_PATH] = mem::uninitialized();
if winapi::um::fileapi::FindNextVolumeW(
next_volume_handle,
name.as_mut_ptr(),
name.len() as minwindef::DWORD,
) == 0
{
match winapi::um::errhandlingapi::GetLastError() {
winerror::ERROR_NO_MORE_FILES => {
winapi::um::fileapi::FindVolumeClose(next_volume_handle);
return volumes;
}
err => crash!(err as i32, "failed to find next volume"),
}
} else {
volumes.push(String::from_wide_null(&name));
}
}
}
pub unsafe fn do_sync() -> isize {
let volumes = find_all_volumes();
for vol in &volumes {
flush_volume(vol);
}
0
}
pub unsafe fn do_syncfs(files: Vec<String>) -> isize {
for path in files {
flush_volume(
Path::new(&path)
.components()
.next()
.unwrap()
.as_os_str()
.to_str()
.unwrap(),
);
}
0
}
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let matches = uu_app().get_matches_from(args);
let files: Vec<String> = matches
.values_of(ARG_FILES)
.map(|v| v.map(ToString::to_string).collect())
.unwrap_or_default();
for f in &files {
if !Path::new(&f).exists() {
return Err(USimpleError::new(
1,
format!("cannot stat {}: No such file or directory", f.quote()),
));
}
}
#[allow(clippy::if_same_then_else)]
if matches.is_present(options::FILE_SYSTEM) {
#[cfg(any(target_os = "linux", target_os = "windows"))]
syncfs(files);
} else if matches.is_present(options::DATA) {
#[cfg(target_os = "linux")]
fdatasync(files);
} else {<|fim▁hole|>
pub fn uu_app<'a>() -> App<'a> {
App::new(uucore::util_name())
.version(crate_version!())
.about(ABOUT)
.override_usage(format_usage(USAGE))
.setting(AppSettings::InferLongArgs)
.arg(
Arg::new(options::FILE_SYSTEM)
.short('f')
.long(options::FILE_SYSTEM)
.conflicts_with(options::DATA)
.help("sync the file systems that contain the files (Linux and Windows only)"),
)
.arg(
Arg::new(options::DATA)
.short('d')
.long(options::DATA)
.conflicts_with(options::FILE_SYSTEM)
.help("sync only file data, no unneeded metadata (Linux only)"),
)
.arg(
Arg::new(ARG_FILES)
.multiple_occurrences(true)
.takes_value(true),
)
}
fn sync() -> isize {
unsafe { platform::do_sync() }
}
#[cfg(any(target_os = "linux", target_os = "windows"))]
fn syncfs(files: Vec<String>) -> isize {
unsafe { platform::do_syncfs(files) }
}
#[cfg(target_os = "linux")]
fn fdatasync(files: Vec<String>) -> isize {
unsafe { platform::do_fdatasync(files) }
}<|fim▁end|> | sync();
}
Ok(())
} |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.conf import settings as settings
from django.contrib.auth.decorators import login_required
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.utils import simplejson
from django.utils.translation import ugettext_lazy as _
from mitxmako.shortcuts import render_to_response
from courseware.courses import get_opt_course_with_access
from courseware.access import has_access
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.django import modulestore
from .models import Revision, Article, Namespace, CreateArticleForm, RevisionFormWithTitle, RevisionForm
import wiki_settings
def wiki_reverse(wiki_page, article=None, course=None, namespace=None, args=[], kwargs={}):
kwargs = dict(kwargs) # TODO: Figure out why if I don't do this kwargs sometimes contains {'article_path'}
if not 'course_id' in kwargs and course:
kwargs['course_id'] = course.id
if not 'article_path' in kwargs and article:
kwargs['article_path'] = article.get_path()
if not 'namespace' in kwargs and namespace:
kwargs['namespace'] = namespace
return reverse(wiki_page, kwargs=kwargs, args=args)
def update_template_dictionary(dictionary, request=None, course=None, article=None, revision=None):
if article:
dictionary['wiki_article'] = article
dictionary['wiki_title'] = article.title # TODO: What is the title when viewing the article in a course?
if not course and 'namespace' not in dictionary:
dictionary['namespace'] = article.namespace.name
if course:
dictionary['course'] = course
if 'namespace' not in dictionary:
dictionary['namespace'] = "edX"
else:
dictionary['course'] = None
if revision:
dictionary['wiki_article_revision'] = revision
dictionary['wiki_current_revision_deleted'] = not (revision.deleted == 0)
if request:
dictionary.update(csrf(request))
if request and course:
dictionary['staff_access'] = has_access(request.user, course, 'staff')
else:
dictionary['staff_access'] = False
def view(request, article_path, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
(article, err) = get_article(request, article_path, course)
if err:
return err
perm_err = check_permissions(request, article, course, check_read=True, check_deleted=True)
if perm_err:
return perm_err
d = {}
update_template_dictionary(d, request, course, article, article.current_revision)
return render_to_response('simplewiki/simplewiki_view.html', d)
def view_revision(request, revision_number, article_path, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
(article, err) = get_article(request, article_path, course)
if err:
return err
try:
revision = Revision.objects.get(counter=int(revision_number), article=article)
except:
d = {'wiki_err_norevision': revision_number}
update_template_dictionary(d, request, course, article)
return render_to_response('simplewiki/simplewiki_error.html', d)
perm_err = check_permissions(request, article, course, check_read=True, check_deleted=True, revision=revision)
if perm_err:
return perm_err
d = {}
update_template_dictionary(d, request, course, article, revision)
return render_to_response('simplewiki/simplewiki_view.html', d)
def root_redirect(request, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
#TODO: Add a default namespace to settings.
namespace = "edX"
try:
root = Article.get_root(namespace)
return HttpResponseRedirect(reverse('wiki_view', kwargs={'course_id': course_id, 'article_path': root.get_path()}))
except:
# If the root is not found, we probably are loading this class for the first time
# We should make sure the namespace exists so the root article can be created.
Namespace.ensure_namespace(namespace)
err = not_found(request, namespace + '/', course)
return err
def create(request, article_path, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
article_path_components = article_path.split('/')
# Ensure the namespace exists
if not len(article_path_components) >= 1 or len(article_path_components[0]) == 0:
d = {'wiki_err_no_namespace': True}
update_template_dictionary(d, request, course)
return render_to_response('simplewiki/simplewiki_error.html', d)
namespace = None
try:
namespace = Namespace.objects.get(name__exact=article_path_components[0])
except Namespace.DoesNotExist, ValueError:
d = {'wiki_err_bad_namespace': True}
update_template_dictionary(d, request, course)
return render_to_response('simplewiki/simplewiki_error.html', d)
# See if the article already exists
article_slug = article_path_components[1] if len(article_path_components) >= 2 else ''
#TODO: Make sure the slug only contains legal characters (which is already done a bit by the url regex)
try:
existing_article = Article.objects.get(namespace=namespace, slug__exact=article_slug)
#It already exists, so we just redirect to view the article
return HttpResponseRedirect(wiki_reverse("wiki_view", existing_article, course))
except Article.DoesNotExist:
#This is good. The article doesn't exist
pass
#TODO: Once we have permissions for namespaces, we should check for create permissions
#check_permissions(request, #namespace#, check_locked=False, check_write=True, check_deleted=True)
if request.method == 'POST':
f = CreateArticleForm(request.POST)
if f.is_valid():
article = Article()
article.slug = article_slug
if not request.user.is_anonymous():
article.created_by = request.user
article.title = f.cleaned_data.get('title')
article.namespace = namespace
a = article.save()
new_revision = f.save(commit=False)
if not request.user.is_anonymous():
new_revision.revision_user = request.user
new_revision.article = article
new_revision.save()
return HttpResponseRedirect(wiki_reverse("wiki_view", article, course))
else:
f = CreateArticleForm(initial={'title': request.GET.get('wiki_article_name', article_slug),
'contents': _('Headline\n===\n\n')})
d = {'wiki_form': f, 'create_article': True, 'namespace': namespace.name}
update_template_dictionary(d, request, course)
return render_to_response('simplewiki/simplewiki_edit.html', d)
def edit(request, article_path, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
(article, err) = get_article(request, article_path, course)
if err:
return err
# Check write permissions
perm_err = check_permissions(request, article, course, check_write=True, check_locked=True, check_deleted=False)
if perm_err:
return perm_err
if wiki_settings.WIKI_ALLOW_TITLE_EDIT:
EditForm = RevisionFormWithTitle
else:
EditForm = RevisionForm
if request.method == 'POST':
f = EditForm(request.POST)
if f.is_valid():
new_revision = f.save(commit=False)
new_revision.article = article
if request.POST.__contains__('delete'):
if (article.current_revision.deleted == 1): # This article has already been deleted. Redirect
return HttpResponseRedirect(wiki_reverse('wiki_view', article, course))
new_revision.contents = ""
new_revision.deleted = 1
elif not new_revision.get_diff():
return HttpResponseRedirect(wiki_reverse('wiki_view', article, course))
if not request.user.is_anonymous():
new_revision.revision_user = request.user
new_revision.save()
if wiki_settings.WIKI_ALLOW_TITLE_EDIT:
new_revision.article.title = f.cleaned_data['title']
new_revision.article.save()
return HttpResponseRedirect(wiki_reverse('wiki_view', article, course))
else:
startContents = article.current_revision.contents if (article.current_revision.deleted == 0) else 'Headline\n===\n\n'
f = EditForm({'contents': startContents, 'title': article.title})
d = {'wiki_form': f}
update_template_dictionary(d, request, course, article)
return render_to_response('simplewiki/simplewiki_edit.html', d)
def history(request, article_path, page=1, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
(article, err) = get_article(request, article_path, course)
if err:
return err
perm_err = check_permissions(request, article, course, check_read=True, check_deleted=False)
if perm_err:
return perm_err
page_size = 10
if page is None:
page = 1
try:
p = int(page)
except ValueError:
p = 1
history = Revision.objects.filter(article__exact=article).order_by('-counter').select_related('previous_revision__counter', 'revision_user', 'wiki_article')
if request.method == 'POST':
if request.POST.__contains__('revision'): # They selected a version, but they can be either deleting or changing the version
perm_err = check_permissions(request, article, course, check_write=True, check_locked=True)
if perm_err:
return perm_err
redirectURL = wiki_reverse('wiki_view', article, course)
try:
r = int(request.POST['revision'])
revision = Revision.objects.get(id=r)
if request.POST.__contains__('change'):
article.current_revision = revision
article.save()
elif request.POST.__contains__('view'):
redirectURL = wiki_reverse('wiki_view_revision', course=course,
kwargs={'revision_number': revision.counter, 'article_path': article.get_path()})
#The rese of these are admin functions
elif request.POST.__contains__('delete') and request.user.is_superuser:
if (revision.deleted == 0):
revision.adminSetDeleted(2)
elif request.POST.__contains__('restore') and request.user.is_superuser:
if (revision.deleted == 2):
revision.adminSetDeleted(0)
elif request.POST.__contains__('delete_all') and request.user.is_superuser:
Revision.objects.filter(article__exact=article, deleted=0).update(deleted=2)
elif request.POST.__contains__('lock_article'):
article.locked = not article.locked
article.save()
except Exception as e:
print str(e)
pass
finally:
return HttpResponseRedirect(redirectURL)
#
#
# <input type="submit" name="delete" value="Delete revision"/>
# <input type="submit" name="restore" value="Restore revision"/>
# <input type="submit" name="delete_all" value="Delete all revisions">
# %else:
# <input type="submit" name="delete_article" value="Delete all revisions">
#
page_count = (history.count() + (page_size - 1)) / page_size
if p > page_count:
p = 1
beginItem = (p - 1) * page_size
next_page = p + 1 if page_count > p else None
prev_page = p - 1 if p > 1 else None<|fim▁hole|>
d = {'wiki_page': p,
'wiki_next_page': next_page,
'wiki_prev_page': prev_page,
'wiki_history': history[beginItem:beginItem + page_size],
'show_delete_revision': request.user.is_superuser}
update_template_dictionary(d, request, course, article)
return render_to_response('simplewiki/simplewiki_history.html', d)
def revision_feed(request, page=1, namespace=None, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
page_size = 10
if page is None:
page = 1
try:
p = int(page)
except ValueError:
p = 1
history = Revision.objects.order_by('-revision_date').select_related('revision_user', 'article', 'previous_revision')
page_count = (history.count() + (page_size - 1)) / page_size
if p > page_count:
p = 1
beginItem = (p - 1) * page_size
next_page = p + 1 if page_count > p else None
prev_page = p - 1 if p > 1 else None
d = {'wiki_page': p,
'wiki_next_page': next_page,
'wiki_prev_page': prev_page,
'wiki_history': history[beginItem:beginItem + page_size],
'show_delete_revision': request.user.is_superuser,
'namespace': namespace}
update_template_dictionary(d, request, course)
return render_to_response('simplewiki/simplewiki_revision_feed.html', d)
def search_articles(request, namespace=None, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
# blampe: We should check for the presence of other popular django search
# apps and use those if possible. Only fall back on this as a last resort.
# Adding some context to results (eg where matches were) would also be nice.
# todo: maybe do some perm checking here
if request.method == 'GET':
querystring = request.GET.get('value', '').strip()
else:
querystring = ""
results = Article.objects.all()
if namespace:
results = results.filter(namespace__name__exact=namespace)
if request.user.is_superuser:
results = results.order_by('current_revision__deleted')
else:
results = results.filter(current_revision__deleted=0)
if querystring:
for queryword in querystring.split():
# Basic negation is as fancy as we get right now
if queryword[0] == '-' and len(queryword) > 1:
results._search = lambda x: results.exclude(x)
queryword = queryword[1:]
else:
results._search = lambda x: results.filter(x)
results = results._search(Q(current_revision__contents__icontains=queryword) | \
Q(title__icontains=queryword))
results = results.select_related('current_revision__deleted', 'namespace')
results = sorted(results, key=lambda article: (article.current_revision.deleted, article.get_path().lower()))
if len(results) == 1 and querystring:
return HttpResponseRedirect(wiki_reverse('wiki_view', article=results[0], course=course))
else:
d = {'wiki_search_results': results,
'wiki_search_query': querystring,
'namespace': namespace}
update_template_dictionary(d, request, course)
return render_to_response('simplewiki/simplewiki_searchresults.html', d)
def search_add_related(request, course_id, slug, namespace):
course = get_opt_course_with_access(request.user, course_id, 'load')
(article, err) = get_article(request, slug, namespace if namespace else course_id)
if err:
return err
perm_err = check_permissions(request, article, course, check_read=True)
if perm_err:
return perm_err
search_string = request.GET.get('query', None)
self_pk = request.GET.get('self', None)
if search_string:
results = []
related = Article.objects.filter(title__istartswith=search_string)
others = article.related.all()
if self_pk:
related = related.exclude(pk=self_pk)
if others:
related = related.exclude(related__in=others)
related = related.order_by('title')[:10]
for item in related:
results.append({'id': str(item.id),
'value': item.title,
'info': item.get_url()})
else:
results = []
json = simplejson.dumps({'results': results})
return HttpResponse(json, mimetype='application/json')
def add_related(request, course_id, slug, namespace):
course = get_opt_course_with_access(request.user, course_id, 'load')
(article, err) = get_article(request, slug, namespace if namespace else course_id)
if err:
return err
perm_err = check_permissions(request, article, course, check_write=True, check_locked=True)
if perm_err:
return perm_err
try:
related_id = request.POST['id']
rel = Article.objects.get(id=related_id)
has_already = article.related.filter(id=related_id).count()
if has_already == 0 and not rel == article:
article.related.add(rel)
article.save()
except:
pass
finally:
return HttpResponseRedirect(reverse('wiki_view', args=(article.get_url(),)))
def remove_related(request, course_id, namespace, slug, related_id):
course = get_opt_course_with_access(request.user, course_id, 'load')
(article, err) = get_article(request, slug, namespace if namespace else course_id)
if err:
return err
perm_err = check_permissions(request, article, course, check_write=True, check_locked=True)
if perm_err:
return perm_err
try:
rel_id = int(related_id)
rel = Article.objects.get(id=rel_id)
article.related.remove(rel)
article.save()
except:
pass
finally:
return HttpResponseRedirect(reverse('wiki_view', args=(article.get_url(),)))
def random_article(request, course_id=None):
course = get_opt_course_with_access(request.user, course_id, 'load')
from random import randint
num_arts = Article.objects.count()
article = Article.objects.all()[randint(0, num_arts - 1)]
return HttpResponseRedirect(wiki_reverse('wiki_view', article, course))
def not_found(request, article_path, course):
"""Generate a NOT FOUND message for some URL"""
d = {'wiki_err_notfound': True,
'article_path': article_path,
'namespace': "edX"}
update_template_dictionary(d, request, course)
return render_to_response('simplewiki/simplewiki_error.html', d)
def get_article(request, article_path, course):
err = None
article = None
try:
article = Article.get_article(article_path)
except Article.DoesNotExist, ValueError:
err = not_found(request, article_path, course)
return (article, err)
def check_permissions(request, article, course, check_read=False, check_write=False, check_locked=False, check_deleted=False, revision=None):
read_err = check_read and not article.can_read(request.user)
write_err = check_write and not article.can_write(request.user)
locked_err = check_locked and article.locked
if revision is None:
revision = article.current_revision
deleted_err = check_deleted and not (revision.deleted == 0)
if (request.user.is_superuser):
deleted_err = False
locked_err = False
if read_err or write_err or locked_err or deleted_err:
d = {'wiki_article': article,
'wiki_err_noread': read_err,
'wiki_err_nowrite': write_err,
'wiki_err_locked': locked_err,
'wiki_err_deleted': deleted_err, }
update_template_dictionary(d, request, course)
# TODO: Make this a little less jarring by just displaying an error
# on the current page? (no such redirect happens for an anon upload yet)
# benjaoming: I think this is the nicest way of displaying an error, but
# these errors shouldn't occur, but rather be prevented on the other pages.
return render_to_response('simplewiki/simplewiki_error.html', d)
else:
return None
####################
# LOGIN PROTECTION #
####################
if wiki_settings.WIKI_REQUIRE_LOGIN_VIEW:
view = login_required(view)
history = login_required(history)
search_articles = login_required(search_articles)
root_redirect = login_required(root_redirect)
revision_feed = login_required(revision_feed)
random_article = login_required(random_article)
search_add_related = login_required(search_add_related)
not_found = login_required(not_found)
view_revision = login_required(view_revision)
if wiki_settings.WIKI_REQUIRE_LOGIN_EDIT:
create = login_required(create)
edit = login_required(edit)
add_related = login_required(add_related)
remove_related = login_required(remove_related)
if wiki_settings.WIKI_CONTEXT_PREPROCESSORS:
settings.TEMPLATE_CONTEXT_PROCESSORS += wiki_settings.WIKI_CONTEXT_PREPROCESSORS<|fim▁end|> | |
<|file_name|>module-info.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.<|fim▁hole|> *
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
module test {
// jdk.test.resources.classes.MyResourcesProvider is in named.bundles.
requires named.bundles;
uses jdk.test.resources.classes.MyResourcesProvider;
uses jdk.test.resources.props.MyResourcesProvider;
}<|fim▁end|> | |
<|file_name|>ExceptionTwice-out.java<|end_file_name|><|fim▁begin|>class MyException extends RuntimeException{}
class MyClass {
public void foo() throws MyException {
throw new MyException();<caret> <|fim▁hole|>}<|fim▁end|> | } |
<|file_name|>Listener.py<|end_file_name|><|fim▁begin|># PAL
# Listener
# Thomas Elvey
# 0.2<|fim▁hole|>
import math, audioop, time, wave, os
from collections import deque
from pocketsphinx import *
import pyaudio
from VoiceDecoder import VoiceDecoder
class Listener():
def __init__(self):
print "Listener Created"
# INIT FUNCTIONS
# self.setupMic()
# self.listen()
self.d = VoiceDecoder('/usr/share/pocketsphinx/model/hmm/en_US/hub4wsj_sc_8k',
'/home/pi/PAL/resources/PALActivate.dic',
'/home/pi/PAL/resources/PALActivate.lm',
'/home/pi/PAL/resources/PALActivateKeyphrase.list')
def setupMic(self, samples = 50):
# Setup microphone, background noise adjustments etc.
print "Getting mic values..."
p = pyaudio.PyAudio()
stream = p.open(format = pyaudio.paInt16,
channels = 1,
rate = 16000,
input = True,
frames_per_buffer = 1024)
values = [math.sqrt(abs(audioop.avg(stream.read(1024), 4)))
for x in range(samples)]
values = sorted(values, reverse = True)
r = sum(values[:int(samples * 0.2)]) / int(samples * 0.2)
print "Finished, Average aduio noise is: ", r
stream.close()
p.terminate()
if r < 3000:
self.threshold = 3500
else:
self.threshold = r + 100
def saveVoice(self, data, p):
filename = 'output_'+str(int(time.time()))
data = ''.join(data)
wf = wave.open(filename + '.wav', 'wb')
wf.setnchannels(1)
wf.setsampwidth(p.get_sample_size(pyaudio.paInt16))
wf.setframerate(16000)
wf.writeframes(data)
wf.close()
return filename + '.wav'
def listen(self, silenceTime = 1, previousAudio = 0.5):
# Listen to mic and save temp WAV
p = pyaudio.PyAudio()
stream = p.open(format = pyaudio.paInt16,
channels = 1,
rate = 16000,
input = True,
frames_per_buffer = 1024)
audioData = []
currentData = ''
rel = 16000 / 1024
slidWin = deque(maxlen = silenceTime * rel)
prevAudio = deque(maxlen = previousAudio * rel)
listenSetup = False
print "Listen class active"
while True:
currentData = stream.read(1024)
slidWin.append(math.sqrt(abs(audioop.avg(currentData, 4))))
print sum([x > self.threshold for x in slidWin])
if sum([x > self.threshold for x in slidWin]) > 0:
if listenSetup == False:
print "Starting recording..."
listenSetup = True
audioData.append(currentData)
elif listenSetup:
print "Finished recording."
filename = self.saveVoice(list(prevAudio) + audioData, p)
words = self.d.decodeWAV(filename)
print "Words detected: ", words
os.remove(filename)
listenSetup = False
slidWin = deque(maxlen = silenceTime * rel)
prevAudio = deque(maxlen = previousAudio * rel)
audioData = []
print "Reset, now listening..."
else:
print "Else stream..."
prevAudio.append(currentData)<|fim▁end|> | |
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>extern crate serde_json;
use log::LogLevelFilter;
use logger::MetricsLoggerFactory;
use logger::MetricsLogger;
use self::serde_json::Value;
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use std::collections::BTreeMap;
// This is the config file that reads all the json from metricsconfig.json. We can initially use
// an environment variable to locate this file or can be passed in.
// The worker thread and the app thread will both read from this file.
#[allow(non_upper_case_globals)]
const logger: fn() -> &'static MetricsLogger = MetricsLoggerFactory::get_logger;
pub struct Config {
parsed_json: Option<BTreeMap<String, Value>>,
}
impl Config {
pub fn new() -> Config {
Config { parsed_json: None }
}
pub fn create_and_write_json(&mut self, file_name: &str, json: &str) {
logger().log(LogLevelFilter::Debug,
format!("file: {}", file_name).as_str());
let f = File::create(file_name);
match f {
Ok(mut t) => {
let _ = t.write(json.as_bytes());
}
Err(e) => panic!("cannot open file: {}", e),
};
}
pub fn init(&mut self, file_name: &str) -> bool {
// TODO: Need to make this look at env variable or take a path to the file.
logger().log(LogLevelFilter::Debug,
format!("config file: {}", file_name).as_str());
let path = Path::new(file_name);
let display = path.display();
// Open the path in read-only mode.<|fim▁hole|> format!("couldn't open {}: {}", display, Error::description(&why))
.as_str());
return false;
}
Ok(file) => file,
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => {
logger().log(LogLevelFilter::Error, format!("Error: {}", why).as_str());
return false;
}
Ok(_) => {
logger().log(LogLevelFilter::Debug,
format!("file contains: {}", s).as_str())
}
}
self.parse_json(s);
true
}
fn parse_json(&mut self, json_string: String) {
// It's ok to unwrap here because if something is wrong here, we want to
// know and expose the bug.
let data: Value = serde_json::from_str(&json_string).unwrap();
self.parsed_json = Some(data.as_object().unwrap().clone());
}
pub fn get(&mut self, key: &str) -> Option<Value> {
if let Some(ref mut parsed_json) = self.parsed_json {
let val = parsed_json.get(key);
if val == None {
None
} else {
Some(val.unwrap().clone())
}
} else {
panic!("Data not parsed");
}
}
pub fn get_string(&mut self, key: &str) -> String {
if let Some(ref mut parsed_json) = self.parsed_json {
let val = parsed_json.get(key);
match val {
Some(v) => {
let nv = v.clone();
match nv {
Value::String(nv) => nv.clone(),
_ => panic!("Expected a String Value"),
}
},
None => panic!("Value not found"),
}
} else {
panic!("Data not parsed");
}
}
pub fn get_u64(&mut self, key: &str) -> u64 {
println!("Getting u64 value for {}", key);
if let Some(ref mut parsed_json) = self.parsed_json {
let val = parsed_json.get(key);
match val {
Some(v) => {
let nv = v.clone();
match nv {
Value::U64(nv) => nv.clone(),
_ => panic!("Expected a u64"),
}
},
None => panic!("Value not found"),
}
} else {
panic!("Data not parsed");
}
}
}
#[cfg(not(feature = "integration"))]
#[cfg(test)]
describe! config_file_found {
it "should open the config file when it exists" {
use std::fs;
let mut cfg = Config::new();
// Create sample config file
let file = "test.json";
cfg.create_and_write_json(file, "{\"cid\": \"123456\"}");
let found = cfg.init(file);
// No longer need the sample config file, delete it
match fs::remove_file(file) {
Ok(_) => println!("deleted file {}", file),
Err(e) => println!("Error deleting {}: {}", file, e)
}
assert_eq!(found, true);
}
it "should return false if config file not found" {
let mut cfg = Config::new();
let found = cfg.init("nosuchfile.json");
assert_eq!(found, false);
}
}
#[cfg(not(feature = "integration"))]
#[cfg(test)]
describe! parsing_file {
before_each {
// If the import is removed, it will not compile, but it gives a warning
// unless you have the following line. Most likely a compiler bug.
#[allow(unused_imports)]
use config::serde_json::Value;
let s = r#"{ "sendInterval": 10,
"saveInterval": 2,
"startTime": 0,
"savePath": "testSavePath",
"logPath": "/Volumes/development/metrics_controller/log" }"#.to_string();
let mut cfg = Config::new();
cfg.parse_json(s);
}
it "get_u64 should return a u64 for an existing key" {
let start_time = cfg.get_u64("startTime");
assert_eq!(start_time, 0);
}
failing "get_u64 should fail for a missing key" {
cfg.get_u64("start22Time");
}
it "get_string should return a string for an existing key" {
let save_path: String = cfg.get_string("savePath").to_string();
assert_eq!(save_path, "testSavePath");
}
failing "get_string should fail for a missing key" {
cfg.get_string("save22Path").to_string();
}
it "get should return a value for an existing key" {
match cfg.get("sendInterval") {
Some(v) => assert_eq!(v, Value::U64(10)),
None => {
assert!(false);
},
}
}
it "get should return None for a missing key" {
let val: Option<Value> = cfg.get("send22Interval");
match val {
Some(_) => assert!(false),
None => {
assert!(true);
},
}
}
}<|fim▁end|> | let mut file = match File::open(&path) {
Err(why) => {
logger().log(LogLevelFilter::Error, |
<|file_name|>Post.ts<|end_file_name|><|fim▁begin|>import {Entity, PrimaryGeneratedColumn} from "../../../../src";
import {Column} from "../../../../src/decorator/columns/Column";
@Entity()
export class Post {
@PrimaryGeneratedColumn()
id: number;
@Column("timestamp", { precision: 3, default: () => "CURRENT_TIMESTAMP(3)", onUpdate: "CURRENT_TIMESTAMP(3)"})
updateAt: Date;
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>matrix.rs<|end_file_name|><|fim▁begin|>use std::ops::{Index, IndexMut};
/// A type shorthand for the lookup key we'll be using, a width/height tuple.
type Idx = (usize, usize);
/// A 2-dimensional matrix of f64 values.
#[derive(Clone, Debug)]
pub struct Matrix {
width: usize,
contents: Vec<f64>,
}
impl Matrix {
/// Creates a new Matrix with the provided width and height.
pub fn new(width: usize, height: usize) -> Matrix {
Matrix {
width,
contents: vec![0.0; width * height],
}
}
}
impl Index<Idx> for Matrix {
type Output = f64;
fn index(&self, (width, height): Idx) -> &Self::Output {
&self.contents[height * self.width + width]
}
}
impl IndexMut<Idx> for Matrix {
fn index_mut(&mut self, (width, height): Idx) -> &mut Self::Output {
&mut self.contents[height * self.width + width]
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_matrix() {
let mut matrix = Matrix::new(1024, 768);
// Index<Idx>
assert_eq!(matrix[(1023, 767)], 0.0);
// IndexMut<Idx>
matrix[(12, 24)] = 123.456;
assert_eq!(matrix[(12, 24)], 123.456);
assert_eq!(matrix[(24, 12)], 0.0);<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>ztypes_linux_mips.go<|end_file_name|><|fim▁begin|>// cgo -godefs -- -Wall -Werror -static -I/tmp/include linux/types.go | go run mkpost.go
// Code generated by the command above; see README.md. DO NOT EDIT.
// +build mips,linux
package unix
const (
sizeofPtr = 0x4
sizeofShort = 0x2
sizeofInt = 0x4
sizeofLong = 0x4
sizeofLongLong = 0x8
PathMax = 0x1000
)
type (
_C_short int16
_C_int int32
_C_long int32
_C_long_long int64
)
type Timespec struct {
Sec int32
Nsec int32
}
type Timeval struct {
Sec int32
Usec int32
}
type Timex struct {
Modes uint32
Offset int32
Freq int32
Maxerror int32
Esterror int32
Status int32
Constant int32
Precision int32
Tolerance int32
Time Timeval
Tick int32
Ppsfreq int32
Jitter int32
Shift int32
Stabil int32
Jitcnt int32
Calcnt int32
Errcnt int32
Stbcnt int32
Tai int32
_ [44]byte
}
type Time_t int32
type Tms struct {
Utime int32
Stime int32
Cutime int32
Cstime int32
}
type Utimbuf struct {
Actime int32
Modtime int32
}
type Rusage struct {
Utime Timeval
Stime Timeval
Maxrss int32
Ixrss int32
Idrss int32
Isrss int32
Minflt int32
Majflt int32
Nswap int32
Inblock int32
Oublock int32
Msgsnd int32
Msgrcv int32
Nsignals int32
Nvcsw int32
Nivcsw int32
}
type Rlimit struct {
Cur uint64
Max uint64
}
type _Gid_t uint32
type Stat_t struct {
Dev uint32
Pad1 [3]int32
Ino uint64
Mode uint32
Nlink uint32
Uid uint32
Gid uint32
Rdev uint32
Pad2 [3]int32
Size int64
Atim Timespec
Mtim Timespec
Ctim Timespec
Blksize int32
Pad4 int32
Blocks int64
Pad5 [14]int32
}
type StatxTimestamp struct {
Sec int64
Nsec uint32
_ int32
}
type Statx_t struct {
Mask uint32
Blksize uint32
Attributes uint64
Nlink uint32
Uid uint32
Gid uint32
Mode uint16
_ [1]uint16
Ino uint64
Size uint64
Blocks uint64
Attributes_mask uint64
Atime StatxTimestamp
Btime StatxTimestamp
Ctime StatxTimestamp
Mtime StatxTimestamp
Rdev_major uint32
Rdev_minor uint32
Dev_major uint32
Dev_minor uint32
_ [14]uint64
}
type Dirent struct {
Ino uint64
Off int64
Reclen uint16
Type uint8
Name [256]int8
_ [5]byte
}
type Fsid struct {
Val [2]int32
}
type Flock_t struct {
Type int16
Whence int16
_ [4]byte
Start int64
Len int64
Pid int32
_ [4]byte
}
type FscryptPolicy struct {
Version uint8
Contents_encryption_mode uint8
Filenames_encryption_mode uint8
Flags uint8
Master_key_descriptor [8]uint8
}
type FscryptKey struct {
Mode uint32
Raw [64]uint8
Size uint32
}
type KeyctlDHParams struct {
Private int32
Prime int32
Base int32
}
const (
FADV_NORMAL = 0x0
FADV_RANDOM = 0x1
FADV_SEQUENTIAL = 0x2
FADV_WILLNEED = 0x3
FADV_DONTNEED = 0x4
FADV_NOREUSE = 0x5
)
type RawSockaddrInet4 struct {
Family uint16
Port uint16
Addr [4]byte /* in_addr */
Zero [8]uint8
}
type RawSockaddrInet6 struct {
Family uint16
Port uint16
Flowinfo uint32
Addr [16]byte /* in6_addr */
Scope_id uint32
}
type RawSockaddrUnix struct {
Family uint16
Path [108]int8
}
type RawSockaddrLinklayer struct {
Family uint16
Protocol uint16
Ifindex int32
Hatype uint16
Pkttype uint8
Halen uint8
Addr [8]uint8
}
type RawSockaddrNetlink struct {
Family uint16
Pad uint16
Pid uint32
Groups uint32
}
type RawSockaddrHCI struct {
Family uint16
Dev uint16
Channel uint16
}
type RawSockaddrL2 struct {
Family uint16
Psm uint16
Bdaddr [6]uint8
Cid uint16
Bdaddr_type uint8
_ [1]byte
}
type RawSockaddrCAN struct {
Family uint16
_ [2]byte
Ifindex int32
Addr [8]byte
}
type RawSockaddrALG struct {
Family uint16
Type [14]uint8
Feat uint32
Mask uint32
Name [64]uint8
}
type RawSockaddrVM struct {
Family uint16
Reserved1 uint16
Port uint32
Cid uint32
Zero [4]uint8
}
type RawSockaddr struct {
Family uint16
Data [14]int8
}
type RawSockaddrAny struct {
Addr RawSockaddr
Pad [96]int8
}
type _Socklen uint32
type Linger struct {
Onoff int32
Linger int32
}
type Iovec struct {
Base *byte
Len uint32
}
type IPMreq struct {
Multiaddr [4]byte /* in_addr */
Interface [4]byte /* in_addr */
}
type IPMreqn struct {
Multiaddr [4]byte /* in_addr */
Address [4]byte /* in_addr */
Ifindex int32
}
type IPv6Mreq struct {
Multiaddr [16]byte /* in6_addr */
Interface uint32
}
type PacketMreq struct {
Ifindex int32
Type uint16
Alen uint16
Address [8]uint8
}
type Msghdr struct {
Name *byte
Namelen uint32
Iov *Iovec
Iovlen uint32
Control *byte
Controllen uint32
Flags int32
}
type Cmsghdr struct {
Len uint32
Level int32
Type int32
}
type Inet4Pktinfo struct {
Ifindex int32
Spec_dst [4]byte /* in_addr */
Addr [4]byte /* in_addr */
}
type Inet6Pktinfo struct {
Addr [16]byte /* in6_addr */
Ifindex uint32
}
type IPv6MTUInfo struct {
Addr RawSockaddrInet6
Mtu uint32
}
type ICMPv6Filter struct {
Data [8]uint32
}
type Ucred struct {
Pid int32
Uid uint32
Gid uint32
}
type TCPInfo struct {
State uint8
Ca_state uint8
Retransmits uint8
Probes uint8
Backoff uint8
Options uint8
_ [2]byte
Rto uint32
Ato uint32
Snd_mss uint32
Rcv_mss uint32
Unacked uint32
Sacked uint32
Lost uint32
Retrans uint32
Fackets uint32
Last_data_sent uint32
Last_ack_sent uint32
Last_data_recv uint32
Last_ack_recv uint32
Pmtu uint32
Rcv_ssthresh uint32
Rtt uint32
Rttvar uint32
Snd_ssthresh uint32
Snd_cwnd uint32
Advmss uint32
Reordering uint32
Rcv_rtt uint32
Rcv_space uint32
Total_retrans uint32
}
const (
SizeofSockaddrInet4 = 0x10
SizeofSockaddrInet6 = 0x1c
SizeofSockaddrAny = 0x70
SizeofSockaddrUnix = 0x6e
SizeofSockaddrLinklayer = 0x14
SizeofSockaddrNetlink = 0xc
SizeofSockaddrHCI = 0x6
SizeofSockaddrL2 = 0xe
SizeofSockaddrCAN = 0x10
SizeofSockaddrALG = 0x58
SizeofSockaddrVM = 0x10
SizeofLinger = 0x8
SizeofIovec = 0x8
SizeofIPMreq = 0x8
SizeofIPMreqn = 0xc
SizeofIPv6Mreq = 0x14
SizeofPacketMreq = 0x10
SizeofMsghdr = 0x1c
SizeofCmsghdr = 0xc
SizeofInet4Pktinfo = 0xc
SizeofInet6Pktinfo = 0x14
SizeofIPv6MTUInfo = 0x20
SizeofICMPv6Filter = 0x20
SizeofUcred = 0xc
SizeofTCPInfo = 0x68
)
const (
IFA_UNSPEC = 0x0
IFA_ADDRESS = 0x1
IFA_LOCAL = 0x2
IFA_LABEL = 0x3
IFA_BROADCAST = 0x4
IFA_ANYCAST = 0x5
IFA_CACHEINFO = 0x6
IFA_MULTICAST = 0x7
IFLA_UNSPEC = 0x0
IFLA_ADDRESS = 0x1
IFLA_BROADCAST = 0x2
IFLA_IFNAME = 0x3
IFLA_MTU = 0x4
IFLA_LINK = 0x5
IFLA_QDISC = 0x6
IFLA_STATS = 0x7
IFLA_COST = 0x8
IFLA_PRIORITY = 0x9
IFLA_MASTER = 0xa
IFLA_WIRELESS = 0xb
IFLA_PROTINFO = 0xc
IFLA_TXQLEN = 0xd
IFLA_MAP = 0xe
IFLA_WEIGHT = 0xf
IFLA_OPERSTATE = 0x10
IFLA_LINKMODE = 0x11
IFLA_LINKINFO = 0x12
IFLA_NET_NS_PID = 0x13
IFLA_IFALIAS = 0x14
IFLA_NUM_VF = 0x15
IFLA_VFINFO_LIST = 0x16
IFLA_STATS64 = 0x17
IFLA_VF_PORTS = 0x18
IFLA_PORT_SELF = 0x19
IFLA_AF_SPEC = 0x1a
IFLA_GROUP = 0x1b
IFLA_NET_NS_FD = 0x1c
IFLA_EXT_MASK = 0x1d
IFLA_PROMISCUITY = 0x1e
IFLA_NUM_TX_QUEUES = 0x1f
IFLA_NUM_RX_QUEUES = 0x20
IFLA_CARRIER = 0x21
IFLA_PHYS_PORT_ID = 0x22
IFLA_CARRIER_CHANGES = 0x23
IFLA_PHYS_SWITCH_ID = 0x24
IFLA_LINK_NETNSID = 0x25
IFLA_PHYS_PORT_NAME = 0x26
IFLA_PROTO_DOWN = 0x27
IFLA_GSO_MAX_SEGS = 0x28
IFLA_GSO_MAX_SIZE = 0x29
IFLA_PAD = 0x2a
IFLA_XDP = 0x2b
IFLA_EVENT = 0x2c
IFLA_NEW_NETNSID = 0x2d
IFLA_IF_NETNSID = 0x2e
IFLA_MAX = 0x31
RT_SCOPE_UNIVERSE = 0x0
RT_SCOPE_SITE = 0xc8
RT_SCOPE_LINK = 0xfd
RT_SCOPE_HOST = 0xfe
RT_SCOPE_NOWHERE = 0xff
RT_TABLE_UNSPEC = 0x0
RT_TABLE_COMPAT = 0xfc
RT_TABLE_DEFAULT = 0xfd
RT_TABLE_MAIN = 0xfe
RT_TABLE_LOCAL = 0xff
RT_TABLE_MAX = 0xffffffff
RTA_UNSPEC = 0x0
RTA_DST = 0x1
RTA_SRC = 0x2
RTA_IIF = 0x3
RTA_OIF = 0x4
RTA_GATEWAY = 0x5
RTA_PRIORITY = 0x6
RTA_PREFSRC = 0x7
RTA_METRICS = 0x8
RTA_MULTIPATH = 0x9
RTA_FLOW = 0xb
RTA_CACHEINFO = 0xc
RTA_TABLE = 0xf
RTN_UNSPEC = 0x0
RTN_UNICAST = 0x1
RTN_LOCAL = 0x2
RTN_BROADCAST = 0x3
RTN_ANYCAST = 0x4
RTN_MULTICAST = 0x5
RTN_BLACKHOLE = 0x6
RTN_UNREACHABLE = 0x7
RTN_PROHIBIT = 0x8
RTN_THROW = 0x9
RTN_NAT = 0xa
RTN_XRESOLVE = 0xb
RTNLGRP_NONE = 0x0
RTNLGRP_LINK = 0x1
RTNLGRP_NOTIFY = 0x2
RTNLGRP_NEIGH = 0x3
RTNLGRP_TC = 0x4
RTNLGRP_IPV4_IFADDR = 0x5
RTNLGRP_IPV4_MROUTE = 0x6
RTNLGRP_IPV4_ROUTE = 0x7
RTNLGRP_IPV4_RULE = 0x8
RTNLGRP_IPV6_IFADDR = 0x9
RTNLGRP_IPV6_MROUTE = 0xa
RTNLGRP_IPV6_ROUTE = 0xb
RTNLGRP_IPV6_IFINFO = 0xc
RTNLGRP_IPV6_PREFIX = 0x12
RTNLGRP_IPV6_RULE = 0x13
RTNLGRP_ND_USEROPT = 0x14
SizeofNlMsghdr = 0x10
SizeofNlMsgerr = 0x14
SizeofRtGenmsg = 0x1
SizeofNlAttr = 0x4
SizeofRtAttr = 0x4
SizeofIfInfomsg = 0x10
SizeofIfAddrmsg = 0x8
SizeofRtMsg = 0xc
SizeofRtNexthop = 0x8
)
type NlMsghdr struct {
Len uint32
Type uint16
Flags uint16
Seq uint32
Pid uint32
}
type NlMsgerr struct {
Error int32
Msg NlMsghdr
}
type RtGenmsg struct {
Family uint8
}
type NlAttr struct {
Len uint16
Type uint16
}
type RtAttr struct {
Len uint16
Type uint16
}
type IfInfomsg struct {
Family uint8
_ uint8
Type uint16
Index int32
Flags uint32
Change uint32
}
type IfAddrmsg struct {
Family uint8
Prefixlen uint8
Flags uint8
Scope uint8
Index uint32
}
type RtMsg struct {
Family uint8
Dst_len uint8
Src_len uint8
Tos uint8
Table uint8
Protocol uint8
Scope uint8
Type uint8
Flags uint32
}
type RtNexthop struct {
Len uint16
Flags uint8
Hops uint8
Ifindex int32
}
const (
SizeofSockFilter = 0x8
SizeofSockFprog = 0x8
)
type SockFilter struct {
Code uint16
Jt uint8
Jf uint8
K uint32
}
type SockFprog struct {
Len uint16
_ [2]byte
Filter *SockFilter
}
type InotifyEvent struct {
Wd int32
Mask uint32
Cookie uint32
Len uint32
}
const SizeofInotifyEvent = 0x10
type PtraceRegs struct {
Regs [32]uint64
Lo uint64
Hi uint64
Epc uint64
Badvaddr uint64
Status uint64
Cause uint64
}
type FdSet struct {
Bits [32]int32
}
type Sysinfo_t struct {
Uptime int32
Loads [3]uint32
Totalram uint32
Freeram uint32
Sharedram uint32
Bufferram uint32
Totalswap uint32
Freeswap uint32
Procs uint16
Pad uint16
Totalhigh uint32
Freehigh uint32
Unit uint32
_ [8]int8
}
type Utsname struct {
Sysname [65]byte
Nodename [65]byte
Release [65]byte
Version [65]byte
Machine [65]byte
Domainname [65]byte
}
type Ustat_t struct {
Tfree int32
Tinode uint32
Fname [6]int8
Fpack [6]int8
}
type EpollEvent struct {
Events uint32
PadFd int32
Fd int32
Pad int32
}
const (
AT_EMPTY_PATH = 0x1000
AT_FDCWD = -0x64
AT_NO_AUTOMOUNT = 0x800
AT_REMOVEDIR = 0x200
AT_STATX_SYNC_AS_STAT = 0x0
AT_STATX_FORCE_SYNC = 0x2000
AT_STATX_DONT_SYNC = 0x4000
AT_SYMLINK_FOLLOW = 0x400
AT_SYMLINK_NOFOLLOW = 0x100
)
type PollFd struct {
Fd int32
Events int16
Revents int16
}
const (
POLLIN = 0x1
POLLPRI = 0x2
POLLOUT = 0x4
POLLRDHUP = 0x2000
POLLERR = 0x8
POLLHUP = 0x10
POLLNVAL = 0x20
)
type Sigset_t struct {
Val [32]uint32
}
const RNDGETENTCNT = 0x40045200
const PERF_IOC_FLAG_GROUP = 0x1
type Termios struct {
Iflag uint32
Oflag uint32
Cflag uint32
Lflag uint32
Line uint8
Cc [23]uint8
Ispeed uint32
Ospeed uint32
}
type Winsize struct {
Row uint16
Col uint16
Xpixel uint16
Ypixel uint16
}
type Taskstats struct {
Version uint16
_ [2]byte
Ac_exitcode uint32
Ac_flag uint8
Ac_nice uint8
_ [6]byte
Cpu_count uint64
Cpu_delay_total uint64
Blkio_count uint64
Blkio_delay_total uint64
Swapin_count uint64
Swapin_delay_total uint64
Cpu_run_real_total uint64
Cpu_run_virtual_total uint64
Ac_comm [32]int8
Ac_sched uint8
Ac_pad [3]uint8
_ [4]byte
Ac_uid uint32
Ac_gid uint32
Ac_pid uint32
Ac_ppid uint32
Ac_btime uint32
_ [4]byte
Ac_etime uint64
Ac_utime uint64
Ac_stime uint64
Ac_minflt uint64
Ac_majflt uint64
Coremem uint64
Virtmem uint64
Hiwater_rss uint64
Hiwater_vm uint64
Read_char uint64
Write_char uint64
Read_syscalls uint64
Write_syscalls uint64
Read_bytes uint64
Write_bytes uint64
Cancelled_write_bytes uint64
Nvcsw uint64
Nivcsw uint64
Ac_utimescaled uint64
Ac_stimescaled uint64
Cpu_scaled_run_real_total uint64
Freepages_count uint64
Freepages_delay_total uint64
}
const (
TASKSTATS_CMD_UNSPEC = 0x0
TASKSTATS_CMD_GET = 0x1
TASKSTATS_CMD_NEW = 0x2
TASKSTATS_TYPE_UNSPEC = 0x0
TASKSTATS_TYPE_PID = 0x1
TASKSTATS_TYPE_TGID = 0x2
TASKSTATS_TYPE_STATS = 0x3
TASKSTATS_TYPE_AGGR_PID = 0x4
TASKSTATS_TYPE_AGGR_TGID = 0x5
TASKSTATS_TYPE_NULL = 0x6
TASKSTATS_CMD_ATTR_UNSPEC = 0x0
TASKSTATS_CMD_ATTR_PID = 0x1
TASKSTATS_CMD_ATTR_TGID = 0x2
TASKSTATS_CMD_ATTR_REGISTER_CPUMASK = 0x3
TASKSTATS_CMD_ATTR_DEREGISTER_CPUMASK = 0x4
)
type CGroupStats struct {
Sleeping uint64
Running uint64
Stopped uint64
Uninterruptible uint64
Io_wait uint64
}
const (
CGROUPSTATS_CMD_UNSPEC = 0x3
CGROUPSTATS_CMD_GET = 0x4
CGROUPSTATS_CMD_NEW = 0x5
CGROUPSTATS_TYPE_UNSPEC = 0x0
CGROUPSTATS_TYPE_CGROUP_STATS = 0x1
CGROUPSTATS_CMD_ATTR_UNSPEC = 0x0
CGROUPSTATS_CMD_ATTR_FD = 0x1
)
type Genlmsghdr struct {
Cmd uint8
Version uint8
Reserved uint16
}
const (
CTRL_CMD_UNSPEC = 0x0
CTRL_CMD_NEWFAMILY = 0x1
CTRL_CMD_DELFAMILY = 0x2
CTRL_CMD_GETFAMILY = 0x3
CTRL_CMD_NEWOPS = 0x4
CTRL_CMD_DELOPS = 0x5
CTRL_CMD_GETOPS = 0x6
CTRL_CMD_NEWMCAST_GRP = 0x7
CTRL_CMD_DELMCAST_GRP = 0x8
CTRL_CMD_GETMCAST_GRP = 0x9
CTRL_ATTR_UNSPEC = 0x0
CTRL_ATTR_FAMILY_ID = 0x1
CTRL_ATTR_FAMILY_NAME = 0x2
CTRL_ATTR_VERSION = 0x3
CTRL_ATTR_HDRSIZE = 0x4
CTRL_ATTR_MAXATTR = 0x5
CTRL_ATTR_OPS = 0x6
CTRL_ATTR_MCAST_GROUPS = 0x7
CTRL_ATTR_OP_UNSPEC = 0x0
CTRL_ATTR_OP_ID = 0x1
CTRL_ATTR_OP_FLAGS = 0x2
CTRL_ATTR_MCAST_GRP_UNSPEC = 0x0
CTRL_ATTR_MCAST_GRP_NAME = 0x1
CTRL_ATTR_MCAST_GRP_ID = 0x2
)
type cpuMask uint32
const (
_CPU_SETSIZE = 0x400
_NCPUBITS = 0x20
)
const (
BDADDR_BREDR = 0x0
BDADDR_LE_PUBLIC = 0x1
BDADDR_LE_RANDOM = 0x2
)
type PerfEventAttr struct {
Type uint32
Size uint32
Config uint64
Sample uint64
Sample_type uint64
Read_format uint64
Bits uint64
Wakeup uint32
Bp_type uint32
Ext1 uint64
Ext2 uint64
Branch_sample_type uint64
Sample_regs_user uint64
Sample_stack_user uint32
Clockid int32
Sample_regs_intr uint64
Aux_watermark uint32
_ uint32
}
type PerfEventMmapPage struct {
Version uint32
Compat_version uint32
Lock uint32
Index uint32
Offset int64
Time_enabled uint64
Time_running uint64
Capabilities uint64
Pmc_width uint16
Time_shift uint16
Time_mult uint32
Time_offset uint64
Time_zero uint64
Size uint32
_ [948]uint8
Data_head uint64
Data_tail uint64
Data_offset uint64
Data_size uint64
Aux_head uint64
Aux_tail uint64
Aux_offset uint64
Aux_size uint64
}
const (
PerfBitDisabled uint64 = CBitFieldMaskBit0
PerfBitInherit = CBitFieldMaskBit1
PerfBitPinned = CBitFieldMaskBit2
PerfBitExclusive = CBitFieldMaskBit3
PerfBitExcludeUser = CBitFieldMaskBit4
PerfBitExcludeKernel = CBitFieldMaskBit5
PerfBitExcludeHv = CBitFieldMaskBit6
PerfBitExcludeIdle = CBitFieldMaskBit7
PerfBitMmap = CBitFieldMaskBit8
PerfBitComm = CBitFieldMaskBit9
PerfBitFreq = CBitFieldMaskBit10
PerfBitInheritStat = CBitFieldMaskBit11
PerfBitEnableOnExec = CBitFieldMaskBit12
PerfBitTask = CBitFieldMaskBit13
PerfBitWatermark = CBitFieldMaskBit14
PerfBitPreciseIPBit1 = CBitFieldMaskBit15
PerfBitPreciseIPBit2 = CBitFieldMaskBit16
PerfBitMmapData = CBitFieldMaskBit17
PerfBitSampleIDAll = CBitFieldMaskBit18
PerfBitExcludeHost = CBitFieldMaskBit19
PerfBitExcludeGuest = CBitFieldMaskBit20
PerfBitExcludeCallchainKernel = CBitFieldMaskBit21
PerfBitExcludeCallchainUser = CBitFieldMaskBit22
PerfBitMmap2 = CBitFieldMaskBit23
PerfBitCommExec = CBitFieldMaskBit24
PerfBitUseClockID = CBitFieldMaskBit25
PerfBitContextSwitch = CBitFieldMaskBit26
)
const (
PERF_TYPE_HARDWARE = 0x0
PERF_TYPE_SOFTWARE = 0x1
PERF_TYPE_TRACEPOINT = 0x2
PERF_TYPE_HW_CACHE = 0x3
PERF_TYPE_RAW = 0x4
PERF_TYPE_BREAKPOINT = 0x5
PERF_COUNT_HW_CPU_CYCLES = 0x0
PERF_COUNT_HW_INSTRUCTIONS = 0x1
PERF_COUNT_HW_CACHE_REFERENCES = 0x2
PERF_COUNT_HW_CACHE_MISSES = 0x3
PERF_COUNT_HW_BRANCH_INSTRUCTIONS = 0x4
PERF_COUNT_HW_BRANCH_MISSES = 0x5
PERF_COUNT_HW_BUS_CYCLES = 0x6
PERF_COUNT_HW_STALLED_CYCLES_FRONTEND = 0x7
PERF_COUNT_HW_STALLED_CYCLES_BACKEND = 0x8
PERF_COUNT_HW_REF_CPU_CYCLES = 0x9
PERF_COUNT_HW_CACHE_L1D = 0x0
PERF_COUNT_HW_CACHE_L1I = 0x1
PERF_COUNT_HW_CACHE_LL = 0x2
PERF_COUNT_HW_CACHE_DTLB = 0x3
PERF_COUNT_HW_CACHE_ITLB = 0x4
PERF_COUNT_HW_CACHE_BPU = 0x5
PERF_COUNT_HW_CACHE_NODE = 0x6
PERF_COUNT_HW_CACHE_OP_READ = 0x0
PERF_COUNT_HW_CACHE_OP_WRITE = 0x1
PERF_COUNT_HW_CACHE_OP_PREFETCH = 0x2
PERF_COUNT_HW_CACHE_RESULT_ACCESS = 0x0
PERF_COUNT_HW_CACHE_RESULT_MISS = 0x1
PERF_COUNT_SW_CPU_CLOCK = 0x0
PERF_COUNT_SW_TASK_CLOCK = 0x1
PERF_COUNT_SW_PAGE_FAULTS = 0x2
PERF_COUNT_SW_CONTEXT_SWITCHES = 0x3
PERF_COUNT_SW_CPU_MIGRATIONS = 0x4
PERF_COUNT_SW_PAGE_FAULTS_MIN = 0x5
PERF_COUNT_SW_PAGE_FAULTS_MAJ = 0x6
PERF_COUNT_SW_ALIGNMENT_FAULTS = 0x7
PERF_COUNT_SW_EMULATION_FAULTS = 0x8
PERF_COUNT_SW_DUMMY = 0x9
PERF_SAMPLE_IP = 0x1
PERF_SAMPLE_TID = 0x2
PERF_SAMPLE_TIME = 0x4
PERF_SAMPLE_ADDR = 0x8
PERF_SAMPLE_READ = 0x10
PERF_SAMPLE_CALLCHAIN = 0x20
PERF_SAMPLE_ID = 0x40
PERF_SAMPLE_CPU = 0x80
PERF_SAMPLE_PERIOD = 0x100
PERF_SAMPLE_STREAM_ID = 0x200
PERF_SAMPLE_RAW = 0x400
PERF_SAMPLE_BRANCH_STACK = 0x800
PERF_SAMPLE_BRANCH_USER = 0x1
PERF_SAMPLE_BRANCH_KERNEL = 0x2
PERF_SAMPLE_BRANCH_HV = 0x4
PERF_SAMPLE_BRANCH_ANY = 0x8
PERF_SAMPLE_BRANCH_ANY_CALL = 0x10
PERF_SAMPLE_BRANCH_ANY_RETURN = 0x20
PERF_SAMPLE_BRANCH_IND_CALL = 0x40
PERF_FORMAT_TOTAL_TIME_ENABLED = 0x1
PERF_FORMAT_TOTAL_TIME_RUNNING = 0x2
PERF_FORMAT_ID = 0x4
PERF_FORMAT_GROUP = 0x8
PERF_RECORD_MMAP = 0x1
PERF_RECORD_LOST = 0x2
PERF_RECORD_COMM = 0x3
PERF_RECORD_EXIT = 0x4
PERF_RECORD_THROTTLE = 0x5
PERF_RECORD_UNTHROTTLE = 0x6
PERF_RECORD_FORK = 0x7
PERF_RECORD_READ = 0x8
PERF_RECORD_SAMPLE = 0x9
PERF_CONTEXT_HV = -0x20
PERF_CONTEXT_KERNEL = -0x80
PERF_CONTEXT_USER = -0x200
PERF_CONTEXT_GUEST = -0x800
PERF_CONTEXT_GUEST_KERNEL = -0x880
PERF_CONTEXT_GUEST_USER = -0xa00
PERF_FLAG_FD_NO_GROUP = 0x1
PERF_FLAG_FD_OUTPUT = 0x2
PERF_FLAG_PID_CGROUP = 0x4
)
const (
CBitFieldMaskBit0 = 0x8000000000000000
CBitFieldMaskBit1 = 0x4000000000000000
CBitFieldMaskBit2 = 0x2000000000000000
CBitFieldMaskBit3 = 0x1000000000000000
CBitFieldMaskBit4 = 0x800000000000000
CBitFieldMaskBit5 = 0x400000000000000
CBitFieldMaskBit6 = 0x200000000000000
CBitFieldMaskBit7 = 0x100000000000000
CBitFieldMaskBit8 = 0x80000000000000
CBitFieldMaskBit9 = 0x40000000000000
CBitFieldMaskBit10 = 0x20000000000000
CBitFieldMaskBit11 = 0x10000000000000
CBitFieldMaskBit12 = 0x8000000000000
CBitFieldMaskBit13 = 0x4000000000000
CBitFieldMaskBit14 = 0x2000000000000
CBitFieldMaskBit15 = 0x1000000000000
CBitFieldMaskBit16 = 0x800000000000
CBitFieldMaskBit17 = 0x400000000000
CBitFieldMaskBit18 = 0x200000000000
CBitFieldMaskBit19 = 0x100000000000
CBitFieldMaskBit20 = 0x80000000000
CBitFieldMaskBit21 = 0x40000000000
CBitFieldMaskBit22 = 0x20000000000
CBitFieldMaskBit23 = 0x10000000000
CBitFieldMaskBit24 = 0x8000000000
CBitFieldMaskBit25 = 0x4000000000
CBitFieldMaskBit26 = 0x2000000000
CBitFieldMaskBit27 = 0x1000000000
CBitFieldMaskBit28 = 0x800000000
CBitFieldMaskBit29 = 0x400000000
CBitFieldMaskBit30 = 0x200000000
CBitFieldMaskBit31 = 0x100000000
CBitFieldMaskBit32 = 0x80000000
CBitFieldMaskBit33 = 0x40000000
CBitFieldMaskBit34 = 0x20000000
CBitFieldMaskBit35 = 0x10000000
CBitFieldMaskBit36 = 0x8000000
CBitFieldMaskBit37 = 0x4000000
CBitFieldMaskBit38 = 0x2000000
CBitFieldMaskBit39 = 0x1000000
CBitFieldMaskBit40 = 0x800000
CBitFieldMaskBit41 = 0x400000
CBitFieldMaskBit42 = 0x200000
CBitFieldMaskBit43 = 0x100000
CBitFieldMaskBit44 = 0x80000
CBitFieldMaskBit45 = 0x40000
CBitFieldMaskBit46 = 0x20000
CBitFieldMaskBit47 = 0x10000
CBitFieldMaskBit48 = 0x8000
CBitFieldMaskBit49 = 0x4000
CBitFieldMaskBit50 = 0x2000
CBitFieldMaskBit51 = 0x1000
CBitFieldMaskBit52 = 0x800
CBitFieldMaskBit53 = 0x400
CBitFieldMaskBit54 = 0x200
CBitFieldMaskBit55 = 0x100
CBitFieldMaskBit56 = 0x80
CBitFieldMaskBit57 = 0x40
CBitFieldMaskBit58 = 0x20
CBitFieldMaskBit59 = 0x10
CBitFieldMaskBit60 = 0x8
CBitFieldMaskBit61 = 0x4
CBitFieldMaskBit62 = 0x2
CBitFieldMaskBit63 = 0x1
)
type SockaddrStorage struct {
Family uint16
_ [122]int8
_ uint32
}
type TCPMD5Sig struct {
Addr SockaddrStorage
Flags uint8
Prefixlen uint8
Keylen uint16
_ uint32
Key [80]uint8
}
type HDDriveCmdHdr struct {
Command uint8
Number uint8
Feature uint8
Count uint8
}
type HDGeometry struct {
Heads uint8
Sectors uint8
Cylinders uint16
Start uint32
}
type HDDriveID struct {
Config uint16
Cyls uint16
Reserved2 uint16
Heads uint16
Track_bytes uint16
Sector_bytes uint16
Sectors uint16
Vendor0 uint16
Vendor1 uint16
Vendor2 uint16
Serial_no [20]uint8
Buf_type uint16
Buf_size uint16
Ecc_bytes uint16
Fw_rev [8]uint8
Model [40]uint8
Max_multsect uint8
Vendor3 uint8
Dword_io uint16
Vendor4 uint8
Capability uint8
Reserved50 uint16
Vendor5 uint8
TPIO uint8
Vendor6 uint8
TDMA uint8
Field_valid uint16
Cur_cyls uint16
Cur_heads uint16
Cur_sectors uint16
Cur_capacity0 uint16
Cur_capacity1 uint16
Multsect uint8
Multsect_valid uint8
Lba_capacity uint32
Dma_1word uint16
Dma_mword uint16
Eide_pio_modes uint16
Eide_dma_min uint16
Eide_dma_time uint16
Eide_pio uint16
Eide_pio_iordy uint16
Words69_70 [2]uint16
Words71_74 [4]uint16
Queue_depth uint16
Words76_79 [4]uint16
Major_rev_num uint16
Minor_rev_num uint16
Command_set_1 uint16
Command_set_2 uint16
Cfsse uint16
Cfs_enable_1 uint16
Cfs_enable_2 uint16
Csf_default uint16
Dma_ultra uint16
Trseuc uint16
TrsEuc uint16
CurAPMvalues uint16
Mprc uint16
Hw_config uint16
Acoustic uint16
Msrqs uint16
Sxfert uint16
Sal uint16
Spg uint32
Lba_capacity_2 uint64
Words104_125 [22]uint16
Last_lun uint16
Word127 uint16
Dlf uint16
Csfo uint16
Words130_155 [26]uint16
Word156 uint16
Words157_159 [3]uint16
Cfa_power uint16
Words161_175 [15]uint16
Words176_205 [30]uint16
Words206_254 [49]uint16
Integrity_word uint16
}
type Statfs_t struct {
Type int32
Bsize int32
Frsize int32
_ [4]byte
Blocks uint64
Bfree uint64
Files uint64
Ffree uint64
Bavail uint64
Fsid Fsid
Namelen int32
Flags int32
Spare [5]int32
_ [4]byte
}
const (
ST_MANDLOCK = 0x40
ST_NOATIME = 0x400
ST_NODEV = 0x4
ST_NODIRATIME = 0x800
ST_NOEXEC = 0x8
ST_NOSUID = 0x2
ST_RDONLY = 0x1
ST_RELATIME = 0x1000
ST_SYNCHRONOUS = 0x10
)
type TpacketHdr struct {
Status uint32
Len uint32
Snaplen uint32
Mac uint16
Net uint16
Sec uint32
Usec uint32
}
type Tpacket2Hdr struct {
Status uint32
Len uint32
Snaplen uint32
Mac uint16
Net uint16
Sec uint32
Nsec uint32
Vlan_tci uint16
Vlan_tpid uint16
_ [4]uint8
}
type Tpacket3Hdr struct {
Next_offset uint32
Sec uint32
Nsec uint32
Snaplen uint32
Len uint32
Status uint32
Mac uint16
Net uint16
Hv1 TpacketHdrVariant1
_ [8]uint8
}
type TpacketHdrVariant1 struct {
Rxhash uint32
Vlan_tci uint32
Vlan_tpid uint16
_ uint16
}
type TpacketBlockDesc struct {
Version uint32
To_priv uint32
Hdr [40]byte
}
type TpacketReq struct {
Block_size uint32
Block_nr uint32
Frame_size uint32
Frame_nr uint32
}
type TpacketReq3 struct {
Block_size uint32
Block_nr uint32
Frame_size uint32
Frame_nr uint32
Retire_blk_tov uint32
Sizeof_priv uint32
Feature_req_word uint32
}
type TpacketStats struct {
Packets uint32
Drops uint32
}
type TpacketStatsV3 struct {
Packets uint32
Drops uint32
Freeze_q_cnt uint32
}
type TpacketAuxdata struct {
Status uint32
Len uint32
Snaplen uint32
Mac uint16
Net uint16
Vlan_tci uint16
Vlan_tpid uint16
}
const (
TPACKET_V1 = 0x0
TPACKET_V2 = 0x1
TPACKET_V3 = 0x2
)
const (
SizeofTpacketHdr = 0x18
SizeofTpacket2Hdr = 0x20
SizeofTpacket3Hdr = 0x30
)
const (
NF_INET_PRE_ROUTING = 0x0
NF_INET_LOCAL_IN = 0x1
NF_INET_FORWARD = 0x2
NF_INET_LOCAL_OUT = 0x3
NF_INET_POST_ROUTING = 0x4
NF_INET_NUMHOOKS = 0x5
)
const (
NF_NETDEV_INGRESS = 0x0
NF_NETDEV_NUMHOOKS = 0x1
)
const (
NFPROTO_UNSPEC = 0x0
NFPROTO_INET = 0x1
NFPROTO_IPV4 = 0x2
NFPROTO_ARP = 0x3
NFPROTO_NETDEV = 0x5
NFPROTO_BRIDGE = 0x7
NFPROTO_IPV6 = 0xa
NFPROTO_DECNET = 0xc
NFPROTO_NUMPROTO = 0xd
)
type Nfgenmsg struct {<|fim▁hole|> Nfgen_family uint8
Version uint8
Res_id uint16
}
const (
NFNL_BATCH_UNSPEC = 0x0
NFNL_BATCH_GENID = 0x1
)
const (
NFT_REG_VERDICT = 0x0
NFT_REG_1 = 0x1
NFT_REG_2 = 0x2
NFT_REG_3 = 0x3
NFT_REG_4 = 0x4
NFT_REG32_00 = 0x8
NFT_REG32_01 = 0x9
NFT_REG32_02 = 0xa
NFT_REG32_03 = 0xb
NFT_REG32_04 = 0xc
NFT_REG32_05 = 0xd
NFT_REG32_06 = 0xe
NFT_REG32_07 = 0xf
NFT_REG32_08 = 0x10
NFT_REG32_09 = 0x11
NFT_REG32_10 = 0x12
NFT_REG32_11 = 0x13
NFT_REG32_12 = 0x14
NFT_REG32_13 = 0x15
NFT_REG32_14 = 0x16
NFT_REG32_15 = 0x17
NFT_CONTINUE = -0x1
NFT_BREAK = -0x2
NFT_JUMP = -0x3
NFT_GOTO = -0x4
NFT_RETURN = -0x5
NFT_MSG_NEWTABLE = 0x0
NFT_MSG_GETTABLE = 0x1
NFT_MSG_DELTABLE = 0x2
NFT_MSG_NEWCHAIN = 0x3
NFT_MSG_GETCHAIN = 0x4
NFT_MSG_DELCHAIN = 0x5
NFT_MSG_NEWRULE = 0x6
NFT_MSG_GETRULE = 0x7
NFT_MSG_DELRULE = 0x8
NFT_MSG_NEWSET = 0x9
NFT_MSG_GETSET = 0xa
NFT_MSG_DELSET = 0xb
NFT_MSG_NEWSETELEM = 0xc
NFT_MSG_GETSETELEM = 0xd
NFT_MSG_DELSETELEM = 0xe
NFT_MSG_NEWGEN = 0xf
NFT_MSG_GETGEN = 0x10
NFT_MSG_TRACE = 0x11
NFT_MSG_NEWOBJ = 0x12
NFT_MSG_GETOBJ = 0x13
NFT_MSG_DELOBJ = 0x14
NFT_MSG_GETOBJ_RESET = 0x15
NFT_MSG_MAX = 0x19
NFTA_LIST_UNPEC = 0x0
NFTA_LIST_ELEM = 0x1
NFTA_HOOK_UNSPEC = 0x0
NFTA_HOOK_HOOKNUM = 0x1
NFTA_HOOK_PRIORITY = 0x2
NFTA_HOOK_DEV = 0x3
NFT_TABLE_F_DORMANT = 0x1
NFTA_TABLE_UNSPEC = 0x0
NFTA_TABLE_NAME = 0x1
NFTA_TABLE_FLAGS = 0x2
NFTA_TABLE_USE = 0x3
NFTA_CHAIN_UNSPEC = 0x0
NFTA_CHAIN_TABLE = 0x1
NFTA_CHAIN_HANDLE = 0x2
NFTA_CHAIN_NAME = 0x3
NFTA_CHAIN_HOOK = 0x4
NFTA_CHAIN_POLICY = 0x5
NFTA_CHAIN_USE = 0x6
NFTA_CHAIN_TYPE = 0x7
NFTA_CHAIN_COUNTERS = 0x8
NFTA_CHAIN_PAD = 0x9
NFTA_RULE_UNSPEC = 0x0
NFTA_RULE_TABLE = 0x1
NFTA_RULE_CHAIN = 0x2
NFTA_RULE_HANDLE = 0x3
NFTA_RULE_EXPRESSIONS = 0x4
NFTA_RULE_COMPAT = 0x5
NFTA_RULE_POSITION = 0x6
NFTA_RULE_USERDATA = 0x7
NFTA_RULE_PAD = 0x8
NFTA_RULE_ID = 0x9
NFT_RULE_COMPAT_F_INV = 0x2
NFT_RULE_COMPAT_F_MASK = 0x2
NFTA_RULE_COMPAT_UNSPEC = 0x0
NFTA_RULE_COMPAT_PROTO = 0x1
NFTA_RULE_COMPAT_FLAGS = 0x2
NFT_SET_ANONYMOUS = 0x1
NFT_SET_CONSTANT = 0x2
NFT_SET_INTERVAL = 0x4
NFT_SET_MAP = 0x8
NFT_SET_TIMEOUT = 0x10
NFT_SET_EVAL = 0x20
NFT_SET_OBJECT = 0x40
NFT_SET_POL_PERFORMANCE = 0x0
NFT_SET_POL_MEMORY = 0x1
NFTA_SET_DESC_UNSPEC = 0x0
NFTA_SET_DESC_SIZE = 0x1
NFTA_SET_UNSPEC = 0x0
NFTA_SET_TABLE = 0x1
NFTA_SET_NAME = 0x2
NFTA_SET_FLAGS = 0x3
NFTA_SET_KEY_TYPE = 0x4
NFTA_SET_KEY_LEN = 0x5
NFTA_SET_DATA_TYPE = 0x6
NFTA_SET_DATA_LEN = 0x7
NFTA_SET_POLICY = 0x8
NFTA_SET_DESC = 0x9
NFTA_SET_ID = 0xa
NFTA_SET_TIMEOUT = 0xb
NFTA_SET_GC_INTERVAL = 0xc
NFTA_SET_USERDATA = 0xd
NFTA_SET_PAD = 0xe
NFTA_SET_OBJ_TYPE = 0xf
NFT_SET_ELEM_INTERVAL_END = 0x1
NFTA_SET_ELEM_UNSPEC = 0x0
NFTA_SET_ELEM_KEY = 0x1
NFTA_SET_ELEM_DATA = 0x2
NFTA_SET_ELEM_FLAGS = 0x3
NFTA_SET_ELEM_TIMEOUT = 0x4
NFTA_SET_ELEM_EXPIRATION = 0x5
NFTA_SET_ELEM_USERDATA = 0x6
NFTA_SET_ELEM_EXPR = 0x7
NFTA_SET_ELEM_PAD = 0x8
NFTA_SET_ELEM_OBJREF = 0x9
NFTA_SET_ELEM_LIST_UNSPEC = 0x0
NFTA_SET_ELEM_LIST_TABLE = 0x1
NFTA_SET_ELEM_LIST_SET = 0x2
NFTA_SET_ELEM_LIST_ELEMENTS = 0x3
NFTA_SET_ELEM_LIST_SET_ID = 0x4
NFT_DATA_VALUE = 0x0
NFT_DATA_VERDICT = 0xffffff00
NFTA_DATA_UNSPEC = 0x0
NFTA_DATA_VALUE = 0x1
NFTA_DATA_VERDICT = 0x2
NFTA_VERDICT_UNSPEC = 0x0
NFTA_VERDICT_CODE = 0x1
NFTA_VERDICT_CHAIN = 0x2
NFTA_EXPR_UNSPEC = 0x0
NFTA_EXPR_NAME = 0x1
NFTA_EXPR_DATA = 0x2
NFTA_IMMEDIATE_UNSPEC = 0x0
NFTA_IMMEDIATE_DREG = 0x1
NFTA_IMMEDIATE_DATA = 0x2
NFTA_BITWISE_UNSPEC = 0x0
NFTA_BITWISE_SREG = 0x1
NFTA_BITWISE_DREG = 0x2
NFTA_BITWISE_LEN = 0x3
NFTA_BITWISE_MASK = 0x4
NFTA_BITWISE_XOR = 0x5
NFT_BYTEORDER_NTOH = 0x0
NFT_BYTEORDER_HTON = 0x1
NFTA_BYTEORDER_UNSPEC = 0x0
NFTA_BYTEORDER_SREG = 0x1
NFTA_BYTEORDER_DREG = 0x2
NFTA_BYTEORDER_OP = 0x3
NFTA_BYTEORDER_LEN = 0x4
NFTA_BYTEORDER_SIZE = 0x5
NFT_CMP_EQ = 0x0
NFT_CMP_NEQ = 0x1
NFT_CMP_LT = 0x2
NFT_CMP_LTE = 0x3
NFT_CMP_GT = 0x4
NFT_CMP_GTE = 0x5
NFTA_CMP_UNSPEC = 0x0
NFTA_CMP_SREG = 0x1
NFTA_CMP_OP = 0x2
NFTA_CMP_DATA = 0x3
NFT_RANGE_EQ = 0x0
NFT_RANGE_NEQ = 0x1
NFTA_RANGE_UNSPEC = 0x0
NFTA_RANGE_SREG = 0x1
NFTA_RANGE_OP = 0x2
NFTA_RANGE_FROM_DATA = 0x3
NFTA_RANGE_TO_DATA = 0x4
NFT_LOOKUP_F_INV = 0x1
NFTA_LOOKUP_UNSPEC = 0x0
NFTA_LOOKUP_SET = 0x1
NFTA_LOOKUP_SREG = 0x2
NFTA_LOOKUP_DREG = 0x3
NFTA_LOOKUP_SET_ID = 0x4
NFTA_LOOKUP_FLAGS = 0x5
NFT_DYNSET_OP_ADD = 0x0
NFT_DYNSET_OP_UPDATE = 0x1
NFT_DYNSET_F_INV = 0x1
NFTA_DYNSET_UNSPEC = 0x0
NFTA_DYNSET_SET_NAME = 0x1
NFTA_DYNSET_SET_ID = 0x2
NFTA_DYNSET_OP = 0x3
NFTA_DYNSET_SREG_KEY = 0x4
NFTA_DYNSET_SREG_DATA = 0x5
NFTA_DYNSET_TIMEOUT = 0x6
NFTA_DYNSET_EXPR = 0x7
NFTA_DYNSET_PAD = 0x8
NFTA_DYNSET_FLAGS = 0x9
NFT_PAYLOAD_LL_HEADER = 0x0
NFT_PAYLOAD_NETWORK_HEADER = 0x1
NFT_PAYLOAD_TRANSPORT_HEADER = 0x2
NFT_PAYLOAD_CSUM_NONE = 0x0
NFT_PAYLOAD_CSUM_INET = 0x1
NFT_PAYLOAD_L4CSUM_PSEUDOHDR = 0x1
NFTA_PAYLOAD_UNSPEC = 0x0
NFTA_PAYLOAD_DREG = 0x1
NFTA_PAYLOAD_BASE = 0x2
NFTA_PAYLOAD_OFFSET = 0x3
NFTA_PAYLOAD_LEN = 0x4
NFTA_PAYLOAD_SREG = 0x5
NFTA_PAYLOAD_CSUM_TYPE = 0x6
NFTA_PAYLOAD_CSUM_OFFSET = 0x7
NFTA_PAYLOAD_CSUM_FLAGS = 0x8
NFT_EXTHDR_F_PRESENT = 0x1
NFT_EXTHDR_OP_IPV6 = 0x0
NFT_EXTHDR_OP_TCPOPT = 0x1
NFTA_EXTHDR_UNSPEC = 0x0
NFTA_EXTHDR_DREG = 0x1
NFTA_EXTHDR_TYPE = 0x2
NFTA_EXTHDR_OFFSET = 0x3
NFTA_EXTHDR_LEN = 0x4
NFTA_EXTHDR_FLAGS = 0x5
NFTA_EXTHDR_OP = 0x6
NFTA_EXTHDR_SREG = 0x7
NFT_META_LEN = 0x0
NFT_META_PROTOCOL = 0x1
NFT_META_PRIORITY = 0x2
NFT_META_MARK = 0x3
NFT_META_IIF = 0x4
NFT_META_OIF = 0x5
NFT_META_IIFNAME = 0x6
NFT_META_OIFNAME = 0x7
NFT_META_IIFTYPE = 0x8
NFT_META_OIFTYPE = 0x9
NFT_META_SKUID = 0xa
NFT_META_SKGID = 0xb
NFT_META_NFTRACE = 0xc
NFT_META_RTCLASSID = 0xd
NFT_META_SECMARK = 0xe
NFT_META_NFPROTO = 0xf
NFT_META_L4PROTO = 0x10
NFT_META_BRI_IIFNAME = 0x11
NFT_META_BRI_OIFNAME = 0x12
NFT_META_PKTTYPE = 0x13
NFT_META_CPU = 0x14
NFT_META_IIFGROUP = 0x15
NFT_META_OIFGROUP = 0x16
NFT_META_CGROUP = 0x17
NFT_META_PRANDOM = 0x18
NFT_RT_CLASSID = 0x0
NFT_RT_NEXTHOP4 = 0x1
NFT_RT_NEXTHOP6 = 0x2
NFT_RT_TCPMSS = 0x3
NFT_HASH_JENKINS = 0x0
NFT_HASH_SYM = 0x1
NFTA_HASH_UNSPEC = 0x0
NFTA_HASH_SREG = 0x1
NFTA_HASH_DREG = 0x2
NFTA_HASH_LEN = 0x3
NFTA_HASH_MODULUS = 0x4
NFTA_HASH_SEED = 0x5
NFTA_HASH_OFFSET = 0x6
NFTA_HASH_TYPE = 0x7
NFTA_META_UNSPEC = 0x0
NFTA_META_DREG = 0x1
NFTA_META_KEY = 0x2
NFTA_META_SREG = 0x3
NFTA_RT_UNSPEC = 0x0
NFTA_RT_DREG = 0x1
NFTA_RT_KEY = 0x2
NFT_CT_STATE = 0x0
NFT_CT_DIRECTION = 0x1
NFT_CT_STATUS = 0x2
NFT_CT_MARK = 0x3
NFT_CT_SECMARK = 0x4
NFT_CT_EXPIRATION = 0x5
NFT_CT_HELPER = 0x6
NFT_CT_L3PROTOCOL = 0x7
NFT_CT_SRC = 0x8
NFT_CT_DST = 0x9
NFT_CT_PROTOCOL = 0xa
NFT_CT_PROTO_SRC = 0xb
NFT_CT_PROTO_DST = 0xc
NFT_CT_LABELS = 0xd
NFT_CT_PKTS = 0xe
NFT_CT_BYTES = 0xf
NFT_CT_AVGPKT = 0x10
NFT_CT_ZONE = 0x11
NFT_CT_EVENTMASK = 0x12
NFTA_CT_UNSPEC = 0x0
NFTA_CT_DREG = 0x1
NFTA_CT_KEY = 0x2
NFTA_CT_DIRECTION = 0x3
NFTA_CT_SREG = 0x4
NFT_LIMIT_PKTS = 0x0
NFT_LIMIT_PKT_BYTES = 0x1
NFT_LIMIT_F_INV = 0x1
NFTA_LIMIT_UNSPEC = 0x0
NFTA_LIMIT_RATE = 0x1
NFTA_LIMIT_UNIT = 0x2
NFTA_LIMIT_BURST = 0x3
NFTA_LIMIT_TYPE = 0x4
NFTA_LIMIT_FLAGS = 0x5
NFTA_LIMIT_PAD = 0x6
NFTA_COUNTER_UNSPEC = 0x0
NFTA_COUNTER_BYTES = 0x1
NFTA_COUNTER_PACKETS = 0x2
NFTA_COUNTER_PAD = 0x3
NFTA_LOG_UNSPEC = 0x0
NFTA_LOG_GROUP = 0x1
NFTA_LOG_PREFIX = 0x2
NFTA_LOG_SNAPLEN = 0x3
NFTA_LOG_QTHRESHOLD = 0x4
NFTA_LOG_LEVEL = 0x5
NFTA_LOG_FLAGS = 0x6
NFTA_QUEUE_UNSPEC = 0x0
NFTA_QUEUE_NUM = 0x1
NFTA_QUEUE_TOTAL = 0x2
NFTA_QUEUE_FLAGS = 0x3
NFTA_QUEUE_SREG_QNUM = 0x4
NFT_QUOTA_F_INV = 0x1
NFT_QUOTA_F_DEPLETED = 0x2
NFTA_QUOTA_UNSPEC = 0x0
NFTA_QUOTA_BYTES = 0x1
NFTA_QUOTA_FLAGS = 0x2
NFTA_QUOTA_PAD = 0x3
NFTA_QUOTA_CONSUMED = 0x4
NFT_REJECT_ICMP_UNREACH = 0x0
NFT_REJECT_TCP_RST = 0x1
NFT_REJECT_ICMPX_UNREACH = 0x2
NFT_REJECT_ICMPX_NO_ROUTE = 0x0
NFT_REJECT_ICMPX_PORT_UNREACH = 0x1
NFT_REJECT_ICMPX_HOST_UNREACH = 0x2
NFT_REJECT_ICMPX_ADMIN_PROHIBITED = 0x3
NFTA_REJECT_UNSPEC = 0x0
NFTA_REJECT_TYPE = 0x1
NFTA_REJECT_ICMP_CODE = 0x2
NFT_NAT_SNAT = 0x0
NFT_NAT_DNAT = 0x1
NFTA_NAT_UNSPEC = 0x0
NFTA_NAT_TYPE = 0x1
NFTA_NAT_FAMILY = 0x2
NFTA_NAT_REG_ADDR_MIN = 0x3
NFTA_NAT_REG_ADDR_MAX = 0x4
NFTA_NAT_REG_PROTO_MIN = 0x5
NFTA_NAT_REG_PROTO_MAX = 0x6
NFTA_NAT_FLAGS = 0x7
NFTA_MASQ_UNSPEC = 0x0
NFTA_MASQ_FLAGS = 0x1
NFTA_MASQ_REG_PROTO_MIN = 0x2
NFTA_MASQ_REG_PROTO_MAX = 0x3
NFTA_REDIR_UNSPEC = 0x0
NFTA_REDIR_REG_PROTO_MIN = 0x1
NFTA_REDIR_REG_PROTO_MAX = 0x2
NFTA_REDIR_FLAGS = 0x3
NFTA_DUP_UNSPEC = 0x0
NFTA_DUP_SREG_ADDR = 0x1
NFTA_DUP_SREG_DEV = 0x2
NFTA_FWD_UNSPEC = 0x0
NFTA_FWD_SREG_DEV = 0x1
NFTA_OBJREF_UNSPEC = 0x0
NFTA_OBJREF_IMM_TYPE = 0x1
NFTA_OBJREF_IMM_NAME = 0x2
NFTA_OBJREF_SET_SREG = 0x3
NFTA_OBJREF_SET_NAME = 0x4
NFTA_OBJREF_SET_ID = 0x5
NFTA_GEN_UNSPEC = 0x0
NFTA_GEN_ID = 0x1
NFTA_GEN_PROC_PID = 0x2
NFTA_GEN_PROC_NAME = 0x3
NFTA_FIB_UNSPEC = 0x0
NFTA_FIB_DREG = 0x1
NFTA_FIB_RESULT = 0x2
NFTA_FIB_FLAGS = 0x3
NFT_FIB_RESULT_UNSPEC = 0x0
NFT_FIB_RESULT_OIF = 0x1
NFT_FIB_RESULT_OIFNAME = 0x2
NFT_FIB_RESULT_ADDRTYPE = 0x3
NFTA_FIB_F_SADDR = 0x1
NFTA_FIB_F_DADDR = 0x2
NFTA_FIB_F_MARK = 0x4
NFTA_FIB_F_IIF = 0x8
NFTA_FIB_F_OIF = 0x10
NFTA_FIB_F_PRESENT = 0x20
NFTA_CT_HELPER_UNSPEC = 0x0
NFTA_CT_HELPER_NAME = 0x1
NFTA_CT_HELPER_L3PROTO = 0x2
NFTA_CT_HELPER_L4PROTO = 0x3
NFTA_OBJ_UNSPEC = 0x0
NFTA_OBJ_TABLE = 0x1
NFTA_OBJ_NAME = 0x2
NFTA_OBJ_TYPE = 0x3
NFTA_OBJ_DATA = 0x4
NFTA_OBJ_USE = 0x5
NFTA_TRACE_UNSPEC = 0x0
NFTA_TRACE_TABLE = 0x1
NFTA_TRACE_CHAIN = 0x2
NFTA_TRACE_RULE_HANDLE = 0x3
NFTA_TRACE_TYPE = 0x4
NFTA_TRACE_VERDICT = 0x5
NFTA_TRACE_ID = 0x6
NFTA_TRACE_LL_HEADER = 0x7
NFTA_TRACE_NETWORK_HEADER = 0x8
NFTA_TRACE_TRANSPORT_HEADER = 0x9
NFTA_TRACE_IIF = 0xa
NFTA_TRACE_IIFTYPE = 0xb
NFTA_TRACE_OIF = 0xc
NFTA_TRACE_OIFTYPE = 0xd
NFTA_TRACE_MARK = 0xe
NFTA_TRACE_NFPROTO = 0xf
NFTA_TRACE_POLICY = 0x10
NFTA_TRACE_PAD = 0x11
NFT_TRACETYPE_UNSPEC = 0x0
NFT_TRACETYPE_POLICY = 0x1
NFT_TRACETYPE_RETURN = 0x2
NFT_TRACETYPE_RULE = 0x3
NFTA_NG_UNSPEC = 0x0
NFTA_NG_DREG = 0x1
NFTA_NG_MODULUS = 0x2
NFTA_NG_TYPE = 0x3
NFTA_NG_OFFSET = 0x4
NFT_NG_INCREMENTAL = 0x0
NFT_NG_RANDOM = 0x1
)
type RTCTime struct {
Sec int32
Min int32
Hour int32
Mday int32
Mon int32
Year int32
Wday int32
Yday int32
Isdst int32
}
type RTCWkAlrm struct {
Enabled uint8
Pending uint8
_ [2]byte
Time RTCTime
}
type RTCPLLInfo struct {
Ctrl int32
Value int32
Max int32
Min int32
Posmult int32
Negmult int32
Clock int32
}<|fim▁end|> | |
<|file_name|>webpack.config.production.js<|end_file_name|><|fim▁begin|>var path = require('path');
var webpack = require('webpack');
var ExtractTextPlugin = require('extract-text-webpack-plugin');<|fim▁hole|> entry: {
app: './src/main.js'
},
output: {
filename: '[name].min.js',
path: path.join(__dirname, 'dist'),
publicPath: ''
},
plugins: [
new webpack.optimize.OccurenceOrderPlugin(),
new webpack.NoErrorsPlugin(),
new webpack.DefinePlugin({
'process.env': {
'NODE_ENV': JSON.stringify('production'),
},
'__DEV_TOOLS__': false
}),
new webpack.optimize.UglifyJsPlugin({
compressor: {
warnings: false
}
}),
//new ExtractTextPlugin('app.css', { allChunks: true }),
new HtmlWebpackPlugin({
title: 'Redux Boilerplate',
filename: 'index.html',
template: 'index.template.html',
favicon: path.join(__dirname, 'assets/images/favicon.ico')
})
],
module: {
loaders: [
{ test: /\.css$/, loader: ExtractTextPlugin.extract('style-loader', 'css-loader!cssnext-loader') },
{ test: /\.js$/, loaders: ['babel'], exclude: /node_modules/ }
]
},
cssnext: {
browsers: 'last 2 versions'
}
};<|fim▁end|> | var HtmlWebpackPlugin = require('html-webpack-plugin');
module.exports = {
devtool: 'source-map', |
<|file_name|>MasterSecretRequirementProvider.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import org.grovecity.drizzlesms.service.KeyCachingService;
import org.whispersystems.jobqueue.requirements.RequirementListener;
import org.whispersystems.jobqueue.requirements.RequirementProvider;
public class MasterSecretRequirementProvider implements RequirementProvider {
private final BroadcastReceiver newKeyReceiver;
private RequirementListener listener;
public MasterSecretRequirementProvider(Context context) {
this.newKeyReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (listener != null) {
listener.onRequirementStatusChanged();
}
}
};
IntentFilter filter = new IntentFilter(KeyCachingService.NEW_KEY_EVENT);
context.registerReceiver(newKeyReceiver, filter, KeyCachingService.KEY_PERMISSION, null);
}
@Override
public void setListener(RequirementListener listener) {
this.listener = listener;
}
}<|fim▁end|> | package org.grovecity.drizzlesms.jobs.requirements; |
<|file_name|>LldbThreadResumedEvent.java<|end_file_name|><|fim▁begin|>/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> * limitations under the License.
*/
package agent.lldb.manager.evt;
import agent.lldb.lldb.DebugThreadInfo;
/**
* The event corresponding with SBThread.eBroadcastBitThreadResumed
*/
public class LldbThreadResumedEvent extends AbstractLldbEvent<DebugThreadInfo> {
public LldbThreadResumedEvent(DebugThreadInfo info) {
super(info);
}
}<|fim▁end|> | * See the License for the specific language governing permissions and |
<|file_name|>test-046.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | class Foo { [prop1]: string; } |
<|file_name|>instmsg.js<|end_file_name|><|fim▁begin|>/* ======================================================================== \
| FORMA - The E-Learning Suite |
| |
| Copyright (c) 2013 (Forma) |
| http://www.formalms.org |
| License http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt |
| |
| from docebo 4.0.5 CE 2008-2012 (c) docebo |
| License http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt |
\ ======================================================================== */
var userid=0;
var username="";
var pingTimer;
var _TTim;
var pathImage;
var chat_windows=0;
function setup_instmsg(passed_userid,passed_username,passed_path) {
userid=passed_userid;
username=passed_username;
pathImage=passed_path;
}
function getLang() {
var data="op=getLang";
var objAjax = YAHOO.util.Connect.asyncRequest('POST', 'ajax.server.php?mn=instmsg&'+data, {
success: getLangCompleted
});
}
function getLangCompleted(o) {
try {
_TTim = YAHOO.lang.JSON.parse(o.responseText);
} catch (e) {return;}
startPinging();
}
function startPinging() {
pingTimer=setInterval("ping()",15000);
}
function ping() {
var id_receiver=userid;
var name_receiver=username;
var data="op=ping&id_receiver="+id_receiver+"&name_receiver="+name_receiver;
var objAjax = YAHOO.util.Connect.asyncRequest('POST', 'ajax.server.php?mn=instmsg&'+data, {
success: pingResponse
});
}
function pingResponse(o) {
try {
chatMsgs = YAHOO.lang.JSON.parse(o.responseText);
} catch (e) {return;}
for (var i=0;i<chatMsgs.content.length;i++) {
var wChat=chatMsgs.content[i].id_sender;
if (wObjList[wChat]==null) {
startChat(chatMsgs.content[i].id_sender,chatMsgs.content[i].name_sender);
return true;
}
msg=unescape(chatMsgs.content[i].msg);
msg=replaceEmoticon(msg);
var chatBox=YAHOO.util.Dom.get(wChat+'_text');
chatBox.innerHTML = chatBox.innerHTML + "<span class=\"timestamp\"> (" + chatMsgs.content[i].timestamp + ")</span> <strong class=\"userB\">" + chatMsgs.content[i].name_sender + ":</strong> <span class=\"new\">" + msg + "</span><br />\n";
chatBox.scrollTop = chatBox.scrollHeight - chatBox.clientHeight;
}
displayUsersList(chatMsgs.list,false);
}
function displayUsersList(usersList,openwin) {
YAHOO.util.Dom.get('user_online_n').firstChild.innerHTML=usersList.length;
if(!openwin) return;
var str='<div id="listContainer"><ul id="userList">';
for (var i=0;i<usersList.length;i++) {
if (usersList[i].idSt!=userid)
str+='<li><a href="javascript:;" onclick="startChat(\''+usersList[i].idSt+'\',\''+usersList[i].idUser+'\');" class="callOut"><span>'+usersList[i].userName+'</span></a></li>';
else
str+='<li><span class="callOutDisabled"><span>'+usersList[i].userName+'</span></a></li>';
}
str+='</ul></div>';
var w = new YAHOO.widget.SimpleDialog("wUsersList", {
fixedcenter: true,
visible: true,
close: true,
modal: false,
constraintoviewport: true
} );
w.setHeader(_TTim._WHOIS_ONLINE);
w.setBody(str);
w.render(document.body);
}
function openUsersList() {
var data="op=getUsersList";
var objAjax = YAHOO.util.Connect.asyncRequest('POST', 'ajax.server.php?mn=instmsg&'+data, {
success: showUsersList
});
}
function showUsersList(o) {
try {
users = YAHOO.lang.JSON.parse(o.responseText);
} catch (e) {return;}
displayUsersList(users.list,true);
}
function startChat(id_sender,name_sender) {
if (id_sender==userid) return 0;
var wChat=id_sender;
if (wObjList[wChat]==null) {
clearInterval(pingTimer);
pingTimer=setInterval("ping()",5000);
chat_windows++;
getChatContent(wChat,name_sender);
}
destroyWindow("wUsersList");
}
function getChatContent(wChat,name_sender) {
var id_receiver=userid;
var name_receiver=username;
var id_sender=wChat;
var data="op=getContent";
data+="&wChat="+wChat;
data+="&id_sender="+id_sender;
data+="&id_receiver="+id_receiver;
data+="&name_sender="+name_sender;
data+="&name_receiver="+name_receiver;
var objAjax = YAHOO.util.Connect.asyncRequest('POST', 'ajax.server.php?mn=instmsg&'+data, {
success: showChat
});
}
function showChat(o) {
try {
chatObj = YAHOO.lang.JSON.parse(o.responseText);
} catch (e) {return;}
var wChat=chatObj.wChat;
var name_sender=chatObj.name_sender;
var msg="";
str='<div class="chat">';
str+='<div id="'+wChat+'_text'+'" class="chatText">';
for (var i=0;i<chatObj.content.length;i++) {
msg=unescape(chatObj.content[i].msg);
msg=replaceEmoticon(msg);
str+="<span class=\"timestamp\"> (" + chatObj.content[i].timestamp + ")</span> <strong class=\""+chatObj.content[i].userClass+"\">" + chatObj.content[i].userName + ":</strong> <span class=\""+chatObj.content[i].lineStatus+"\">" + msg + "</span><br />\n";
}
str+='</div>';
str+='<input type="text" name="'+wChat+'_inputBox'+'" id="'+wChat+'_inputBox'+'" onkeypress="keyHandler(event,'+"'"+wChat+"'"+');" style="width:350px;" /> <button onclick="sendLine(\''+wChat+'\')">'+_TTim._SEND+'</button>';
str+="</div>";
str+='<object classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,0,0" width="1" height="1" id="soundeffect" align=""> <param name="movie" value="./modules/instmsg/grilli.swf"> <param name="quality" value="high"> <parame name="bgcolor" value="#FFFFFF"> <embed src="./modules/instmsg/grilli.swf" quality="high" bgcolor="#FFFFFF" name="soundeffect" width="1" height="1" align="" type="application/x-shockwave-flash" pluginspace="http://www.macromedia.com/go/getflashplayer"></embed></object>';
Stamp = new Date();
var day = String(Stamp.getDate());
var month = String(Stamp.getMonth()+1);
var year = String(Stamp.getFullYear());
day = (day.length > 1) ? day : "0"+day;
month = (month.length > 1) ? month : "0"+month;
var name=wChat;
var title=_TTim._CHAT+": "+name_sender+" ("+ day + "/" + month + "/" + year+")";
var w = new YAHOO.widget.SimpleDialog("inst_"+name, {
visible: true,
close: true,
modal: false,
constraintoviewport: true
} );
w.setHeader(title);
w.setBody(str);
w.render(document.body);
var chatBox=YAHOO.util.Dom.get(wChat+'_text');
setTimeout("setScroll('"+wChat+"_text')",500);
w.addListener('cancelEvent', updateTimers);
w.addListener('destroyEvent', updateTimers);
}
function setScroll(chatDiv) {
var chatBox=YAHOO.util.Dom.get(chatDiv);
chatBox.scrollTop = chatBox.scrollHeight - chatBox.clientHeight;
}
function keyHandler( e, wChat ) {
var asc = document.all ? event.keyCode : e.which;
if(asc == 13) {
sendLine(wChat);
}
return asc != 13;
}
function sendLine(wChat) {
var sentText=String(YAHOO.util.Dom.get(wChat+'_inputBox').value);
if (sentText.length < 1) return true;
YAHOO.util.Dom.get(wChat+'_inputBox').value="";
Stamp = new Date();
var h = String(Stamp.getHours());
var m = String(Stamp.getMinutes());
var s = String(Stamp.getSeconds());
var day = String(Stamp.getDate());
var month = String(Stamp.getMonth()+1);
var year = String(Stamp.getFullYear());
day = (day.length > 1) ? day : "0"+day;
month = (month.length > 1) ? month : "0"+month;
h = (h.length > 1) ? h : "0"+h;
m = (m.length > 1) ? m : "0"+m;
s = (s.length > 1) ? s : "0"+s;
var msg=replaceEmoticon(sentText);
var chatBox=YAHOO.util.Dom.get(wChat+'_text');
chatBox.innerHTML = chatBox.innerHTML + "<span class=\"timestamp\"> (" + h + ":" + m + ":" + s + ")</span> <strong class=\"userA\">" + username + ":</strong> <span class=\"new\">" + msg + "</span><br />\n";
chatBox.scrollTop = chatBox.scrollHeight - chatBox.clientHeight;
var id_sender=userid;
var id_receiver=wChat;
var msg=new String();
msg=escape(sentText);
var data="op=sendLine&wChat="+wChat+"&id_sender="+id_sender+"&id_receiver="+id_receiver+"&msg="+msg;
var objAjax = YAHOO.util.Connect.asyncRequest('POST', 'ajax.server.php?mn=instmsg&'+data, {
success: lineSent
});
}
function lineSent(o) {
return true;
}
function updateTimers() {
chat_windows--;
if (chat_windows==0) {
clearInterval(pingTimer);
pingTimer=setInterval("ping()",15000);
}
}
/* replacing emoticons with images */
function getChatEmoticon(name) {
var ext="gif";
var res ="<img alt=\""+name+"\" title=\""+name+"\" src=\"";
res+=pathImage+"emoticons/"+name+"."+ext+"\" />";
return res;
}
function replaceEmoticon(txt) {
var res=txt;
res=res.replace(/;[-]?\)/i, getChatEmoticon("wink_smile"));
res=res.replace(/:[-]?\|/i, getChatEmoticon("whatchutalkingabout_smile"));
res=res.replace(/:[-]?P/i, getChatEmoticon("tounge_smile"));
res=res.replace(/o:[-]?\)/i, getChatEmoticon("angel_smile"));
res=res.replace(/:[-]?\)/i, getChatEmoticon("regular_smile"));
res=res.replace(/:[-]?\(/i, getChatEmoticon("sad_smile"));
res=res.replace(/:?\'[-]?(\(|\[)/i, getChatEmoticon("cry_smile"));
res=res.replace(/:[-]?o/i, getChatEmoticon("omg_smile"));
res=res.replace(/8[-]?\)/i, getChatEmoticon("shades_smile"));
res=res.replace(/:[-]?s/i, getChatEmoticon("confused_smile"));
res=res.replace(/X[-]?\(/i, getChatEmoticon("devil_smile"));
res=res.replace(/\=\(\(/i, getChatEmoticon("broken_heart"));
res=res.replace(/:[-]?x/i, getChatEmoticon("heart"));
res=res.replace(/:[-]?d/i, getChatEmoticon("teeth_smile"));
res=res.replace(/\[OK\]/, getChatEmoticon("thumbs_up"));
res=res.replace(/\[BAD\]/, getChatEmoticon("thumbs_down"));
res=res.replace(/\[IDEA\]/, getChatEmoticon("lightbulb"));
<|fim▁hole|>
onunload = function() {clearInterval(pingTimer);}
YAHOO.util.Event.addListener(window,"load", getLang);
YAHOO.util.Event.addListener("open_users_list","click", openUsersList);
YAHOO.util.Event.addListener(window,"unload", onunload);<|fim▁end|> | res=res.replace(/\[MAIL\]/, getChatEmoticon("envelope"));
return res;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.