prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>find-map.js<|end_file_name|><|fim▁begin|>/**
* shuji (周氏)
* https://github.com/paazmaya/shuji
*
* Reverse engineering JavaScript and CSS sources from sourcemaps
*
* Copyright (c) Juga Paazmaya <[email protected]> (https://paazmaya.fi)
* Licensed under the MIT license
*/
const fs = require('fs'),
path = require('path');
const MATCH_MAP = /\.map$/iu;
const MATCH_CODE = /\.(js|css)$/iu;
const FIND_SOURCE_FILE = /\/\/#\s*sourceMappingURL=([.\w]+map)/iu;
const FIND_SOURCE_BASE64 = /\/\*?\/?#\s*sourceMappingURL=([.\w\-/=;:]*)base64,([\w]+=)/iu;
const FIND_SOURCE_UENC = /\/\*?\/?#\s*sourceMappingURL=([.\w\-/=;:]+),([;:,.\-\w%]+)/iu;
/**
* Find the sourceMap and return its contents.
* In case the given filepath is already the sourceMap file, not much is done.
* In case the given filepath is a JavaScript file, then the matching sourceMap
* is searched for.
*
* @param {string} filepath
* @param {object} options Options object. If not defined, verbose=false
* @param {boolean} options.verbose Shall there be more output
*
* @returns {string|boolean} sourceMap contents or false when not found
*/
const findMap = (filepath, options) => {
options = options || {
verbose: false
};
const input = fs.readFileSync(filepath, 'utf8');
if (filepath.match(MATCH_MAP)) {
return input;
}
else if (filepath.match(MATCH_CODE)) {
if (input.match(FIND_SOURCE_BASE64)) {
const sourceMappingMatch = FIND_SOURCE_BASE64.exec(input);
if (sourceMappingMatch && sourceMappingMatch.length > 2) {
if (options.verbose) {
console.log(`Input file "${filepath}" contains Base64 of ${sourceMappingMatch[2].length} length`);
}
const buf = Buffer.from(sourceMappingMatch[2], 'base64');
return buf.toString('utf8');
}
}
else if (input.match(FIND_SOURCE_UENC)) {
const sourceMappingMatch = FIND_SOURCE_UENC.exec(input);
if (sourceMappingMatch && sourceMappingMatch.length > 2) {
if (options.verbose) {
console.log(`Input file "${filepath}" contains URL encoded of ${sourceMappingMatch[2].length} length`);
}
const buf = Buffer.from(sourceMappingMatch[2], 'ascii');
return buf.toString('utf8');
}
}
else if (input.match(FIND_SOURCE_FILE)) {
const sourceMappingMatch = FIND_SOURCE_FILE.exec(input);
if (sourceMappingMatch && sourceMappingMatch.length > 1) {
if (options.verbose) {
console.log(`Input file "${filepath}" points to "${sourceMappingMatch[1]}"`);
}
}
// Since the sourceMappingURL is relative, try to find it from the same folder<|fim▁hole|> const mapFile = path.join(path.dirname(filepath), sourceMappingMatch[1]);
try {
fs.accessSync(mapFile);
}
catch (error) {
console.error(`Could not access "${mapFile}"`);
console.error(error.message);
return false;
}
return fs.readFileSync(mapFile, 'utf8');
}
}
else if (options.verbose) {
console.error(`Input file "${filepath}" was not a map nor a code file`);
}
return false;
};
module.exports = findMap;<|fim▁end|>
| |
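The three regular expressions at the top of find-map.js cover the ways a `sourceMappingURL` comment can appear: a plain relative file name, a Base64 data URI, or a URL-encoded payload. As a rough illustration of the Base64 case only, here is a small Python sketch (an editorial addition, not part of the dataset; the minified sample string and the simplified regex are made up) that extracts and decodes an inline source map much like the `Buffer.from(..., 'base64')` branch above:

```python
import base64
import json
import re

# Hypothetical minified file with an inline (Base64 data URI) source map appended.
source_map = {"version": 3, "sources": ["a.js"], "mappings": "AAAA"}
encoded = base64.b64encode(json.dumps(source_map).encode("utf-8")).decode("ascii")
minified = "var a=1;\n//# sourceMappingURL=data:application/json;charset=utf-8;base64," + encoded

# Simplified analogue of FIND_SOURCE_BASE64: capture everything after "base64,".
match = re.search(r"sourceMappingURL=.*?base64,([A-Za-z0-9+/=]+)", minified)
if match:
    decoded = json.loads(base64.b64decode(match.group(1)))
    print(decoded["sources"])  # ['a.js']
```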
<|file_name|>base.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# privacyIDEA is a fork of LinOTP
# May 08, 2014 Cornelius Kölbel
# License: AGPLv3
# contact: http://www.privacyidea.org
#
# 2014-10-17 Fix the empty result problem
# Cornelius Kölbel, <[email protected]>
#
# Copyright (C) 2010 - 2014 LSE Leading Security Experts GmbH
# License: AGPLv3
# contact: http://www.linotp.org
# http://www.lsexperts.de
# [email protected]
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__doc__="""This is the BaseClass for audit trails
The audit is supposed to work like this. First we need to create an audit
object. E.g. this can be done in the before_request:
g.audit_object = getAudit(file_config)
During the request, the g.audit_object can be used to add audit information:
g.audit_object.log({"client": "123.2.3.4", "action": "validate/check"})<|fim▁hole|>Thus at many different places in the code, audit information can be added to
the audit object.
Finally the audit_object needs to be stored to the audit storage. So we call:
g.audit_object.finalize_log()
which creates a signature of the audit data and writes the data to the audit
storage.
"""
import logging
log = logging.getLogger(__name__)
from privacyidea.lib.log import log_with
import socket
from datetime import datetime, timedelta
class Paginate(object):
"""
This is a pagination object that is used when searching audit trails.
"""
def __init__(self):
# The audit data
self.auditdata = []
# The number of the previous page
self.prev = None
# the number of the next page
self.next = None
# the number of the current page
self.current = 1
# the total entry numbers
self.total = 0
class Audit(object): # pragma: no cover
def __init__(self, config=None):
"""
Create a new audit object.
:param config: The web config is passed to the audit module, so that
the special module implementation can get its configuration.
:type config: dict
:return:
"""
self.name = "AuditBase"
self.audit_data = {}
self.private = ""
self.public = ""
@log_with(log)
def initialize(self):
# defaults
self.audit_data = {'action_detail': '',
'info': '',
'log_level': 'INFO',
'administrator': '',
'value': '',
'key': '',
'serial': '',
'token_type': '',
'clearance_level': 0,
'privacyidea_server': socket.gethostname(),
'realm': '',
'user': '',
'client': ''
}
#controller = request.environ['pylons.routes_dict']['controller']
#action = request.environ['pylons.routes_dict']['action']
#c.audit['action'] = "%s/%s" % (controller, action)
def log_token_num(self, count):
"""
Log the number of the tokens.
Can be passed like
log_token_num(get_tokens(count=True))
:param count: Number of tokens
:type count: int
:return:
"""
self.audit_data['action_detail'] = "tokennum = %s" % str(count)
@log_with(log)
def read_keys(self, pub, priv):
"""
Set the private and public key for the audit class. This is done by
passing the key file names.
#priv = config.get("privacyideaAudit.key.private")
#pub = config.get("privacyideaAudit.key.public")
:param pub: Public key, used for verifying the signature
:type pub: string with filename
:param priv: Private key, used to sign the audit entry
:type priv: string with filename
:return: None
"""
try:
f = open(priv, "r")
self.private = f.read()
f.close()
except Exception as e:
log.error("Error reading private key %s: (%r)" % (priv, e))
raise e
try:
f = open(pub, "r")
self.public = f.read()
f.close()
except Exception as e:
log.error("Error reading public key %s: (%r)" % (pub, e))
raise e
def get_audit_id(self):
return self.name
def get_total(self, param, AND=True, display_error=True):
"""
This method returns the total number of audit entries
in the audit store
"""
return None
@log_with(log)
def log(self, param): # pragma: no cover
"""
This method is used to log the data.
During a request this method can be called several times to fill the
internal audit_data dictionary.
"""
pass
def add_to_log(self, param):
"""
Add to existing log entry
:param param:
:return:
"""
pass
def finalize_log(self):
"""
This method is called to finalize the audit_data. I.e. sign the data
and write it to the database.
It should hash the data and do a hash chain and sign the data
"""
pass
def initialize_log(self, param):
"""
This method initializes the log state.
The fact that the log state was initialized also needs to be logged.
Therefore the same params are passed as in the log method.
"""
pass
def set(self):
"""
This function could be used to set certain things like the signing key.
But maybe it should only be read from privacyidea.ini?
"""
pass
def search(self, param, display_error=True, rp_dict=None):
"""
This function is used to search audit events.
param: Search parameters can be passed.
return: A pagination object
This function is deprecated.
"""
return Paginate()
def csv_generator(self, param):
"""
A generator that can be used to stream the audit log
:param param:
:return:
"""
pass
def search_query(self, search_dict, rp_dict):
"""
This function returns the audit log as an iterator on the result
"""
return None
def audit_entry_to_dict(self, audit_entry):
"""
If the search_query returns an iterator with elements that are not a
dictionary, the audit module needs
to provide this function, to convert the audit entry to a dictionary.
"""
return {}
def get_dataframe(self, start_time=datetime.now()-timedelta(days=7),
end_time=datetime.now()):
"""
The Audit module itself can handle its data best. This function is used
to return a pandas.DataFrame with all audit data in the given time
frame.
This dataframe then can be used for extracting statistics.
:param start_time: The start time of the data
:type start_time: datetime
:param end_time: The end time of the data
:type end_time: datetime
:return: Audit data
:rtype: dataframe
"""
return None<|fim▁end|>
| |
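The module docstring above lays out the intended audit lifecycle: build an audit object per request, call `log()` as often as needed, then call `finalize_log()` once to sign and persist the entry. The base class leaves those methods as no-ops, so the following Python sketch only illustrates the calling pattern with a made-up in-memory subclass; it assumes it runs in the same module as the `Audit` class above and is not one of privacyIDEA's real audit modules:

```python
class MemoryAudit(Audit):
    """Toy subclass that collects finalized entries in a list instead of a database."""

    def __init__(self, config=None):
        Audit.__init__(self, config)
        self.entries = []

    def log(self, param):
        self.audit_data.update(param)

    def finalize_log(self):
        # A real module would hash, chain and sign the entry here before storing it.
        self.entries.append(dict(self.audit_data))
        self.audit_data = {}


audit = MemoryAudit()
audit.initialize()
audit.log({"client": "123.2.3.4", "action": "validate/check"})
audit.log({"user": "alice"})
audit.finalize_log()
print(audit.entries[0]["action"])  # validate/check
```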
<|file_name|>parsing.rs<|end_file_name|><|fim▁begin|>//! Utility functions for Header implementations.
use language_tags::LanguageTag;
use std::str;
use std::str::FromStr;
use std::fmt::{self, Display};
use url::percent_encoding;
use header::Raw;
use header::shared::Charset;
/// Reads a single raw string when parsing a header.
pub fn from_one_raw_str<T: str::FromStr>(raw: &Raw) -> ::Result<T> {
if let Some(line) = raw.one() {
if !line.is_empty() {
return from_raw_str(line)
}
}
Err(::Error::Header)
}
/// Reads a raw string into a value.
pub fn from_raw_str<T: str::FromStr>(raw: &[u8]) -> ::Result<T> {
let s = try!(str::from_utf8(raw)).trim();
T::from_str(s).or(Err(::Error::Header))
}
/// Reads a comma-delimited raw header into a Vec.
#[inline]
pub fn from_comma_delimited<T: str::FromStr>(raw: &Raw) -> ::Result<Vec<T>> {
let mut result = Vec::new();
for s in raw {
let s = try!(str::from_utf8(s.as_ref()));
result.extend(s.split(',')
.filter_map(|x| match x.trim() {
"" => None,
y => Some(y)
})
.filter_map(|x| x.trim().parse().ok()))
}
Ok(result)
}
/// Format an array into a comma-delimited string.
pub fn fmt_comma_delimited<T: Display>(f: &mut fmt::Formatter, parts: &[T]) -> fmt::Result {
for (i, part) in parts.iter().enumerate() {
if i != 0 {
try!(f.write_str(", "));
}
try!(Display::fmt(part, f));
}
Ok(())
}
/// An extended header parameter value (i.e., tagged with a character set and optionally,
/// a language), as defined in [RFC 5987](https://tools.ietf.org/html/rfc5987#section-3.2).
#[derive(Clone, Debug, PartialEq)]
pub struct ExtendedValue {
/// The character set that is used to encode the `value` to a string.
pub charset: Charset,
/// The human language details of the `value`, if available.
pub language_tag: Option<LanguageTag>,
/// The parameter value, as expressed in octets.
pub value: Vec<u8>,
}
/// Parses extended header parameter values (`ext-value`), as defined in
/// [RFC 5987](https://tools.ietf.org/html/rfc5987#section-3.2).
///
/// Extended values are denoted by parameter names that end with `*`.
///
/// ## ABNF
/// ```plain
/// ext-value = charset "'" [ language ] "'" value-chars
/// ; like RFC 2231's <extended-initial-value>
/// ; (see [RFC2231], Section 7)
///
/// charset = "UTF-8" / "ISO-8859-1" / mime-charset
///
/// mime-charset = 1*mime-charsetc
/// mime-charsetc = ALPHA / DIGIT
/// / "!" / "#" / "$" / "%" / "&"
/// / "+" / "-" / "^" / "_" / "`"
/// / "{" / "}" / "~"
/// ; as <mime-charset> in Section 2.3 of [RFC2978]
/// ; except that the single quote is not included
/// ; SHOULD be registered in the IANA charset registry
///
/// language = <Language-Tag, defined in [RFC5646], Section 2.1>
///
/// value-chars = *( pct-encoded / attr-char )
///
/// pct-encoded = "%" HEXDIG HEXDIG
/// ; see [RFC3986], Section 2.1
///
/// attr-char = ALPHA / DIGIT
/// / "!" / "#" / "$" / "&" / "+" / "-" / "."
/// / "^" / "_" / "`" / "|" / "~"
/// ; token except ( "*" / "'" / "%" )
/// ```
pub fn parse_extended_value(val: &str) -> ::Result<ExtendedValue> {
// Break into three pieces separated by the single-quote character
let mut parts = val.splitn(3,'\'');
// Interpret the first piece as a Charset
let charset: Charset = match parts.next() {
None => return Err(::Error::Header),
Some(n) => try!(FromStr::from_str(n)),
};
// Interpret the second piece as a language tag
let lang: Option<LanguageTag> = match parts.next() {
None => return Err(::Error::Header),
Some("") => None,
Some(s) => match s.parse() {
Ok(lt) => Some(lt),
Err(_) => return Err(::Error::Header),
}
};
// Interpret the third piece as a sequence of value characters
let value: Vec<u8> = match parts.next() {
None => return Err(::Error::Header),
Some(v) => percent_encoding::percent_decode(v.as_bytes()).collect(),
};
Ok(ExtendedValue {
charset: charset,
language_tag: lang,
value: value,<|fim▁hole|>}
define_encode_set! {
/// This encode set is used for HTTP header values and is defined at
/// https://tools.ietf.org/html/rfc5987#section-3.2
pub HTTP_VALUE = [percent_encoding::SIMPLE_ENCODE_SET] | {
' ', '"', '%', '\'', '(', ')', '*', ',', '/', ':', ';', '<', '-', '>', '?',
'[', '\\', ']', '{', '}'
}
}
impl fmt::Debug for HTTP_VALUE {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad("HTTP_VALUE")
}
}
impl Display for ExtendedValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let encoded_value =
percent_encoding::percent_encode(&self.value[..], HTTP_VALUE);
if let Some(ref lang) = self.language_tag {
write!(f, "{}'{}'{}", self.charset, lang, encoded_value)
} else {
write!(f, "{}''{}", self.charset, encoded_value)
}
}
}
#[cfg(test)]
mod tests {
use header::shared::Charset;
use super::{ExtendedValue, parse_extended_value};
#[test]
fn test_parse_extended_value_with_encoding_and_language_tag() {
let expected_language_tag = langtag!(en);
// RFC 5987, Section 3.2.2
// Extended notation, using the Unicode character U+00A3 (POUND SIGN)
let result = parse_extended_value("iso-8859-1'en'%A3%20rates");
assert!(result.is_ok());
let extended_value = result.unwrap();
assert_eq!(Charset::Iso_8859_1, extended_value.charset);
assert!(extended_value.language_tag.is_some());
assert_eq!(expected_language_tag, extended_value.language_tag.unwrap());
assert_eq!(vec![163, b' ', b'r', b'a', b't', b'e', b's'], extended_value.value);
}
#[test]
fn test_parse_extended_value_with_encoding() {
// RFC 5987, Section 3.2.2
// Extended notation, using the Unicode characters U+00A3 (POUND SIGN)
// and U+20AC (EURO SIGN)
let result = parse_extended_value("UTF-8''%c2%a3%20and%20%e2%82%ac%20rates");
assert!(result.is_ok());
let extended_value = result.unwrap();
assert_eq!(Charset::Ext("UTF-8".to_string()), extended_value.charset);
assert!(extended_value.language_tag.is_none());
assert_eq!(vec![194, 163, b' ', b'a', b'n', b'd', b' ', 226, 130, 172, b' ', b'r', b'a', b't', b'e', b's'], extended_value.value);
}
#[test]
fn test_parse_extended_value_missing_language_tag_and_encoding() {
// From: https://greenbytes.de/tech/tc2231/#attwithfn2231quot2
let result = parse_extended_value("foo%20bar.html");
assert!(result.is_err());
}
#[test]
fn test_parse_extended_value_partially_formatted() {
let result = parse_extended_value("UTF-8'missing third part");
assert!(result.is_err());
}
#[test]
fn test_parse_extended_value_partially_formatted_blank() {
let result = parse_extended_value("blank second part'");
assert!(result.is_err());
}
#[test]
fn test_fmt_extended_value_with_encoding_and_language_tag() {
let extended_value = ExtendedValue {
charset: Charset::Iso_8859_1,
language_tag: Some("en".parse().expect("Could not parse language tag")),
value: vec![163, b' ', b'r', b'a', b't', b'e', b's'],
};
assert_eq!("ISO-8859-1'en'%A3%20rates", format!("{}", extended_value));
}
#[test]
fn test_fmt_extended_value_with_encoding() {
let extended_value = ExtendedValue {
charset: Charset::Ext("UTF-8".to_string()),
language_tag: None,
value: vec![194, 163, b' ', b'a', b'n', b'd', b' ', 226, 130, 172, b' ', b'r', b'a',
b't', b'e', b's'],
};
assert_eq!("UTF-8''%C2%A3%20and%20%E2%82%AC%20rates",
format!("{}", extended_value));
}
}<|fim▁end|>
|
})
|
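`parse_extended_value` above splits an RFC 5987 `ext-value` on its two single quotes into a charset, an optional language tag, and percent-encoded octets. The Python sketch below mirrors only that three-way split and the percent decoding; it is a minimal sketch rather than a faithful port of the Rust function, and the helper name `parse_ext_value` is made up for this example:

```python
from urllib.parse import unquote_to_bytes

def parse_ext_value(value):
    """Split charset'language'value-chars as described in RFC 5987, section 3.2."""
    parts = value.split("'", 2)
    if len(parts) != 3:
        raise ValueError("ext-value needs charset, language and value parts")
    charset, language, encoded = parts
    return charset, language or None, unquote_to_bytes(encoded)

charset, lang, raw = parse_ext_value("UTF-8''%c2%a3%20and%20%e2%82%ac%20rates")
print(charset, lang, raw.decode("utf-8"))  # UTF-8 None £ and € rates
```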
<|file_name|>test_output.py<|end_file_name|><|fim▁begin|>import pytest
from httpie import ExitStatus
from httpie.output.formatters.colors import get_lexer
from utils import TestEnvironment, http, HTTP_OK, COLOR, CRLF
class TestVerboseFlag:
def test_verbose(self, httpbin):
r = http('--verbose',
'GET', httpbin.url + '/get', 'test-header:__test__')
assert HTTP_OK in r
assert r.count('__test__') == 2
def test_verbose_form(self, httpbin):
# https://github.com/jakubroztocil/httpie/issues/53
r = http('--verbose', '--form', 'POST', httpbin.url + '/post',
'A=B', 'C=D')
assert HTTP_OK in r
assert 'A=B&C=D' in r
def test_verbose_json(self, httpbin):
r = http('--verbose',
'POST', httpbin.url + '/post', 'foo=bar', 'baz=bar')
assert HTTP_OK in r
assert '"baz": "bar"' in r
class TestColors:
@pytest.mark.parametrize('mime', [
'application/json',
'application/json+foo',
'application/foo+json',
'foo/json',
'foo/json+bar',
'foo/bar+json',
])<|fim▁hole|> assert lexer is not None
assert lexer.name == 'JSON'
def test_get_lexer_not_found(self):
assert get_lexer('xxx/yyy') is None
class TestPrettyOptions:
"""Test the --pretty flag handling."""
def test_pretty_enabled_by_default(self, httpbin):
env = TestEnvironment(colors=256)
r = http('GET', httpbin.url + '/get', env=env)
assert COLOR in r
def test_pretty_enabled_by_default_unless_stdout_redirected(self, httpbin):
r = http('GET', httpbin.url + '/get')
assert COLOR not in r
def test_force_pretty(self, httpbin):
env = TestEnvironment(stdout_isatty=False, colors=256)
r = http('--pretty=all', 'GET', httpbin.url + '/get', env=env, )
assert COLOR in r
def test_force_ugly(self, httpbin):
r = http('--pretty=none', 'GET', httpbin.url + '/get')
assert COLOR not in r
def test_subtype_based_pygments_lexer_match(self, httpbin):
"""Test that media subtype is used if type/subtype doesn't
match any lexer.
"""
env = TestEnvironment(colors=256)
r = http('--print=B', '--pretty=all', httpbin.url + '/post',
'Content-Type:text/foo+json', 'a=b', env=env)
assert COLOR in r
def test_colors_option(self, httpbin):
env = TestEnvironment(colors=256)
r = http('--print=B', '--pretty=colors',
'GET', httpbin.url + '/get', 'a=b',
env=env)
# Tests that the JSON data isn't formatted.
assert not r.strip().count('\n')
assert COLOR in r
def test_format_option(self, httpbin):
env = TestEnvironment(colors=256)
r = http('--print=B', '--pretty=format',
'GET', httpbin.url + '/get', 'a=b',
env=env)
# Tests that the JSON data is formatted.
assert r.strip().count('\n') == 2
assert COLOR not in r
class TestLineEndings:
"""
Test that CRLF is properly used in headers
and as the headers/body separator.
"""
def _validate_crlf(self, msg):
lines = iter(msg.splitlines(True))
for header in lines:
if header == CRLF:
break
assert header.endswith(CRLF), repr(header)
else:
assert 0, 'CRLF between headers and body not found in %r' % msg
body = ''.join(lines)
assert CRLF not in body
return body
def test_CRLF_headers_only(self, httpbin):
r = http('--headers', 'GET', httpbin.url + '/get')
body = self._validate_crlf(r)
assert not body, 'Garbage after headers: %r' % r
def test_CRLF_ugly_response(self, httpbin):
r = http('--pretty=none', 'GET', httpbin.url + '/get')
self._validate_crlf(r)
def test_CRLF_formatted_response(self, httpbin):
r = http('--pretty=format', 'GET', httpbin.url + '/get')
assert r.exit_status == ExitStatus.OK
self._validate_crlf(r)
def test_CRLF_ugly_request(self, httpbin):
r = http('--pretty=none', '--print=HB', 'GET', httpbin.url + '/get')
self._validate_crlf(r)
def test_CRLF_formatted_request(self, httpbin):
r = http('--pretty=format', '--print=HB', 'GET', httpbin.url + '/get')
self._validate_crlf(r)<|fim▁end|>
|
def test_get_lexer(self, mime):
lexer = get_lexer(mime)
|
<|file_name|>f64.rs<|end_file_name|><|fim▁begin|>//! Double precision
use Complex;
/// Imaginary unit<|fim▁hole|><|fim▁end|>
|
pub const I: Complex<f64> = Complex { re: 0., im: 1. };
|
<|file_name|>Input.cpp<|end_file_name|><|fim▁begin|>#include "Input.h"
#include "Core.h"
#include "Memory.h"
#include "Cpu.h"
//Gameboy keys:
//[Up][Left][Right][Down][A][B][Start][Select]
//Mapped to standard keyboard keys:
//[Up][Left][Right][Down][Z][X][Enter][RShift]
//Mapped to standard Xbox controller buttons:
//[Up][Left][Right][Down][A][X][Start][Select]
// or
// [B]
Input::Input(QObject *parent, Memory& memory, Cpu& cpu)
: QObject(parent), memory(memory), cpu(cpu)
{
QObject::connect(QGamepadManager::instance(), &QGamepadManager::gamepadButtonPressEvent,
this, &Input::gamepadButtonPressed);
QObject::connect(QGamepadManager::instance(), &QGamepadManager::gamepadButtonReleaseEvent,
this, &Input::gamepadButtonReleased);
}
void Input::gamepadButtonPressed(int id, QGamepadManager::GamepadButton button, double value) {
switch(button) {
case QGamepadManager::ButtonA:
padA = true;
break;
case QGamepadManager::ButtonB:
case QGamepadManager::ButtonX:
padB = true;
break;
case QGamepadManager::ButtonStart:
padStart = true;
break;
case QGamepadManager::ButtonSelect:
padSelect = true;
break;
case QGamepadManager::ButtonLeft:
padLeft = true;
break;
case QGamepadManager::ButtonRight:
padRight = true;
break;
case QGamepadManager::ButtonUp:
padUp = true;
break;
case QGamepadManager::ButtonDown:
padDown = true;
break;
}
}
void Input::gamepadButtonReleased(int id, QGamepadManager::GamepadButton button) {
switch(button) {
case QGamepadManager::ButtonA:
padA = false;
break;
case QGamepadManager::ButtonB:
case QGamepadManager::ButtonX:
padB = false;
break;
case QGamepadManager::ButtonStart:
padStart = false;
break;
case QGamepadManager::ButtonSelect:
padSelect = false;
break;
case QGamepadManager::ButtonLeft:
padLeft = false;
break;
case QGamepadManager::ButtonRight:
padRight = false;
break;
case QGamepadManager::ButtonUp:
padUp = false;
break;
case QGamepadManager::ButtonDown:
padDown = false;
break;
}
}
Input::~Input() {
QObject::disconnect(QGamepadManager::instance(), &QGamepadManager::gamepadButtonPressEvent,
this, &Input::gamepadButtonPressed);
QObject::disconnect(QGamepadManager::instance(), &QGamepadManager::gamepadButtonReleaseEvent,
this, &Input::gamepadButtonReleased);
}
unsigned char Input::getKeyInput()
{
return memory.readMemory(0xFF00);
}
bool Input::isAnyKeyPressed()
{
return keyUp || keyDown || keyLeft || keyRight || keyStart || keySelect || keyA || keyB ||
padUp || padDown || padLeft || padRight || padStart || padSelect || padA || padB;
}
void Input::readInput()
{
unsigned char keyInput = getKeyInput();
bool interrupt = false;
cpu.setStop(false);
if (((keyInput & 0x10) >> 4) == 1)
{
if (keyA || padA) { //Z //A
keyInput &= 0xFE;
interrupt = true;
}
else
{
keyInput |= 0x01;
}
if (keyB || padB) { //X //B
keyInput &= 0xFD;
interrupt = true;
}
else
{
keyInput |= 0x02;
}
if (keySelect || padSelect) { //Shift //Select
keyInput &= 0xFB;
interrupt = true;
}
else
{
keyInput |= 0x04;
}
if (keyStart || padStart) { //Enter //Start
keyInput &= 0xF7;
interrupt = true;
}
else
{
keyInput |= 0x08;
}
}
else if (((keyInput & 0x20) >> 5) == 1)//(keyInput == 0x20)
{
if (!((keyRight || padRight) && (keyLeft || padLeft))) //Detect if both inputs are NOT enabled at once
{
if (keyRight || padRight)
{
keyInput &= 0xFE;
interrupt = true;
}
else
{
keyInput |= 0x01;
}
if (keyLeft || padLeft)
{
keyInput &= 0xFD;
interrupt = true;
}
else
{
keyInput |= 0x02;
}
}
else //To solve the issue of multiple key inputs on one axis, we ignore input when both left and right are pressed at the same time.
{
keyInput |= 0x01;
keyInput |= 0x02;
}
if (!((keyUp || padUp) && (keyDown || padDown))) //Detect if both inputs are NOT enabled at once
{
if (keyUp || padUp)
{
keyInput &= 0xFB;
interrupt = true;
}
else
{
keyInput |= 0x04;
}
if (keyDown || padDown)
{
keyInput &= 0xF7;
interrupt = true;
}
else
{
keyInput |= 0x08;
}
}
else //To solve the issue of multiple key inputs on one axis, we ignore input when both up and down are pressed at the same time.
{
keyInput |= 0x04;
keyInput |= 0x08;
}
}
else
{
keyInput |= 0x01;
keyInput |= 0x02;
keyInput |= 0x04;
keyInput |= 0x08;
}
//Bit 7 and 6 are always 1
keyInput |= 0x80; //Bit 7
keyInput |= 0x40; //Bit 6
if (interrupt)
{
memory.writeMemory(0xFF0F, (unsigned char)(memory.readMemory(0xFF0F) | 0x10));
}<|fim▁hole|>
void Input::setKeyInput(int keyCode, bool enabled)
{
cpu.setStop(false);
switch (keyCode)
{
case 0:
{
keyUp = enabled;
break;
}
case 1:
{
keyDown = enabled;
break;
}
case 2:
{
keyLeft = enabled;
break;
}
case 3:
{
keyRight = enabled;
break;
}
case 4:
{
keyStart = enabled;
break;
}
case 5:
{
keySelect = enabled;
break;
}
case 6:
{
keyA = enabled;
break;
}
case 7:
{
keyB = enabled;
break;
}
}
}
bool Input::eventFilter(QObject *obj, QEvent *event) {
bool keyPressed = event->type() == QEvent::KeyPress;
bool keyReleased = event->type() == QEvent::KeyRelease;
if (keyPressed || keyReleased) {
QKeyEvent *keyEvent = static_cast<QKeyEvent *>(event);
int key = keyEvent->key();
if (key == Qt::Key_Z) {
setKeyInput(6, keyPressed);
}
if (key == Qt::Key_X) {
setKeyInput(7, keyPressed);
}
if (key == Qt::Key_Return) {
setKeyInput(4, keyPressed);
}
if (key == Qt::Key_Shift) {
setKeyInput(5, keyPressed);
}
if (key == Qt::Key_Right) {
setKeyInput(3, keyPressed);
}
if (key == Qt::Key_Left) {
setKeyInput(2, keyPressed);
}
if (key == Qt::Key_Up) {
setKeyInput(0, keyPressed);
}
if (key == Qt::Key_Down) {
setKeyInput(1, keyPressed);
}
if (key == Qt::Key_F1 && keyReleased) {
memory.createSaveFile(true);
}
}
return QObject::eventFilter(obj, event);
}
void Input::resetInput() {
keyRight = false;
keyLeft = false;
keyUp = false;
keyDown = false;
keySelect = false;
keyStart = false;
keyA = false;
keyB = false;
padRight = false;
padLeft = false;
padUp = false;
padDown = false;
padSelect = false;
padStart = false;
padA = false;
padB = false;
}<|fim▁end|>
|
memory.writeMemory(0xFF00, keyInput);
}
|
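In `readInput()` above, the joypad register at 0xFF00 is filled in group by group: when bit 4 of the current value is set the A/B/Select/Start bits are updated, when bit 5 is set the direction bits are updated, a pressed key clears its bit (the low nibble is active-low), bits 6 and 7 always read as 1, and any press requests a joypad interrupt through 0xFF0F. The Python sketch below models just the button-group packing as a standalone function; it is a simplified illustration, not a drop-in replacement for the emulator code:

```python
def pack_button_group(key_input, a, b, select, start):
    """Mirror the bit twiddling in readInput(): pressed keys clear bits 0-3 (active-low)."""
    interrupt = False
    for bit, pressed in enumerate((a, b, select, start)):
        if pressed:
            key_input &= ~(1 << bit) & 0xFF
            interrupt = True  # a press raises the joypad interrupt request
        else:
            key_input |= 1 << bit
    key_input |= 0xC0  # bits 6 and 7 always read back as 1
    return key_input, interrupt

value, irq = pack_button_group(0x10, a=True, b=False, select=False, start=True)
print(hex(value), irq)  # 0xd6 True
```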
<|file_name|>many_queries.rs<|end_file_name|><|fim▁begin|>#![feature(plugin)]
#![plugin(postgres_macros)]
fn main() {
sql!("
LOCK TABLE foo IN ACCESS EXCLUSIVE MODE
");
sql!("
ALTER TABLE foo
ADD CONSTRAINT foo PRIMARY KEY (foo)
");
sql!("
ALTER TABLE foo
ADD CONSTRAINT foo
FOREIGN KEY (foo)
REFERENCES foo (foo)<|fim▁hole|>
sql!("
CREATE INDEX foo ON foo (foo)
");
sql!("
INSERT INTO foo VALUES ($1)
");
sql!("
LOCK TABLE foo IN ACCESS EXCLUSIVE MODE
");
sql!("
ALTER TABLE foo
ADD CONSTRAINT foo PRIMARY KEY (foo)
");
}<|fim▁end|>
|
ON DELETE RESTRICT
ON UPDATE RESTRICT
");
|
<|file_name|>tensor.py<|end_file_name|><|fim▁begin|>import numpy as np
from .abstract_layer import LayerBase, NoParamMixin
from ..util import zX, zX_like, white, scalX
class PoolLayer(NoParamMixin, LayerBase):
def __init__(self, filter_size, compiled=True):
LayerBase.__init__(self, activation="linear", trainable=False)
if compiled:
from ..llatomic.lltensor_op import MaxPoolOp
else:
from ..atomic import MaxPoolOp
self.fdim = filter_size
self.filter = None
self.op = MaxPoolOp()
def connect(self, brain):
ic, iy, ix = brain.outshape[-3:]
if any((iy % self.fdim, ix % self.fdim)):
raise RuntimeError(
"Incompatible shapes: {} % {}".format((ix, iy), self.fdim)
)
LayerBase.connect(self, brain)
self.output = zX(ic, iy // self.fdim, ix // self.fdim)
def feedforward(self, questions):
self.output, self.filter = self.op.forward(questions, self.fdim)
return self.output
def backpropagate(self, delta):
return self.op.backward(delta, self.filter)
@property
def outshape(self):
return self.output.shape[-3:]
def __str__(self):
return "Pool-{}x{}".format(self.fdim, self.fdim)
class ConvLayer(LayerBase):
def __init__(self, nfilters, filterx=3, filtery=3, compiled=True, **kw):
super().__init__(compiled=compiled, **kw)
self.nfilters = nfilters
self.fx = filterx
self.fy = filtery
self.depth = 0
self.stride = 1
self.inshape = None
self.op = None
def connect(self, brain):
if self.compiled:
from ..llatomic import ConvolutionOp
else:
from ..atomic import ConvolutionOp
depth, iy, ix = brain.outshape[-3:]
if any((iy < self.fy, ix < self.fx)):
raise RuntimeError(
"Incompatible shapes: iy ({}) < fy ({}) OR ix ({}) < fx ({})"
.format(iy, self.fy, ix, self.fx)
)
super().connect(brain)
self.op = ConvolutionOp()
self.inshape = brain.outshape
self.depth = depth
self.weights = white(self.nfilters, self.depth, self.fx, self.fy)
self.biases = zX(self.nfilters)[None, :, None, None]
self.nabla_b = zX_like(self.biases)
self.nabla_w = zX_like(self.weights)
def feedforward(self, X):
self.inputs = X<|fim▁hole|> def backpropagate(self, delta):
delta *= self.activation.backward(self.output)
self.nabla_w, self.nabla_b, dX = self.op.backward(X=self.inputs, E=delta, F=self.weights)
return dX
@property
def outshape(self):
oy, ox = tuple(ix - fx + 1 for ix, fx in zip(self.inshape[-2:], (self.fx, self.fy)))
return self.nfilters, ox, oy
def __str__(self):
return "Conv({}x{}x{})-{}".format(self.nfilters, self.fy, self.fx, str(self.activation)[:4])
class GlobalAveragePooling(NoParamMixin, LayerBase):
def __init__(self):
LayerBase.__init__(self)
NoParamMixin.__init__(self)
self.repeats = 0
def feedforward(self, X):
self.repeats = np.prod(X.shape[2:])
return X.mean(axis=(2, 3))
def backpropagate(self, delta):
m = len(delta)
delta = np.repeat(delta / scalX(self.repeats), self.repeats)
delta = delta.reshape((m,) + self.inshape)
return delta
@property
def outshape(self):
return self.inshape[0],<|fim▁end|>
|
self.output = self.activation.forward(self.op.forward(X, self.weights, "valid"))
self.output += self.biases
return self.output
|
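`connect()` in the two layers above encodes the usual shape rules: `PoolLayer` requires each spatial dimension to be divisible by the pool size, and the 'valid' convolution in `ConvLayer` yields `input - filter + 1` positions per spatial axis. Here is a short stand-alone Python sketch of those two computations; it is illustrative only, the function names are invented, and nothing in it depends on the framework's classes:

```python
def pool_outshape(channels, height, width, fdim):
    """Output shape of non-overlapping max pooling with a square fdim x fdim window."""
    if height % fdim or width % fdim:
        raise ValueError("Incompatible shapes: %s %% %s" % ((width, height), fdim))
    return channels, height // fdim, width // fdim

def conv_outshape(height, width, nfilters, fy, fx):
    """Output shape of a 'valid' convolution with nfilters filters of size fy x fx."""
    return nfilters, height - fy + 1, width - fx + 1

print(pool_outshape(3, 28, 28, 2))     # (3, 14, 14)
print(conv_outshape(28, 28, 8, 3, 3))  # (8, 26, 26)
```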
<|file_name|>model09.rs<|end_file_name|><|fim▁begin|>% Including a second lag of a control variable (page 56)
endogenous C N W R
exogenous ER EW
parameters h gam rhow sigw sigr b1
observables N C
<|fim▁hole|>model
N = b1*N{-2} + W - gam*C;
C = (1-h)*W+h*C{+1}+R;
W = rhow*W{-1}+sigw*EW;
R = sigr*ER;<|fim▁end|>
| |
<|file_name|>menu.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
# Support for exporting Qt's MenuBars/Menus over DBUS. The API is defined in
# dbus-menu.xml from the libdbusmenu project https://launchpad.net/libdbusmenu
import dbus, sip
from PyQt5.Qt import (
QApplication, QMenu, QIcon, QKeySequence, QObject, QEvent, QTimer, pyqtSignal, Qt)
from calibre.utils.dbus_service import Object, BusName, method as dbus_method, dbus_property, signal as dbus_signal
from calibre.gui2.dbus_export.utils import (
setup_for_cli_run, swap_mnemonic_char, key_sequence_to_dbus_shortcut, icon_to_dbus_menu_icon)
null = object()
def PropDict(mapping=()):
return dbus.Dictionary(mapping, signature='sv')
def create_properties_for_action(ac, previous=None):
ans = PropDict()
if ac.isSeparator():
ans['type'] = 'separator'
if not ac.isVisible():
ans['visible'] = False
return ans
text = ac.text() or ac.iconText()
if text:
ans['label'] = swap_mnemonic_char(text)
if not ac.isEnabled():
ans['enabled'] = False
if not ac.isVisible() or ac.property('blocked') is True:
ans['visible'] = False
if ac.menu() is not None:
ans['children-display'] = 'submenu'
if ac.isCheckable():
exclusive = ac.actionGroup() is not None and ac.actionGroup().isExclusive()
ans['toggle-type'] = 'radio' if exclusive else 'checkmark'
ans['toggle-state'] = int(ac.isChecked())
shortcuts = ac.shortcuts()
if shortcuts:
sc = dbus.Array(signature='as')
for s in shortcuts:
if not s.isEmpty():
for x in key_sequence_to_dbus_shortcut(s):
sc.append(dbus.Array(x, signature='s'))
if sc:
ans['shortcut'] = sc[:1] # Unity fails to display the shortcuts at all if more than one is specified
if ac.isIconVisibleInMenu():
icon = ac.icon()
if previous and previous.get('x-qt-icon-cache-key') == icon.cacheKey():
for x in 'icon-data x-qt-icon-cache-key'.split():
ans[x] = previous[x]
else:
data = icon_to_dbus_menu_icon(ac.icon())
if data is not None:
ans['icon-data'] = data
ans['x-qt-icon-cache-key'] = icon.cacheKey()
return ans
def menu_actions(menu):
try:
return menu.actions()
except TypeError:
if isinstance(menu, QMenu):
return QMenu.actions(menu)
raise
class DBusMenu(QObject):
handle_event_signal = pyqtSignal(object, object, object, object)
def __init__(self, object_path, parent=None, bus=None):
QObject.__init__(self, parent)
# Unity barfs if the Event DBUS method does not return immediately, so
# handle it asynchronously
self.handle_event_signal.connect(self.handle_event, type=Qt.QueuedConnection)
self.dbus_api = DBusMenuAPI(self, object_path, bus=bus)
self.set_status = self.dbus_api.set_status
self._next_id = 0
self.action_changed_timer = t = QTimer(self)
t.setInterval(0), t.setSingleShot(True), t.timeout.connect(self.actions_changed)
self.layout_changed_timer = t = QTimer(self)
t.setInterval(0), t.setSingleShot(True), t.timeout.connect(self.layouts_changed)
self.init_maps()
@property
def object_path(self):
return self.dbus_api._object_path
def init_maps(self, qmenu=None):
self.action_changes = set()
self.layout_changes = set()
self.qmenu = qmenu
self._id_to_action, self._action_to_id = {}, {}
self._action_properties = {}
@property
def next_id(self):
self._next_id += 1
return self._next_id
def id_to_action(self, action_id):
if self.qmenu is None:
return None
return self._id_to_action.get(action_id)
def action_to_id(self, action):
if self.qmenu is None:
return None
return self._action_to_id.get(action)
def action_properties(self, action_id, restrict_to=None):
if self.qmenu is None:
return {}
ans = self._action_properties.get(action_id, PropDict())
if restrict_to:
ans = PropDict({k:v for k, v in ans.iteritems() if k in restrict_to})
return ans
def publish_new_menu(self, qmenu=None):
self.init_maps(qmenu)
if qmenu is not None:
qmenu.destroyed.connect(lambda obj=None:self.publish_new_menu())
ac = qmenu.menuAction()
self.add_action(ac)
self.dbus_api.LayoutUpdated(self.dbus_api.revision, 0)
def set_visible(self, visible):
ac = self.id_to_action(0)
if ac is not None and self.qmenu is not None:
changed = False
blocked = not visible
for ac in menu_actions(ac.menu()):
ac_id = self.action_to_id(ac)
if ac_id is not None:
old = ac.property('blocked')
if old is not blocked:
ac.setProperty('blocked', blocked)
self.action_changes.add(ac_id)
changed = True
if changed:
self.action_changed_timer.start()
def add_action(self, ac):
ac_id = 0 if ac.menu() is self.qmenu else self.next_id
self._id_to_action[ac_id] = ac
self._action_to_id[ac] = ac_id
self._action_properties[ac_id] = create_properties_for_action(ac)
if ac.menu() is not None:
self.add_menu(ac.menu())
def add_menu(self, menu):
menu.installEventFilter(self)
for ac in menu_actions(menu):
self.add_action(ac)
def eventFilter(self, obj, ev):
ac = getattr(obj, 'menuAction', lambda : None)()
ac_id = self.action_to_id(ac)
if ac_id is not None:
etype = ev.type()
if etype == QEvent.ActionChanged:
ac_id = self.action_to_id(ev.action())
self.action_changes.add(ac_id)
self.action_changed_timer.start()
elif etype == QEvent.ActionAdded:
self.layout_changes.add(ac_id)
self.layout_changed_timer.start()
self.add_action(ev.action())
elif etype == QEvent.ActionRemoved:
self.layout_changes.add(ac_id)
self.layout_changed_timer.start()
self.action_removed(ev.action())
return False
def actions_changed(self):
updated_props = dbus.Array(signature='(ia{sv})')
removed_props = dbus.Array(signature='(ias)')
for ac_id in self.action_changes:
ac = self.id_to_action(ac_id)
if ac is None:
continue
old_props = self.action_properties(ac_id)
new_props = self._action_properties[ac_id] = create_properties_for_action(ac, old_props)
removed = set(old_props) - set(new_props)
if removed:
removed_props.append((ac_id, dbus.Array(removed, signature='as')))
updated = PropDict({k:v for k, v in new_props.iteritems() if v != old_props.get(k, null)})
if updated:
updated_props.append((ac_id, updated))
self.action_changes = set()
if updated_props or removed_props:
self.dbus_api.ItemsPropertiesUpdated(updated_props, removed_props)
return updated_props, removed_props
def layouts_changed(self):
changes = set()
for ac_id in self.layout_changes:
if ac_id in self._id_to_action:
changes.add(ac_id)
self.layout_changes = set()
if changes:
self.dbus_api.revision += 1
for change in changes:
self.dbus_api.LayoutUpdated(self.dbus_api.revision, change)
return changes
def action_is_in_a_menu(self, ac):
if sip.isdeleted(ac):
return False
all_menus = {a.menu() for a in self._action_to_id if not sip.isdeleted(a)}
all_menus.discard(None)
return bool(set(ac.associatedWidgets()).intersection(all_menus))
def action_removed(self, ac):
if not self.action_is_in_a_menu(ac):
ac_id = self._action_to_id.pop(ac, None)
self._id_to_action.pop(ac_id, None)
self._action_properties.pop(ac_id, None)
def get_layout(self, parent_id, depth, property_names):
# Ensure any pending updates are done, as they are needed now
self.actions_changed()
self.layouts_changed()
property_names = property_names or None
props = self.action_properties(parent_id, property_names)
return parent_id, props, self.get_layout_children(parent_id, depth, property_names)
def get_layout_children(self, parent_id, depth, property_names):
ans = dbus.Array(signature='(ia{sv}av)')
ac = self.id_to_action(parent_id)
if ac is not None and depth != 0 and ac.menu() is not None:
for child in menu_actions(ac.menu()):
child_id = self.action_to_id(child)
if child_id is not None:
props = self.action_properties(child_id, property_names)
ans.append((child_id, props, self.get_layout_children(child_id, depth - 1, property_names)))
return ans
def get_properties(self, ids=None, property_names=None):
property_names = property_names or None
ans = dbus.Array(signature='(ia{sv})')
for action_id in (ids or self._id_to_action):
ans.append((action_id, self.action_properties(action_id, property_names)))
return ans
def handle_event(self, action_id, event, data, timestamp):
ac = self.id_to_action(action_id)
if event == 'clicked':
if ac.isCheckable():
ac.toggle()
ac.triggered.emit(ac.isCheckable() and ac.isChecked())
def handle_about_to_show(self, ac):
child_ids = {self.action_to_id(x) for x in menu_actions(ac.menu())}
child_ids.discard(None)
ac_id = self.action_to_id(ac)
ac.menu().aboutToShow.emit()
if ac_id in self.layout_changes or child_ids.intersection(self.action_changes):
return True
return False
class DBusMenuAPI(Object):
IFACE = 'com.canonical.dbusmenu'
def __init__(self, menu, object_path, bus=None):
if bus is None:
bus = dbus.SessionBus()<|fim▁hole|>
@dbus_property(IFACE, signature='u')
def Version(self):
return 3 # GTK 3 uses 3, KDE 4 uses 2
@dbus_property(IFACE, signature='s', emits_changed_signal=True)
def Status(self):
return self.status
def set_status(self, normal=True):
self.status = 'normal' if normal else 'notice'
self.PropertiesChanged(self.IFACE, {'Status': self.status}, [])
@dbus_property(IFACE, signature='s')
def TextDirection(self):
return 'ltr' if QApplication.instance().isLeftToRight() else 'rtl'
@dbus_property(IFACE, signature='as')
def IconThemePath(self):
return dbus.Array(signature='s')
@dbus_method(IFACE, in_signature='iias', out_signature='u(ia{sv}av)')
def GetLayout(self, parentId, recursionDepth, propertyNames):
layout = self.menu.get_layout(parentId, recursionDepth, propertyNames)
return self.revision, layout
@dbus_method(IFACE, in_signature='aias', out_signature='a(ia{sv})')
def GetGroupProperties(self, ids, propertyNames):
return self.menu.get_properties(ids, propertyNames)
@dbus_method(IFACE, in_signature='is', out_signature='v')
def GetProperty(self, id, name):
return self.menu.action_properties(id).get(name, '')
@dbus_method(IFACE, in_signature='isvu', out_signature='')
def Event(self, id, eventId, data, timestamp):
''' This is called by the applet to notify the application an event happened on a
menu item. eventId can be one of the following::
* "clicked"
* "hovered"
* "opened"
* "closed"
Vendor specific events can be added by prefixing them with "x-<vendor>-"'''
if self.menu.id_to_action(id) is not None:
self.menu.handle_event_signal.emit(id, eventId, data, timestamp)
@dbus_method(IFACE, in_signature='a(isvu)', out_signature='ai')
def EventGroup(self, events):
''' Used to pass a set of events as a single message for possibly
several different menu items. This is done to optimize DBus traffic.
Should return a list of ids that are not found. events is a list of
events in the same format as used for the Event method.'''
missing = dbus.Array(signature='u')
for id, eventId, data, timestamp in events:
if self.menu.id_to_action(id) is not None:
self.menu.handle_event_signal.emit(id, eventId, data, timestamp)
else:
missing.append(id)
return missing
@dbus_method(IFACE, in_signature='i', out_signature='b')
def AboutToShow(self, id):
ac = self.menu.id_to_action(id)
if ac is not None and ac.menu() is not None:
return self.menu.handle_about_to_show(ac)
return False
@dbus_method(IFACE, in_signature='ai', out_signature='aiai')
def AboutToShowGroup(self, ids):
updates_needed = dbus.Array(signature='i')
id_errors = dbus.Array(signature='i')
for ac_id in ids:
ac = self.menu.id_to_action(id)
if ac is not None and ac.menu() is not None:
if self.menu.handle_about_to_show(ac):
updates_needed.append(ac_id)
else:
id_errors.append(ac_id)
return updates_needed, id_errors
@dbus_signal(IFACE, 'a(ia{sv})a(ias)')
def ItemsPropertiesUpdated(self, updatedProps, removedProps):
pass
@dbus_signal(IFACE, 'ui')
def LayoutUpdated(self, revision, parent):
pass
@dbus_signal(IFACE, 'iu')
def ItemActivationRequested(self, id, timestamp):
pass
def test():
setup_for_cli_run()
app = QApplication([])
bus = dbus.SessionBus()
dbus_name = BusName('com.calibre-ebook.TestDBusMenu', bus=bus, do_not_queue=True)
m = QMenu()
ac = m.addAction(QIcon(I('window-close.png')), 'Quit', app.quit)
ac.setShortcut(QKeySequence('Ctrl+Q'))
menu = DBusMenu('/Menu', bus=bus)
menu.publish_new_menu(m)
app.exec_()
del dbus_name
if __name__ == '__main__':
test()<|fim▁end|>
|
Object.__init__(self, bus, object_path)
self.status = 'normal'
self.menu = menu
self.revision = 0
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>// Copyright 2019 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*! THIS FILE IS AUTO-GENERATED */
import {AuthPlus, getAPI, GoogleConfigurable} from 'googleapis-common';
import {translate_v2} from './v2';
import {translate_v3beta1} from './v3beta1';
export const VERSIONS = {
v2: translate_v2.Translate,
v3beta1: translate_v3beta1.Translate,
};
export function translate(version: 'v2'): translate_v2.Translate;
export function translate(
options: translate_v2.Options
): translate_v2.Translate;
export function translate(version: 'v3beta1'): translate_v3beta1.Translate;
export function translate(<|fim▁hole|>): translate_v3beta1.Translate;
export function translate<
T = translate_v2.Translate | translate_v3beta1.Translate
>(
this: GoogleConfigurable,
versionOrOptions:
| 'v2'
| translate_v2.Options
| 'v3beta1'
| translate_v3beta1.Options
) {
return getAPI<T>('translate', versionOrOptions, VERSIONS, this);
}
const auth = new AuthPlus();
export {auth};<|fim▁end|>
|
options: translate_v3beta1.Options
|
<|file_name|>issue-7012.rs<|end_file_name|><|fim▁begin|>// run-pass
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
/*
# Comparison of static arrays
The expected behaviour would be that `test == test1`, therefore 'true'
would be printed, however the below prints false.
*/
struct signature<'a> { pattern : &'a [u32] }<|fim▁hole|>
static test1: signature<'static> = signature {
pattern: &[0x243f6a88,0x85a308d3,0x13198a2e,0x03707344,0xa4093822,0x299f31d0]
};
pub fn main() {
let test: &[u32] = &[0x243f6a88,0x85a308d3,0x13198a2e,
0x03707344,0xa4093822,0x299f31d0];
println!("{}",test==test1.pattern);
}<|fim▁end|>
| |
<|file_name|>globalsystems.rs<|end_file_name|><|fim▁begin|>extern mod glfw;
extern mod std;
use components::{Components,texture_from_uint};
//GLOBAL SYSTEM DEFINITIONS
pub trait GlobalSystem {
fn process(&mut self, window: &glfw::Window) -> ();
}
pub struct ScoreUpdateSystem {
paddle: @Components,
counter: @Components,
score: uint,
port: Port<uint>
}
impl GlobalSystem for ScoreUpdateSystem {
fn process(&mut self, _: &glfw::Window) -> () {
loop {
match self.port.try_recv() {
Some(i) => {<|fim▁hole|> }
self.counter.sprite.unwrap().texture = Some(texture_from_uint(self.score));
}
}
pub struct BotInputSystem {
paddle: @Components,
ball: @Components
}
impl GlobalSystem for BotInputSystem {
fn process(&mut self, _: &glfw::Window) -> () {
let d = self.ball.position.unwrap().y - self.paddle.position.unwrap().y;
if std::num::abs(d) > 0.2 {
if d > 0.0 {
self.paddle.vert_velocity.unwrap().y = 1.5/60.0;
} else {
self.paddle.vert_velocity.unwrap().y = -1.5/60.0;
}
} else {
self.paddle.vert_velocity.unwrap().y = 0.0;
}
}
}
pub struct KeyboardInputSystem {
paddle: @Components
}
impl GlobalSystem for KeyboardInputSystem {
fn process(&mut self, window: &glfw::Window) -> () {
let mut dir = 0.0;
if window.get_key(glfw::KeyA) == glfw::Press {
dir += 1.0;
}
if window.get_key(glfw::KeyZ) == glfw::Press {
dir -= 1.0;
}
self.paddle.vert_velocity.unwrap().y = 1.5*dir/60.0;
}
}<|fim▁end|>
|
self.score += i;
}
None => break
}
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(asm)]
extern crate core;
extern crate system;
use std::collections::BTreeMap;
use std::collections::VecDeque;
use std::io::{Read, Write, SeekFrom};
use std::mem;
use std::process::Command;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Instant;
use system::error::{Error, Result, EBADF};
use system::scheme::{Packet, Scheme};
use system::syscall::SYS_READ;
pub use self::color::Color;
pub use self::event::{Event, EventOption};
pub use self::font::Font;
pub use self::image::{Image, ImageRoi};
pub use self::rect::Rect;
pub use self::socket::Socket;
pub use self::window::Window;
use self::bmp::BmpFile;
use self::config::Config;
use self::event::{EVENT_KEY, EVENT_MOUSE, QuitEvent};
pub mod bmp;
pub mod color;
pub mod config;
#[path="../../kernel/common/event.rs"]
pub mod event;
pub mod font;
pub mod image;
pub mod rect;
pub mod socket;
pub mod window;
fn schedule(redraws: &mut Vec<Rect>, request: Rect) {
let mut push = true;
for mut rect in redraws.iter_mut() {
//If contained, ignore new redraw request
let container = rect.container(&request);
if container.area() <= rect.area() + request.area() {
*rect = container;
push = false;
break;
}
}
if push {
redraws.push(request);
}
}
struct OrbitalScheme {
start: Instant,
image: Image,
background: Image,
cursor: Image,
cursor_x: i32,
cursor_y: i32,
dragging: bool,
drag_x: i32,
drag_y: i32,
next_id: isize,
next_x: i32,
next_y: i32,
order: VecDeque<usize>,
windows: BTreeMap<usize, Window>,
redraws: Vec<Rect>,
todo: Vec<Packet>
}
impl OrbitalScheme {
fn new(width: i32, height: i32, config: &Config) -> OrbitalScheme {
OrbitalScheme {
start: Instant::now(),
image: Image::new(width, height),
background: BmpFile::from_path(&config.background),
cursor: BmpFile::from_path(&config.cursor),
cursor_x: 0,
cursor_y: 0,
dragging: false,
drag_x: 0,
drag_y: 0,
next_id: 1,
next_x: 20,
next_y: 20,
order: VecDeque::new(),
windows: BTreeMap::new(),
redraws: vec![Rect::new(0, 0, width, height)],
todo: Vec::new()
}
}
fn cursor_rect(&self) -> Rect {
Rect::new(self.cursor_x, self.cursor_y, self.cursor.width(), self.cursor.height())
}
fn screen_rect(&self) -> Rect {
Rect::new(0, 0, self.image.width(), self.image.height())
}
fn redraw(&mut self, display: &Socket){
let mut redraws = Vec::new();
mem::swap(&mut self.redraws, &mut redraws);
let screen_rect = self.screen_rect();
for mut rect in redraws.iter_mut() {
*rect = rect.intersection(&screen_rect);
if ! rect.is_empty() {
//TODO: Allow background to have different size: self.image.roi(&rect).set(Color::rgb(75, 163, 253));
self.image.roi(&rect).blit(&self.background.roi(rect));
let mut i = self.order.len();
for id in self.order.iter().rev() {
i -= 1;
if let Some(mut window) = self.windows.get_mut(&id) {
window.draw_title(&mut self.image, &rect, i == 0);
window.draw(&mut self.image, &rect);
}
}
let cursor_rect = self.cursor_rect();
let cursor_intersect = rect.intersection(&cursor_rect);
if ! cursor_intersect.is_empty() {
self.image.roi(&cursor_intersect).blend(&self.cursor.roi(&cursor_intersect.offset(-cursor_rect.left(), -cursor_rect.top())));
}
}
}
for rect in redraws.iter_mut() {
if ! rect.is_empty() {
let data = self.image.data();
for row in rect.top()..rect.bottom() {
let off1 = row * self.image.width() + rect.left();
let off2 = row * self.image.width() + rect.right();
unsafe { display.seek(SeekFrom::Start(off1 as u64)).unwrap(); }
display.send_type(&data[off1 as usize .. off2 as usize]).unwrap();
}
}
}
}
fn event(&mut self, event: Event){
if event.code == EVENT_KEY {
if event.c > 0 {
if event.b as u8 == event::K_F1 {
let cursor_rect = self.cursor_rect();
schedule(&mut self.redraws, cursor_rect);
self.cursor_x = 0;
self.cursor_y = 0;
let cursor_rect = self.cursor_rect();
schedule(&mut self.redraws, cursor_rect);
} else if event.b as u8 == event::K_F2 {
let cursor_rect = self.cursor_rect();
schedule(&mut self.redraws, cursor_rect);
self.cursor_x = self.screen_rect().width();
self.cursor_y = self.screen_rect().height();
let cursor_rect = self.cursor_rect();
schedule(&mut self.redraws, cursor_rect);
}
}
if let Some(id) = self.order.front() {
if let Some(mut window) = self.windows.get_mut(&id) {
window.event(event);
}
}
} else if event.code == EVENT_MOUSE {
if event.a as i32 != self.cursor_x || event.b as i32 != self.cursor_y {
let cursor_rect = self.cursor_rect();
schedule(&mut self.redraws, cursor_rect);
self.cursor_x = event.a as i32;
self.cursor_y = event.b as i32;
let cursor_rect = self.cursor_rect();
schedule(&mut self.redraws, cursor_rect);
}
if self.dragging {
if event.c > 0 {
if let Some(id) = self.order.front() {
if let Some(mut window) = self.windows.get_mut(&id) {
if self.drag_x != self.cursor_x || self.drag_y != self.cursor_y {
schedule(&mut self.redraws, window.title_rect());
schedule(&mut self.redraws, window.rect());
window.x += self.cursor_x - self.drag_x;
window.y += self.cursor_y - self.drag_y;
self.drag_x = self.cursor_x;
self.drag_y = self.cursor_y;
schedule(&mut self.redraws, window.title_rect());
schedule(&mut self.redraws, window.rect());
}<|fim▁hole|> } else {
self.dragging = false;
}
} else {
self.dragging = false;
}
} else {
let mut focus = 0;
let mut i = 0;
for id in self.order.iter() {
if let Some(mut window) = self.windows.get_mut(&id) {
if window.rect().contains(event.a as i32, event.b as i32) {
let mut window_event = event;
window_event.a -= window.x as i64;
window_event.b -= window.y as i64;
window.event(window_event);
if event.c > 0 {
focus = i;
}
break;
} else if window.title_rect().contains(event.a as i32, event.b as i32) {
if event.c > 0 {
focus = i;
if window.exit_contains(event.a as i32, event.b as i32) {
window.event(QuitEvent.to_event());
} else {
self.dragging = true;
self.drag_x = self.cursor_x;
self.drag_y = self.cursor_y;
}
}
break;
}
}
i += 1;
}
if focus > 0 {
//Redraw old focused window
if let Some(id) = self.order.front() {
if let Some(window) = self.windows.get(&id){
schedule(&mut self.redraws, window.title_rect());
schedule(&mut self.redraws, window.rect());
}
}
//Redraw new focused window
if let Some(id) = self.order.remove(focus) {
if let Some(window) = self.windows.get(&id){
schedule(&mut self.redraws, window.title_rect());
schedule(&mut self.redraws, window.rect());
}
self.order.push_front(id);
}
}
}
}
}
}
impl Scheme for OrbitalScheme {
fn open(&mut self, path: &str, _flags: usize, _mode: usize) -> Result<usize> {
let mut parts = path.split("/");
let flags = parts.next().unwrap_or("");
let mut async = false;
for flag in flags.chars() {
if flag == 'a' {
async = true;
}
}
let mut x = parts.next().unwrap_or("").parse::<i32>().unwrap_or(0);
let mut y = parts.next().unwrap_or("").parse::<i32>().unwrap_or(0);
let width = parts.next().unwrap_or("").parse::<i32>().unwrap_or(0);
let height = parts.next().unwrap_or("").parse::<i32>().unwrap_or(0);
let mut title = parts.next().unwrap_or("").to_string();
for part in parts {
title.push('/');
title.push_str(part);
}
let id = self.next_id as usize;
self.next_id += 1;
if self.next_id < 0 {
self.next_id = 1;
}
if x < 0 && y < 0 {
x = self.next_x;
y = self.next_y;
self.next_x += 20;
if self.next_x + 20 >= self.image.width() {
self.next_x = 20;
}
self.next_y += 20;
if self.next_y + 20 >= self.image.height() {
self.next_y = 20;
}
}
if let Some(id) = self.order.front() {
if let Some(window) = self.windows.get(&id){
schedule(&mut self.redraws, window.title_rect());
schedule(&mut self.redraws, window.rect());
}
}
let window = Window::new(x, y, width, height, title, async);
schedule(&mut self.redraws, window.title_rect());
schedule(&mut self.redraws, window.rect());
self.order.push_front(id);
self.windows.insert(id, window);
Ok(id)
}
fn read(&mut self, id: usize, buf: &mut [u8]) -> Result<usize> {
if let Some(mut window) = self.windows.get_mut(&id) {
window.read(buf)
} else {
Err(Error::new(EBADF))
}
}
fn write(&mut self, id: usize, buf: &[u8]) -> Result<usize> {
if let Some(mut window) = self.windows.get_mut(&id) {
schedule(&mut self.redraws, window.rect());
window.write(buf)
} else {
Err(Error::new(EBADF))
}
}
fn fpath(&self, id: usize, buf: &mut [u8]) -> Result<usize> {
if let Some(window) = self.windows.get(&id) {
window.path(buf)
} else {
Err(Error::new(EBADF))
}
}
fn close(&mut self, id: usize) -> Result<usize> {
self.order.retain(|&e| e != id);
if let Some(id) = self.order.front() {
if let Some(window) = self.windows.get(&id){
schedule(&mut self.redraws, window.title_rect());
schedule(&mut self.redraws, window.rect());
}
}
if let Some(window) = self.windows.remove(&id) {
schedule(&mut self.redraws, window.title_rect());
schedule(&mut self.redraws, window.rect());
Ok(0)
} else {
Err(Error::new(EBADF))
}
}
}
fn event_loop(scheme_mutex: Arc<Mutex<OrbitalScheme>>, display: Arc<Socket>, socket: Arc<Socket>){
loop {
{
let mut scheme = scheme_mutex.lock().unwrap();
scheme.redraw(&display);
}
let mut events = [Event::new(); 128];
let count = display.receive_type(&mut events).unwrap();
let mut responses = Vec::new();
{
let mut scheme = scheme_mutex.lock().unwrap();
for &event in events[.. count].iter() {
scheme.event(event);
}
let mut packets = Vec::new();
mem::swap(&mut scheme.todo, &mut packets);
for mut packet in packets.iter_mut() {
let delay = if packet.a == SYS_READ {
if let Some(window) = scheme.windows.get(&packet.b) {
window.async == false
} else {
true
}
} else {
false
};
scheme.handle(packet);
if delay && packet.a == 0 {
scheme.todo.push(*packet);
}else{
responses.push(*packet);
}
}
}
if ! responses.is_empty() {
socket.send_type(&responses).unwrap();
}
}
}
fn server_loop(scheme_mutex: Arc<Mutex<OrbitalScheme>>, display: Arc<Socket>, socket: Arc<Socket>){
loop {
{
let mut scheme = scheme_mutex.lock().unwrap();
scheme.redraw(&display);
}
let mut packets = [Packet::default(); 128];
let count = socket.receive_type(&mut packets).unwrap();
let mut responses = Vec::new();
{
let mut scheme = scheme_mutex.lock().unwrap();
for mut packet in packets[.. count].iter_mut() {
let delay = if packet.a == SYS_READ {
if let Some(window) = scheme.windows.get(&packet.b) {
window.async == false
} else {
true
}
} else {
false
};
scheme.handle(packet);
if delay && packet.a == 0 {
scheme.todo.push(*packet);
} else {
responses.push(*packet);
}
}
}
if ! responses.is_empty() {
socket.send_type(&responses).unwrap();
}
}
}
enum Status {
Starting,
Running,
Stopping
}
fn main() {
let status_mutex = Arc::new(Mutex::new(Status::Starting));
let status_daemon = status_mutex.clone();
let daemon_thread = thread::spawn(move || {
match Socket::create(":orbital").map(|socket| Arc::new(socket)) {
Ok(socket) => match Socket::open("display:").map(|display| Arc::new(display)) {
Ok(display) => {
let path = display.path().map(|path| path.into_os_string().into_string().unwrap_or(String::new())).unwrap_or(String::new());
let res = path.split(":").nth(1).unwrap_or("");
let width = res.split("/").nth(0).unwrap_or("").parse::<i32>().unwrap_or(0);
let height = res.split("/").nth(1).unwrap_or("").parse::<i32>().unwrap_or(0);
println!("orbital: found display {}x{}", width, height);
let config = Config::from_path("/etc/orbital.conf");
let scheme = Arc::new(Mutex::new(OrbitalScheme::new(width, height, &config)));
*status_daemon.lock().unwrap() = Status::Running;
let scheme_event = scheme.clone();
let display_event = display.clone();
let socket_event = socket.clone();
let server_thread = thread::spawn(move || {
server_loop(scheme, display, socket);
});
event_loop(scheme_event, display_event, socket_event);
let _ = server_thread.join();
},
Err(err) => println!("orbital: no display found: {}", err)
},
Err(err) => println!("orbital: could not register orbital: {}", err)
}
*status_daemon.lock().unwrap() = Status::Stopping;
});
'waiting: loop {
match *status_mutex.lock().unwrap() {
Status::Starting => (),
Status::Running => {
Command::new("launcher").spawn().unwrap().wait().unwrap();
break 'waiting;
},
Status::Stopping => break 'waiting,
}
thread::sleep_ms(30);
}
daemon_thread.join().unwrap();
}<|fim▁end|>
|
} else {
self.dragging = false;
}
|
<|file_name|>make_in_dir.rs<|end_file_name|><|fim▁begin|>use prelude::*;
pub fn make_inode_in_dir(fs: &mut Filesystem, dir_ino: u64,
name: &[u8], mode: Mode, attr: FileAttr) -> Result<Inode><|fim▁hole|> "Inode {} is not a directory", dir_ino)));
}
let dir_group = get_ino_group(fs, dir_ino).0;
let new_ino = match try!(alloc_inode(fs, dir_group)) {
None => return Err(Error::new(format!("No free inodes left"))),
Some(ino) => ino,
};
let mut new_inode = try!(init_inode(fs, &mut dir_inode, new_ino, mode, attr));
try!(add_dir_entry(fs, &mut dir_inode, &mut new_inode, name));
Ok(new_inode)
}
pub fn make_symlink_in_dir(fs: &mut Filesystem, dir_ino: u64,
name: &[u8], link: &[u8], attr: FileAttr) -> Result<Inode>
{
let mode = Mode {
file_type: FileType::Symlink,
suid: false, sgid: false, sticky: false,
access_rights: 0o777,
};
let mut inode = try!(make_inode_in_dir(fs, dir_ino, name, mode, attr));
try!(write_link_data(fs, &mut inode, link));
Ok(inode)
}
pub fn make_hardlink_in_dir(fs: &mut Filesystem, dir_ino: u64,
name: &[u8], link_ino: u64) -> Result<Inode>
{
let mut dir_inode = try!(get_inode(fs, dir_ino));
let mut link_inode = try!(get_inode(fs, link_ino));
if dir_inode.mode.file_type != FileType::Dir {
return Err(Error::new(format!("Inode {} is not a directory", dir_ino)));
} else if link_inode.mode.file_type == FileType::Dir {
return Err(Error::new(format!("Inode {} is a directory", link_ino)));
}
try!(add_dir_entry(fs, &mut dir_inode, &mut link_inode, name));
Ok(link_inode)
}<|fim▁end|>
|
{
let mut dir_inode = try!(get_inode(fs, dir_ino));
if dir_inode.mode.file_type != FileType::Dir {
return Err(Error::new(format!(
|
<|file_name|>DisplaySteerableBase.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> */
package boofcv.demonstrations.imageprocessing;
import boofcv.abst.distort.FDistort;
import boofcv.alg.filter.kernel.GKernelMath;
import boofcv.alg.filter.kernel.SteerableKernel;
import boofcv.alg.misc.GImageStatistics;
import boofcv.core.image.GeneralizedImageOps;
import boofcv.gui.ListDisplayPanel;
import boofcv.gui.SelectAlgorithmPanel;
import boofcv.gui.image.VisualizeImageData;
import boofcv.struct.convolve.Kernel2D;
import boofcv.struct.image.ImageGray;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;
/**
* Visualizes steerable kernels.
*
* @author Peter Abeles
*/
public abstract class DisplaySteerableBase<T extends ImageGray<T>, K extends Kernel2D>
extends SelectAlgorithmPanel {
protected static int imageSize = 400;
protected static int radius = 100;
protected Class<T> imageType;
protected Class<K> kernelType;
ListDisplayPanel basisPanel = new ListDisplayPanel();
ListDisplayPanel steerPanel = new ListDisplayPanel();
T largeImg;
List<DisplayGaussianKernelApp.DerivType> order = new ArrayList<>();
protected DisplaySteerableBase( Class<T> imageType, Class<K> kernelType ) {
this.imageType = imageType;
this.kernelType = kernelType;
largeImg = GeneralizedImageOps.createSingleBand(imageType, imageSize, imageSize);
addAlgorithm("Deriv X", new DisplayGaussianKernelApp.DerivType(1, 0));
addAlgorithm("Deriv XX", new DisplayGaussianKernelApp.DerivType(2, 0));
addAlgorithm("Deriv XXX", new DisplayGaussianKernelApp.DerivType(3, 0));
addAlgorithm("Deriv XXXX", new DisplayGaussianKernelApp.DerivType(4, 0));
addAlgorithm("Deriv XY", new DisplayGaussianKernelApp.DerivType(1, 1));
addAlgorithm("Deriv XXY", new DisplayGaussianKernelApp.DerivType(2, 1));
addAlgorithm("Deriv XYY", new DisplayGaussianKernelApp.DerivType(1, 2));
addAlgorithm("Deriv XXXY", new DisplayGaussianKernelApp.DerivType(3, 1));
addAlgorithm("Deriv XXYY", new DisplayGaussianKernelApp.DerivType(2, 2));
addAlgorithm("Deriv XYYY", new DisplayGaussianKernelApp.DerivType(1, 3));
JPanel content = new JPanel(new GridLayout(0, 2));
content.add(basisPanel);
content.add(steerPanel);
setMainGUI(content);
}
protected abstract SteerableKernel<K> createKernel( int orderX, int orderY );
@Override
public void setActiveAlgorithm( String name, Object cookie ) {
DisplayGaussianKernelApp.DerivType dt = (DisplayGaussianKernelApp.DerivType)cookie;
// add basis
SteerableKernel<K> steerable = createKernel(dt.orderX, dt.orderY);
basisPanel.reset();
for (int i = 0; i < steerable.getBasisSize(); i++) {
T smallImg = GKernelMath.convertToImage(steerable.getBasis(i));
new FDistort(smallImg, largeImg).scaleExt().interpNN().apply();
double maxValue = GImageStatistics.maxAbs(largeImg);
BufferedImage out = VisualizeImageData.colorizeSign(largeImg, null, maxValue);
basisPanel.addImage(out, "Basis " + i);
}
// add steered kernels
steerPanel.reset();
for (int i = 0; i <= 20; i++) {
double angle = Math.PI*i/20.0;
K kernel = steerable.compute(angle);
T smallImg = GKernelMath.convertToImage(kernel);
new FDistort(smallImg, largeImg).scaleExt().interpNN().apply();
double maxValue = GImageStatistics.maxAbs(largeImg);
BufferedImage out = VisualizeImageData.colorizeSign(largeImg, null, maxValue);
steerPanel.addImage(out, String.format("%5d", (int)(180.0*angle/Math.PI)));
}
repaint();
}
}<|fim▁end|>
|
* See the License for the specific language governing permissions and
* limitations under the License.
|
<|file_name|>not_op.py<|end_file_name|><|fim▁begin|># encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import, division, unicode_literals
from jx_base.expressions import NotOp as NotOp_
from jx_elasticsearch.es52.painless._utils import Painless
from jx_elasticsearch.es52.painless.es_script import EsScript
from jx_elasticsearch.es52.painless.false_op import false_script<|fim▁hole|>from jx_elasticsearch.es52.painless.true_op import true_script
from mo_json import BOOLEAN
class NotOp(NotOp_):
def to_es_script(self, schema, not_null=False, boolean=False, many=True):
value = self.term.partial_eval(Painless).to_es_script(schema)
if value is false_script:
return true_script
elif value is true_script:
return false_script
elif value is null_script:
return null_script
return EsScript(
type=BOOLEAN, expr="!(" + value.expr + ")", frum=self, schema=schema,
)<|fim▁end|>
|
from jx_elasticsearch.es52.painless.null_op import null_script
|
<|file_name|>testconn.go<|end_file_name|><|fim▁begin|>package conn
import (
"sync"
"time"
)
type testConn struct {
rd <-chan []byte
wt chan<- []byte
rlock *sync.Mutex
wlock *sync.Mutex
readDeadline <-chan time.Time
writeDeadline <-chan time.Time
closed chan struct{}
wtCloseWait *sync.WaitGroup
}
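// TestPair wires two in-memory connections back to back: whatever one side
// Writes, the other side Reads, which is handy for exercising protocol code
// without a real socket. A minimal usage sketch (assuming the Conn interface
// defined elsewhere in this package):
//
//	c1, c2 := TestPair()
//	go func() { _ = c1.Write([]byte("ping")) }()
//	msg, err := c2.Read() // msg is []byte("ping") when err is nil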
func TestPair() (c1 Conn, c2 Conn) {
a := make(chan []byte)
b := make(chan []byte)
c1 = &testConn{
rd: a,
wt: b,
rlock: &sync.Mutex{},
wlock: &sync.Mutex{},
closed: make(chan struct{}),
wtCloseWait: &sync.WaitGroup{},
}
c2 = &testConn{
rd: b,
wt: a,
rlock: &sync.Mutex{},
wlock: &sync.Mutex{},
closed: make(chan struct{}),
wtCloseWait: &sync.WaitGroup{},
}
return c1, c2
}
func (conn *testConn) isClosed() bool {
select {
case <-conn.closed:
return true
default:
return false
}
}
func (conn *testConn) Read() (msg []byte, err error) {
if conn.isClosed() {
return nil, ErrClosed<|fim▁hole|> conn.rlock.Lock()
defer conn.rlock.Unlock()
select {
case <-conn.closed:
return nil, ErrClosed
case <-conn.readDeadline:
return nil, ErrReadTimeout
case data, ok := <-conn.rd:
if !ok {
if !conn.isClosed() {
conn.Close()
}
return data, ErrClosed
}
return data, nil
}
}
func (conn *testConn) Write(msg []byte) error {
if conn.isClosed() {
return ErrClosed
}
conn.wlock.Lock()
defer conn.wlock.Unlock()
select {
case <-conn.closed:
return ErrClosed
case <-conn.writeDeadline:
return ErrWriteTimeout
case conn.wt <- msg:
return nil
}
}
func (conn *testConn) Close() {
if !conn.isClosed() {
close(conn.closed)
conn.wlock.Lock()
close(conn.wt)
conn.wlock.Unlock()
}
}
func (conn *testConn) SetReadDeadline(t time.Time) error {
if conn.isClosed() {
return ErrClosed
}
conn.readDeadline = time.After(t.Sub(time.Now()))
return nil
}
func (conn *testConn) SetWriteDeadline(t time.Time) error {
if conn.isClosed() {
return ErrClosed
}
conn.writeDeadline = time.After(t.Sub(time.Now()))
return nil
}<|fim▁end|>
|
}
|
<|file_name|>default_settings.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
"""
this is the default settings, don't insert into your customized settings!
"""
DEBUG = True
TESTING = True
SECRET_KEY = "5L)0K%,i.;*i/s("
SECURITY_SALT = "sleiuyyao"
# DB config<|fim▁hole|>SQLALCHEMY_ECHO = True
UPLOADS_DEFAULT_DEST = 'uploads'
LOG_FILE = 'log.txt'
ERROR_LOG_RECIPIENTS = []
# Flask-Mail related configuration, refer to
# `http://pythonhosted.org/flask-mail/#configuring-flask-mail`
MAIL_SERVER = 'smtp.foo.com'
MAIL_USERNAME = 'username'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = '[email protected]'
FREEZER_RELATIVE_URLS = False<|fim▁end|>
|
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
|
<|file_name|>p210_Course_Schedule_II.py<|end_file_name|><|fim▁begin|>'''
- Leetcode problem: 210
- Difficulty: Medium
- Brief problem description:
There are a total of n courses you have to take, labeled from 0 to n-1.
Some courses may have prerequisites, for example to take course 0 you have to first take course 1, which is expressed as a pair: [0,1]
Given the total number of courses and a list of prerequisite pairs, return the ordering of courses you should take to finish all courses.
There may be multiple correct orders, you just need to return one of them. If it is impossible to finish all courses, return an empty array.
Example 1:
Input: 2, [[1,0]]
Output: [0,1]
Explanation: There are a total of 2 courses to take. To take course 1 you should have finished
course 0. So the correct course order is [0,1] .
Example 2:
Input: 4, [[1,0],[2,0],[3,1],[3,2]]
Output: [0,1,2,3] or [0,2,1,3]
Explanation: There are a total of 4 courses to take. To take course 3 you should have finished both
courses 1 and 2. Both courses 1 and 2 should be taken after you finished course 0.
So one correct course order is [0,1,2,3]. Another correct ordering is [0,2,1,3] .
Note:
The input prerequisites is a graph represented by a list of edges, not adjacency matrices. Read more about how a graph is represented.
You may assume that there are no duplicate edges in the input prerequisites.
- Solution Summary:
Topological sort
- Used Resources:
--- Bo Zhou
'''
from collections import defaultdict, deque
from typing import List


class Solution:
def findOrder(self, numCourses: int, prerequisites: List[List[int]]) -> List[int]:
dag = defaultdict(list)
in_degree = {}
for p in prerequisites:
in_degree[p[0]] = in_degree.get(p[0], 0) + 1
dag[p[1]].append(p[0])
zero_dq = deque()
for i in range(numCourses):
if not in_degree.get(i):<|fim▁hole|> zero_dq.append(i)
ordered_course = []
while zero_dq:
course = zero_dq.popleft()
ordered_course.append(course)
nb = dag.get(course, [])
for c in nb:
in_degree[c] = in_degree.get(c) - 1
if in_degree[c] == 0:
zero_dq.append(c)
if len(ordered_course) == numCourses:
return ordered_course
else:
return []<|fim▁end|>
| |
<|file_name|>ContainerList.react.js<|end_file_name|><|fim▁begin|>import React from 'react/addons';
import ContainerListItem from './ContainerListItem.react';
var ContainerList = React.createClass({
componentWillMount: function () {
this.start = Date.now();
},
render: function () {
var containers = this.props.containers.map(container => {
return (
<ContainerListItem key={container.Id} container={container} start={this.start} />
);
});
return (
<ul>
{containers}
</ul>
);
}<|fim▁hole|><|fim▁end|>
|
});
module.exports = ContainerList;
|
<|file_name|>fix_rigid.cpp<|end_file_name|><|fim▁begin|>/* ----------------------------------------------------------------------
LAMMPS - Large-scale Atomic/Molecular Massively Parallel Simulator
http://lammps.sandia.gov, Sandia National Laboratories
Steve Plimpton, [email protected]
Copyright (2003) Sandia Corporation. Under the terms of Contract
DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
certain rights in this software. This software is distributed under
the GNU General Public License.
See the README file in the top-level LAMMPS directory.
------------------------------------------------------------------------- */
#include "fix_rigid.h"
#include <mpi.h>
#include <cmath>
#include <cstdlib>
#include <cstring>
#include "math_extra.h"
#include "atom.h"
#include "atom_vec_ellipsoid.h"
#include "atom_vec_line.h"
#include "atom_vec_tri.h"
#include "domain.h"
#include "update.h"
#include "respa.h"
#include "modify.h"
#include "group.h"
#include "comm.h"
#include "random_mars.h"
#include "force.h"
#include "input.h"
#include "variable.h"
#include "math_const.h"
#include "memory.h"
#include "error.h"
#include "rigid_const.h"
using namespace LAMMPS_NS;
using namespace FixConst;
using namespace MathConst;
using namespace RigidConst;
/* ---------------------------------------------------------------------- */
FixRigid::FixRigid(LAMMPS *lmp, int narg, char **arg) :
Fix(lmp, narg, arg), step_respa(NULL),
inpfile(NULL), nrigid(NULL), mol2body(NULL), body2mol(NULL),
body(NULL), displace(NULL), masstotal(NULL), xcm(NULL),
vcm(NULL), fcm(NULL), inertia(NULL), ex_space(NULL),
ey_space(NULL), ez_space(NULL), angmom(NULL), omega(NULL),
torque(NULL), quat(NULL), imagebody(NULL), fflag(NULL),
tflag(NULL), langextra(NULL), sum(NULL), all(NULL),
remapflag(NULL), xcmimage(NULL), eflags(NULL), orient(NULL),
dorient(NULL), id_dilate(NULL), id_gravity(NULL), random(NULL),
avec_ellipsoid(NULL), avec_line(NULL), avec_tri(NULL)
{
int i,ibody;
scalar_flag = 1;
extscalar = 0;
time_integrate = 1;
rigid_flag = 1;
virial_flag = 1;
thermo_virial = 1;
create_attribute = 1;
dof_flag = 1;
enforce2d_flag = 1;
MPI_Comm_rank(world,&me);
MPI_Comm_size(world,&nprocs);
// perform initial allocation of atom-based arrays
// register with Atom class
extended = orientflag = dorientflag = 0;
body = NULL;
xcmimage = NULL;
displace = NULL;
eflags = NULL;
orient = NULL;
dorient = NULL;
grow_arrays(atom->nmax);
atom->add_callback(0);
// parse args for rigid body specification
// set nbody and body[i] for each atom
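  // illustrative command lines this parser is written to accept, following the
  // usual "fix ID group-ID rigid bodystyle ..." form (examples are assumptions
  // given for orientation, not taken from this file):
  //   fix 1 clump rigid single
  //   fix 2 fluid rigid molecule langevin 1.0 1.0 1.0 428984
  //   fix 3 rods  rigid group 3 clump1 clump2 clump3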
if (narg < 4) error->all(FLERR,"Illegal fix rigid command");
int iarg;
mol2body = NULL;
body2mol = NULL;
// single rigid body
// nbody = 1
// all atoms in fix group are part of body
if (strcmp(arg[3],"single") == 0) {
rstyle = SINGLE;
iarg = 4;
nbody = 1;
int *mask = atom->mask;
int nlocal = atom->nlocal;
for (i = 0; i < nlocal; i++) {
body[i] = -1;
if (mask[i] & groupbit) body[i] = 0;
}
// each molecule in fix group is a rigid body
// maxmol = largest molecule ID
// ncount = # of atoms in each molecule (have to sum across procs)
// nbody = # of non-zero ncount values
// use nall as incremented ptr to set body[] values for each atom
} else if (strcmp(arg[3],"molecule") == 0 || strcmp(arg[3],"custom") == 0) {
rstyle = MOLECULE;
tagint *molecule;
int *mask = atom->mask;
int nlocal = atom->nlocal;
int custom_flag = strcmp(arg[3],"custom") == 0;
if (custom_flag) {
if (narg < 5) error->all(FLERR,"Illegal fix rigid command");
// determine whether atom-style variable or atom property is used
if (strstr(arg[4],"i_") == arg[4]) {
int is_double=0;
int custom_index = atom->find_custom(arg[4]+2,is_double);
if (custom_index == -1)
error->all(FLERR,"Fix rigid custom requires "
"previously defined property/atom");
else if (is_double)
error->all(FLERR,"Fix rigid custom requires "
"integer-valued property/atom");
int minval = INT_MAX;
int *value = atom->ivector[custom_index];
for (i = 0; i < nlocal; i++)
if (mask[i] & groupbit) minval = MIN(minval,value[i]);
int vmin = minval;
MPI_Allreduce(&vmin,&minval,1,MPI_INT,MPI_MIN,world);
molecule = new tagint[nlocal];
for (i = 0; i < nlocal; i++)
if (mask[i] & groupbit)
molecule[i] = (tagint)(value[i] - minval + 1);
else
molecule[i] = 0;
} else if (strstr(arg[4],"v_") == arg[4]) {
int ivariable = input->variable->find(arg[4]+2);
if (ivariable < 0)
error->all(FLERR,"Variable name for fix rigid custom does not exist");
if (input->variable->atomstyle(ivariable) == 0)
error->all(FLERR,"Fix rigid custom variable is no atom-style variable");
double *value = new double[nlocal];
input->variable->compute_atom(ivariable,0,value,1,0);
int minval = INT_MAX;
for (i = 0; i < nlocal; i++)
if (mask[i] & groupbit) minval = MIN(minval,(int)value[i]);
int vmin = minval;
MPI_Allreduce(&vmin,&minval,1,MPI_INT,MPI_MIN,world);
molecule = new tagint[nlocal];
for (i = 0; i < nlocal; i++)
if (mask[i] & groupbit)
molecule[i] = (tagint)((tagint)value[i] - minval + 1);
delete[] value;
} else error->all(FLERR,"Unsupported fix rigid custom property");
} else {
if (atom->molecule_flag == 0)
error->all(FLERR,"Fix rigid molecule requires atom attribute molecule");
molecule = atom->molecule;
}
iarg = 4 + custom_flag;
tagint maxmol_tag = -1;
for (i = 0; i < nlocal; i++)
if (mask[i] & groupbit) maxmol_tag = MAX(maxmol_tag,molecule[i]);
tagint itmp;
MPI_Allreduce(&maxmol_tag,&itmp,1,MPI_LMP_TAGINT,MPI_MAX,world);
if (itmp+1 > MAXSMALLINT)
error->all(FLERR,"Too many molecules for fix rigid");
maxmol = (int) itmp;
int *ncount;
memory->create(ncount,maxmol+1,"rigid:ncount");
for (i = 0; i <= maxmol; i++) ncount[i] = 0;
for (i = 0; i < nlocal; i++)
if (mask[i] & groupbit) ncount[molecule[i]]++;
memory->create(mol2body,maxmol+1,"rigid:mol2body");
MPI_Allreduce(ncount,mol2body,maxmol+1,MPI_INT,MPI_SUM,world);
nbody = 0;
for (i = 0; i <= maxmol; i++)
if (mol2body[i]) mol2body[i] = nbody++;
else mol2body[i] = -1;
memory->create(body2mol,nbody,"rigid:body2mol");
nbody = 0;
for (i = 0; i <= maxmol; i++)
if (mol2body[i] >= 0) body2mol[nbody++] = i;
for (i = 0; i < nlocal; i++) {
body[i] = -1;
if (mask[i] & groupbit) body[i] = mol2body[molecule[i]];
}
memory->destroy(ncount);
if (custom_flag) delete [] molecule;
// each listed group is a rigid body
// check if all listed groups exist
// an atom must belong to fix group and listed group to be in rigid body
// error if atom belongs to more than 1 rigid body
} else if (strcmp(arg[3],"group") == 0) {
if (narg < 5) error->all(FLERR,"Illegal fix rigid command");
rstyle = GROUP;
nbody = force->inumeric(FLERR,arg[4]);
if (nbody <= 0) error->all(FLERR,"Illegal fix rigid command");
if (narg < 5+nbody) error->all(FLERR,"Illegal fix rigid command");
iarg = 5+nbody;
int *igroups = new int[nbody];
for (ibody = 0; ibody < nbody; ibody++) {
igroups[ibody] = group->find(arg[5+ibody]);
if (igroups[ibody] == -1)
error->all(FLERR,"Could not find fix rigid group ID");
}
int *mask = atom->mask;
int nlocal = atom->nlocal;
int flag = 0;
for (i = 0; i < nlocal; i++) {
body[i] = -1;
if (mask[i] & groupbit)
for (ibody = 0; ibody < nbody; ibody++)
if (mask[i] & group->bitmask[igroups[ibody]]) {
if (body[i] >= 0) flag = 1;
body[i] = ibody;
}
}
int flagall;
MPI_Allreduce(&flag,&flagall,1,MPI_INT,MPI_SUM,world);
if (flagall)
error->all(FLERR,"One or more atoms belong to multiple rigid bodies");
delete [] igroups;
} else error->all(FLERR,"Illegal fix rigid command");
// error check on nbody
if (nbody == 0) error->all(FLERR,"No rigid bodies defined");
// create all nbody-length arrays
memory->create(nrigid,nbody,"rigid:nrigid");
memory->create(masstotal,nbody,"rigid:masstotal");
memory->create(xcm,nbody,3,"rigid:xcm");
memory->create(vcm,nbody,3,"rigid:vcm");
memory->create(fcm,nbody,3,"rigid:fcm");
memory->create(inertia,nbody,3,"rigid:inertia");
memory->create(ex_space,nbody,3,"rigid:ex_space");
memory->create(ey_space,nbody,3,"rigid:ey_space");
memory->create(ez_space,nbody,3,"rigid:ez_space");
memory->create(angmom,nbody,3,"rigid:angmom");
memory->create(omega,nbody,3,"rigid:omega");
memory->create(torque,nbody,3,"rigid:torque");
memory->create(quat,nbody,4,"rigid:quat");
memory->create(imagebody,nbody,"rigid:imagebody");
memory->create(fflag,nbody,3,"rigid:fflag");
memory->create(tflag,nbody,3,"rigid:tflag");
memory->create(langextra,nbody,6,"rigid:langextra");
memory->create(sum,nbody,6,"rigid:sum");
memory->create(all,nbody,6,"rigid:all");
memory->create(remapflag,nbody,4,"rigid:remapflag");
// initialize force/torque flags to default = 1.0
// for 2d: fz, tx, ty = 0.0
array_flag = 1;
size_array_rows = nbody;
size_array_cols = 15;
global_freq = 1;
extarray = 0;
for (i = 0; i < nbody; i++) {
fflag[i][0] = fflag[i][1] = fflag[i][2] = 1.0;
tflag[i][0] = tflag[i][1] = tflag[i][2] = 1.0;
if (domain->dimension == 2) fflag[i][2] = tflag[i][0] = tflag[i][1] = 0.0;
}
// number of linear rigid bodies is counted later
nlinear = 0;
// parse optional args
int seed;
langflag = 0;
reinitflag = 1;
tstat_flag = 0;
pstat_flag = 0;
allremap = 1;
t_chain = 10;
t_iter = 1;
t_order = 3;
p_chain = 10;
inpfile = NULL;
id_gravity = NULL;
id_dilate = NULL;
pcouple = NONE;
pstyle = ANISO;
dimension = domain->dimension;
for (int i = 0; i < 3; i++) {
p_start[i] = p_stop[i] = p_period[i] = 0.0;
p_flag[i] = 0;
}
while (iarg < narg) {
if (strcmp(arg[iarg],"force") == 0) {
if (iarg+5 > narg) error->all(FLERR,"Illegal fix rigid command");
int mlo,mhi;
force->bounds(FLERR,arg[iarg+1],nbody,mlo,mhi);
double xflag,yflag,zflag;
if (strcmp(arg[iarg+2],"off") == 0) xflag = 0.0;
else if (strcmp(arg[iarg+2],"on") == 0) xflag = 1.0;
else error->all(FLERR,"Illegal fix rigid command");
if (strcmp(arg[iarg+3],"off") == 0) yflag = 0.0;
else if (strcmp(arg[iarg+3],"on") == 0) yflag = 1.0;
else error->all(FLERR,"Illegal fix rigid command");
if (strcmp(arg[iarg+4],"off") == 0) zflag = 0.0;
else if (strcmp(arg[iarg+4],"on") == 0) zflag = 1.0;
else error->all(FLERR,"Illegal fix rigid command");
if (domain->dimension == 2 && zflag == 1.0)
error->all(FLERR,"Fix rigid z force cannot be on for 2d simulation");
int count = 0;
for (int m = mlo; m <= mhi; m++) {
fflag[m-1][0] = xflag;
fflag[m-1][1] = yflag;
fflag[m-1][2] = zflag;
count++;
}
if (count == 0) error->all(FLERR,"Illegal fix rigid command");
iarg += 5;
} else if (strcmp(arg[iarg],"torque") == 0) {
if (iarg+5 > narg) error->all(FLERR,"Illegal fix rigid command");
int mlo,mhi;
force->bounds(FLERR,arg[iarg+1],nbody,mlo,mhi);
double xflag,yflag,zflag;
if (strcmp(arg[iarg+2],"off") == 0) xflag = 0.0;
else if (strcmp(arg[iarg+2],"on") == 0) xflag = 1.0;
else error->all(FLERR,"Illegal fix rigid command");
if (strcmp(arg[iarg+3],"off") == 0) yflag = 0.0;
else if (strcmp(arg[iarg+3],"on") == 0) yflag = 1.0;
else error->all(FLERR,"Illegal fix rigid command");
if (strcmp(arg[iarg+4],"off") == 0) zflag = 0.0;
else if (strcmp(arg[iarg+4],"on") == 0) zflag = 1.0;
else error->all(FLERR,"Illegal fix rigid command");
if (domain->dimension == 2 && (xflag == 1.0 || yflag == 1.0))
error->all(FLERR,"Fix rigid xy torque cannot be on for 2d simulation");
int count = 0;
for (int m = mlo; m <= mhi; m++) {
tflag[m-1][0] = xflag;
tflag[m-1][1] = yflag;
tflag[m-1][2] = zflag;
count++;
}
if (count == 0) error->all(FLERR,"Illegal fix rigid command");
iarg += 5;
} else if (strcmp(arg[iarg],"langevin") == 0) {
if (iarg+5 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid") != 0 && strcmp(style,"rigid/nve") != 0 &&
strcmp(style,"rigid/omp") != 0 && strcmp(style,"rigid/nve/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
langflag = 1;
t_start = force->numeric(FLERR,arg[iarg+1]);
t_stop = force->numeric(FLERR,arg[iarg+2]);
t_period = force->numeric(FLERR,arg[iarg+3]);
seed = force->inumeric(FLERR,arg[iarg+4]);
if (t_period <= 0.0)
error->all(FLERR,"Fix rigid langevin period must be > 0.0");
if (seed <= 0) error->all(FLERR,"Illegal fix rigid command");
iarg += 5;
} else if (strcmp(arg[iarg],"temp") == 0) {
if (iarg+4 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid/nvt") != 0 && strcmp(style,"rigid/npt") != 0 &&
strcmp(style,"rigid/nvt/omp") != 0 &&
strcmp(style,"rigid/npt/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
tstat_flag = 1;
t_start = force->numeric(FLERR,arg[iarg+1]);
t_stop = force->numeric(FLERR,arg[iarg+2]);
t_period = force->numeric(FLERR,arg[iarg+3]);
iarg += 4;
} else if (strcmp(arg[iarg],"iso") == 0) {
if (iarg+4 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid/npt") != 0 && strcmp(style,"rigid/nph") != 0 &&
strcmp(style,"rigid/npt/omp") != 0 &&
strcmp(style,"rigid/nph/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
pcouple = XYZ;
p_start[0] = p_start[1] = p_start[2] = force->numeric(FLERR,arg[iarg+1]);
p_stop[0] = p_stop[1] = p_stop[2] = force->numeric(FLERR,arg[iarg+2]);
p_period[0] = p_period[1] = p_period[2] =
force->numeric(FLERR,arg[iarg+3]);
p_flag[0] = p_flag[1] = p_flag[2] = 1;
if (dimension == 2) {
p_start[2] = p_stop[2] = p_period[2] = 0.0;
p_flag[2] = 0;
}
iarg += 4;
} else if (strcmp(arg[iarg],"aniso") == 0) {
if (iarg+4 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid/npt") != 0 && strcmp(style,"rigid/nph") != 0 &&
strcmp(style,"rigid/npt/omp") != 0 &&
strcmp(style,"rigid/nph/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
p_start[0] = p_start[1] = p_start[2] = force->numeric(FLERR,arg[iarg+1]);
p_stop[0] = p_stop[1] = p_stop[2] = force->numeric(FLERR,arg[iarg+2]);
p_period[0] = p_period[1] = p_period[2] =
force->numeric(FLERR,arg[iarg+3]);
p_flag[0] = p_flag[1] = p_flag[2] = 1;
if (dimension == 2) {
p_start[2] = p_stop[2] = p_period[2] = 0.0;
p_flag[2] = 0;
}
iarg += 4;
} else if (strcmp(arg[iarg],"x") == 0) {
if (iarg+4 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid/npt") != 0 && strcmp(style,"rigid/nph") != 0 &&
strcmp(style,"rigid/npt/omp") != 0 &&
strcmp(style,"rigid/nph/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
p_start[0] = force->numeric(FLERR,arg[iarg+1]);
p_stop[0] = force->numeric(FLERR,arg[iarg+2]);
p_period[0] = force->numeric(FLERR,arg[iarg+3]);
p_flag[0] = 1;
iarg += 4;
} else if (strcmp(arg[iarg],"y") == 0) {
if (iarg+4 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid/npt") != 0 && strcmp(style,"rigid/nph") != 0 &&
strcmp(style,"rigid/npt/omp") != 0 &&
strcmp(style,"rigid/nph/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
p_start[1] = force->numeric(FLERR,arg[iarg+1]);
p_stop[1] = force->numeric(FLERR,arg[iarg+2]);
p_period[1] = force->numeric(FLERR,arg[iarg+3]);
p_flag[1] = 1;
iarg += 4;
} else if (strcmp(arg[iarg],"z") == 0) {
if (iarg+4 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid/npt") != 0 && strcmp(style,"rigid/nph") != 0 &&
strcmp(style,"rigid/npt/omp") != 0 &&
strcmp(style,"rigid/nph/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
p_start[2] = force->numeric(FLERR,arg[iarg+1]);
p_stop[2] = force->numeric(FLERR,arg[iarg+2]);
p_period[2] = force->numeric(FLERR,arg[iarg+3]);
p_flag[2] = 1;
iarg += 4;
} else if (strcmp(arg[iarg],"couple") == 0) {
if (iarg+2 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(arg[iarg+1],"xyz") == 0) pcouple = XYZ;
else if (strcmp(arg[iarg+1],"xy") == 0) pcouple = XY;
else if (strcmp(arg[iarg+1],"yz") == 0) pcouple = YZ;
else if (strcmp(arg[iarg+1],"xz") == 0) pcouple = XZ;
else if (strcmp(arg[iarg+1],"none") == 0) pcouple = NONE;
else error->all(FLERR,"Illegal fix rigid command");
iarg += 2;
} else if (strcmp(arg[iarg],"dilate") == 0) {
if (iarg+2 > narg)
error->all(FLERR,"Illegal fix rigid npt/nph command");
if (strcmp(arg[iarg+1],"all") == 0) allremap = 1;
else {
allremap = 0;
delete [] id_dilate;
int n = strlen(arg[iarg+1]) + 1;
id_dilate = new char[n];
strcpy(id_dilate,arg[iarg+1]);
int idilate = group->find(id_dilate);
if (idilate == -1)
error->all(FLERR,
"Fix rigid npt/nph dilate group ID does not exist");
}
iarg += 2;
} else if (strcmp(arg[iarg],"tparam") == 0) {
if (iarg+4 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid/nvt") != 0 && strcmp(style,"rigid/npt") != 0 &&
strcmp(style,"rigid/nvt/omp") != 0 &&
strcmp(style,"rigid/npt/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
t_chain = force->inumeric(FLERR,arg[iarg+1]);
t_iter = force->inumeric(FLERR,arg[iarg+2]);
t_order = force->inumeric(FLERR,arg[iarg+3]);
iarg += 4;
} else if (strcmp(arg[iarg],"pchain") == 0) {
if (iarg+2 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp(style,"rigid/npt") != 0 && strcmp(style,"rigid/nph") != 0 &&
strcmp(style,"rigid/npt/omp") != 0 &&
strcmp(style,"rigid/nph/omp") != 0)
error->all(FLERR,"Illegal fix rigid command");
p_chain = force->inumeric(FLERR,arg[iarg+1]);
iarg += 2;
} else if (strcmp(arg[iarg],"infile") == 0) {
if (iarg+2 > narg) error->all(FLERR,"Illegal fix rigid command");
delete [] inpfile;
int n = strlen(arg[iarg+1]) + 1;
inpfile = new char[n];
strcpy(inpfile,arg[iarg+1]);
restart_file = 1;
reinitflag = 0;
iarg += 2;
} else if (strcmp(arg[iarg],"reinit") == 0) {
if (iarg+2 > narg) error->all(FLERR,"Illegal fix rigid command");
if (strcmp("yes",arg[iarg+1]) == 0) reinitflag = 1;
else if (strcmp("no",arg[iarg+1]) == 0) reinitflag = 0;
else error->all(FLERR,"Illegal fix rigid command");
iarg += 2;
} else if (strcmp(arg[iarg],"gravity") == 0) {
if (iarg+2 > narg) error->all(FLERR,"Illegal fix rigid command");
delete [] id_gravity;
int n = strlen(arg[iarg+1]) + 1;
id_gravity = new char[n];
strcpy(id_gravity,arg[iarg+1]);
iarg += 2;
} else error->all(FLERR,"Illegal fix rigid command");
}
// set pstat_flag
pstat_flag = 0;
for (int i = 0; i < 3; i++)
if (p_flag[i]) pstat_flag = 1;
if (pcouple == XYZ || (dimension == 2 && pcouple == XY)) pstyle = ISO;
else pstyle = ANISO;
// initialize Marsaglia RNG with processor-unique seed
if (langflag) random = new RanMars(lmp,seed + me);
else random = NULL;
// initialize vector output quantities in case accessed before run
for (i = 0; i < nbody; i++) {
xcm[i][0] = xcm[i][1] = xcm[i][2] = 0.0;
vcm[i][0] = vcm[i][1] = vcm[i][2] = 0.0;
fcm[i][0] = fcm[i][1] = fcm[i][2] = 0.0;
torque[i][0] = torque[i][1] = torque[i][2] = 0.0;
}
// nrigid[n] = # of atoms in Nth rigid body
// error if one or zero atoms
int *ncount = new int[nbody];
for (ibody = 0; ibody < nbody; ibody++) ncount[ibody] = 0;
int nlocal = atom->nlocal;
for (i = 0; i < nlocal; i++)
if (body[i] >= 0) ncount[body[i]]++;
MPI_Allreduce(ncount,nrigid,nbody,MPI_INT,MPI_SUM,world);
delete [] ncount;
for (ibody = 0; ibody < nbody; ibody++)
if (nrigid[ibody] <= 1) error->all(FLERR,"One or zero atoms in rigid body");
// wait to setup bodies until first init() using current atom properties
setupflag = 0;
// compute per body forces and torques at final_integrate() by default
earlyflag = 0;
// print statistics
int nsum = 0;
for (ibody = 0; ibody < nbody; ibody++) nsum += nrigid[ibody];
if (me == 0) {
if (screen) fprintf(screen,"%d rigid bodies with %d atoms\n",nbody,nsum);
if (logfile) fprintf(logfile,"%d rigid bodies with %d atoms\n",nbody,nsum);
}
}
/* ---------------------------------------------------------------------- */
FixRigid::~FixRigid()
{
// unregister callbacks to this fix from Atom class
atom->delete_callback(id,0);
delete random;
delete [] inpfile;
delete [] id_dilate;
delete [] id_gravity;
memory->destroy(mol2body);
memory->destroy(body2mol);
// delete locally stored per-atom arrays
memory->destroy(body);
memory->destroy(xcmimage);
memory->destroy(displace);
memory->destroy(eflags);
memory->destroy(orient);
memory->destroy(dorient);
// delete nbody-length arrays
memory->destroy(nrigid);
memory->destroy(masstotal);
memory->destroy(xcm);
memory->destroy(vcm);
memory->destroy(fcm);
memory->destroy(inertia);
memory->destroy(ex_space);
memory->destroy(ey_space);
memory->destroy(ez_space);
memory->destroy(angmom);
memory->destroy(omega);
memory->destroy(torque);
memory->destroy(quat);
memory->destroy(imagebody);
memory->destroy(fflag);
memory->destroy(tflag);
memory->destroy(langextra);
memory->destroy(sum);
memory->destroy(all);
memory->destroy(remapflag);
}
/* ---------------------------------------------------------------------- */
int FixRigid::setmask()
{
int mask = 0;
mask |= INITIAL_INTEGRATE;
mask |= FINAL_INTEGRATE;
if (langflag) mask |= POST_FORCE;
mask |= PRE_NEIGHBOR;
mask |= INITIAL_INTEGRATE_RESPA;
mask |= FINAL_INTEGRATE_RESPA;
return mask;
}
/* ---------------------------------------------------------------------- */
void FixRigid::init()
{
int i,ibody;
triclinic = domain->triclinic;
// atom style pointers to particles that store extra info
avec_ellipsoid = (AtomVecEllipsoid *) atom->style_match("ellipsoid");
avec_line = (AtomVecLine *) atom->style_match("line");
avec_tri = (AtomVecTri *) atom->style_match("tri");
// warn if more than one rigid fix
// if earlyflag, warn if any post-force fixes come after a rigid fix
int count = 0;
for (i = 0; i < modify->nfix; i++)
if (modify->fix[i]->rigid_flag) count++;
if (count > 1 && me == 0) error->warning(FLERR,"More than one fix rigid");
if (earlyflag) {
int rflag = 0;
for (i = 0; i < modify->nfix; i++) {
if (modify->fix[i]->rigid_flag) rflag = 1;
if (rflag && (modify->fmask[i] & POST_FORCE) &&
!modify->fix[i]->rigid_flag) {
char str[128];
snprintf(str,128,"Fix %s alters forces after fix rigid",
modify->fix[i]->id);
error->warning(FLERR,str);
}
}
}<|fim▁hole|>
// warn if body properties are read from inpfile
// and the gravity keyword is not set and a gravity fix exists
// this could mean body particles are overlapped
// and gravity is not applied correctly
if (inpfile && !id_gravity) {
for (i = 0; i < modify->nfix; i++) {
if (strcmp(modify->fix[i]->style,"gravity") == 0) {
if (comm->me == 0)
error->warning(FLERR,"Gravity may not be correctly applied "
"to rigid bodies if they consist of "
"overlapped particles");
break;
}
}
}
// error if npt,nph fix comes before rigid fix
for (i = 0; i < modify->nfix; i++) {
if (strcmp(modify->fix[i]->style,"npt") == 0) break;
if (strcmp(modify->fix[i]->style,"nph") == 0) break;
}
if (i < modify->nfix) {
for (int j = i; j < modify->nfix; j++)
if (strcmp(modify->fix[j]->style,"rigid") == 0)
error->all(FLERR,"Rigid fix must come before NPT/NPH fix");
}
// add gravity forces based on gravity vector from fix
if (id_gravity) {
int ifix = modify->find_fix(id_gravity);
if (ifix < 0) error->all(FLERR,"Fix rigid cannot find fix gravity ID");
if (strcmp(modify->fix[ifix]->style,"gravity") != 0)
error->all(FLERR,"Fix rigid gravity fix is invalid");
int tmp;
gvec = (double *) modify->fix[ifix]->extract("gvec",tmp);
}
// timestep info
dtv = update->dt;
dtf = 0.5 * update->dt * force->ftm2v;
dtq = 0.5 * update->dt;
if (strstr(update->integrate_style,"respa"))
step_respa = ((Respa *) update->integrate)->step;
// setup rigid bodies, using current atom info. if reinitflag is not set,
// do the initialization only once, b/c properties may not be re-computable
// especially if overlapping particles.
// do not do dynamic init if read body properties from inpfile.
// this is b/c the inpfile defines the static and dynamic properties and may
// not be computable if contain overlapping particles.
// setup_bodies_static() reads inpfile itself
if (reinitflag || !setupflag) {
setup_bodies_static();
if (!inpfile) setup_bodies_dynamic();
setupflag = 1;
}
// temperature scale factor
double ndof = 0.0;
for (ibody = 0; ibody < nbody; ibody++) {
ndof += fflag[ibody][0] + fflag[ibody][1] + fflag[ibody][2];
ndof += tflag[ibody][0] + tflag[ibody][1] + tflag[ibody][2];
}
ndof -= nlinear;
if (ndof > 0.0) tfactor = force->mvv2e / (ndof * force->boltz);
else tfactor = 0.0;
}
/* ----------------------------------------------------------------------
invoke pre_neighbor() to insure body xcmimage flags are reset
needed if Verlet::setup::pbc() has remapped/migrated atoms for 2nd run
------------------------------------------------------------------------- */
void FixRigid::setup_pre_neighbor()
{
pre_neighbor();
}
/* ----------------------------------------------------------------------
compute initial fcm and torque on bodies, also initial virial
reset all particle velocities to be consistent with vcm and omega
------------------------------------------------------------------------- */
void FixRigid::setup(int vflag)
{
int i,n,ibody;
// fcm = force on center-of-mass of each rigid body
double **f = atom->f;
int nlocal = atom->nlocal;
for (ibody = 0; ibody < nbody; ibody++)
for (i = 0; i < 6; i++) sum[ibody][i] = 0.0;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
sum[ibody][0] += f[i][0];
sum[ibody][1] += f[i][1];
sum[ibody][2] += f[i][2];
}
MPI_Allreduce(sum[0],all[0],6*nbody,MPI_DOUBLE,MPI_SUM,world);
for (ibody = 0; ibody < nbody; ibody++) {
fcm[ibody][0] = all[ibody][0];
fcm[ibody][1] = all[ibody][1];
fcm[ibody][2] = all[ibody][2];
}
// torque = torque on each rigid body
double **x = atom->x;
double dx,dy,dz;
double unwrap[3];
for (ibody = 0; ibody < nbody; ibody++)
for (i = 0; i < 6; i++) sum[ibody][i] = 0.0;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
domain->unmap(x[i],xcmimage[i],unwrap);
dx = unwrap[0] - xcm[ibody][0];
dy = unwrap[1] - xcm[ibody][1];
dz = unwrap[2] - xcm[ibody][2];
sum[ibody][0] += dy * f[i][2] - dz * f[i][1];
sum[ibody][1] += dz * f[i][0] - dx * f[i][2];
sum[ibody][2] += dx * f[i][1] - dy * f[i][0];
}
// extended particles add their torque to torque of body
if (extended) {
double **torque_one = atom->torque;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
if (eflags[i] & TORQUE) {
sum[ibody][0] += torque_one[i][0];
sum[ibody][1] += torque_one[i][1];
sum[ibody][2] += torque_one[i][2];
}
}
}
MPI_Allreduce(sum[0],all[0],6*nbody,MPI_DOUBLE,MPI_SUM,world);
for (ibody = 0; ibody < nbody; ibody++) {
torque[ibody][0] = all[ibody][0];
torque[ibody][1] = all[ibody][1];
torque[ibody][2] = all[ibody][2];
}
// zero langextra in case Langevin thermostat not used
// no point to calling post_force() here since langextra
// is only added to fcm/torque in final_integrate()
for (ibody = 0; ibody < nbody; ibody++)
for (i = 0; i < 6; i++) langextra[ibody][i] = 0.0;
// virial setup before call to set_v
if (vflag) v_setup(vflag);
else evflag = 0;
// set velocities from angmom & omega
for (ibody = 0; ibody < nbody; ibody++)
MathExtra::angmom_to_omega(angmom[ibody],ex_space[ibody],ey_space[ibody],
ez_space[ibody],inertia[ibody],omega[ibody]);
set_v();
// guesstimate virial as 2x the set_v contribution
if (vflag_global)
for (n = 0; n < 6; n++) virial[n] *= 2.0;
if (vflag_atom) {
for (i = 0; i < nlocal; i++)
for (n = 0; n < 6; n++)
vatom[i][n] *= 2.0;
}
}
/* ---------------------------------------------------------------------- */
void FixRigid::initial_integrate(int vflag)
{
double dtfm;
for (int ibody = 0; ibody < nbody; ibody++) {
// update vcm by 1/2 step
dtfm = dtf / masstotal[ibody];
vcm[ibody][0] += dtfm * fcm[ibody][0] * fflag[ibody][0];
vcm[ibody][1] += dtfm * fcm[ibody][1] * fflag[ibody][1];
vcm[ibody][2] += dtfm * fcm[ibody][2] * fflag[ibody][2];
// update xcm by full step
xcm[ibody][0] += dtv * vcm[ibody][0];
xcm[ibody][1] += dtv * vcm[ibody][1];
xcm[ibody][2] += dtv * vcm[ibody][2];
// update angular momentum by 1/2 step
angmom[ibody][0] += dtf * torque[ibody][0] * tflag[ibody][0];
angmom[ibody][1] += dtf * torque[ibody][1] * tflag[ibody][1];
angmom[ibody][2] += dtf * torque[ibody][2] * tflag[ibody][2];
// compute omega at 1/2 step from angmom at 1/2 step and current q
// update quaternion a full step via Richardson iteration
// returns new normalized quaternion, also updated omega at 1/2 step
// update ex,ey,ez to reflect new quaternion
MathExtra::angmom_to_omega(angmom[ibody],ex_space[ibody],ey_space[ibody],
ez_space[ibody],inertia[ibody],omega[ibody]);
MathExtra::richardson(quat[ibody],angmom[ibody],omega[ibody],
inertia[ibody],dtq);
MathExtra::q_to_exyz(quat[ibody],
ex_space[ibody],ey_space[ibody],ez_space[ibody]);
}
// virial setup before call to set_xv
if (vflag) v_setup(vflag);
else evflag = 0;
// set coords/orient and velocity/rotation of atoms in rigid bodies
// from quarternion and omega
set_xv();
}
/* ----------------------------------------------------------------------
apply Langevin thermostat to all 6 DOF of rigid bodies
computed by proc 0, broadcast to other procs
unlike fix langevin, this stores extra force in extra arrays,
which are added in when final_integrate() calculates a new fcm/torque
------------------------------------------------------------------------- */
void FixRigid::apply_langevin_thermostat()
{
if (me == 0) {
double gamma1,gamma2;
double delta = update->ntimestep - update->beginstep;
if (delta != 0.0) delta /= update->endstep - update->beginstep;
t_target = t_start + delta * (t_stop-t_start);
double tsqrt = sqrt(t_target);
double boltz = force->boltz;
double dt = update->dt;
double mvv2e = force->mvv2e;
double ftm2v = force->ftm2v;
for (int i = 0; i < nbody; i++) {
gamma1 = -masstotal[i] / t_period / ftm2v;
gamma2 = sqrt(masstotal[i]) * tsqrt *
sqrt(24.0*boltz/t_period/dt/mvv2e) / ftm2v;
langextra[i][0] = gamma1*vcm[i][0] + gamma2*(random->uniform()-0.5);
langextra[i][1] = gamma1*vcm[i][1] + gamma2*(random->uniform()-0.5);
langextra[i][2] = gamma1*vcm[i][2] + gamma2*(random->uniform()-0.5);
gamma1 = -1.0 / t_period / ftm2v;
gamma2 = tsqrt * sqrt(24.0*boltz/t_period/dt/mvv2e) / ftm2v;
langextra[i][3] = inertia[i][0]*gamma1*omega[i][0] +
sqrt(inertia[i][0])*gamma2*(random->uniform()-0.5);
langextra[i][4] = inertia[i][1]*gamma1*omega[i][1] +
sqrt(inertia[i][1])*gamma2*(random->uniform()-0.5);
langextra[i][5] = inertia[i][2]*gamma1*omega[i][2] +
sqrt(inertia[i][2])*gamma2*(random->uniform()-0.5);
}
}
MPI_Bcast(&langextra[0][0],6*nbody,MPI_DOUBLE,0,world);
}
/* ----------------------------------------------------------------------
called from FixEnforce2d post_force() for 2d problems
zero all body values that should be zero for 2d model
------------------------------------------------------------------------- */
void FixRigid::enforce2d()
{
for (int ibody = 0; ibody < nbody; ibody++) {
xcm[ibody][2] = 0.0;
vcm[ibody][2] = 0.0;
fcm[ibody][2] = 0.0;
torque[ibody][0] = 0.0;
torque[ibody][1] = 0.0;
angmom[ibody][0] = 0.0;
angmom[ibody][1] = 0.0;
omega[ibody][0] = 0.0;
omega[ibody][1] = 0.0;
if (langflag && langextra) {
langextra[ibody][2] = 0.0;
langextra[ibody][3] = 0.0;
langextra[ibody][4] = 0.0;
}
}
}
/* ---------------------------------------------------------------------- */
void FixRigid::compute_forces_and_torques()
{
int i,ibody;
// sum over atoms to get force and torque on rigid body
double **x = atom->x;
double **f = atom->f;
int nlocal = atom->nlocal;
double dx,dy,dz;
double unwrap[3];
for (ibody = 0; ibody < nbody; ibody++)
for (i = 0; i < 6; i++) sum[ibody][i] = 0.0;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
sum[ibody][0] += f[i][0];
sum[ibody][1] += f[i][1];
sum[ibody][2] += f[i][2];
domain->unmap(x[i],xcmimage[i],unwrap);
dx = unwrap[0] - xcm[ibody][0];
dy = unwrap[1] - xcm[ibody][1];
dz = unwrap[2] - xcm[ibody][2];
sum[ibody][3] += dy*f[i][2] - dz*f[i][1];
sum[ibody][4] += dz*f[i][0] - dx*f[i][2];
sum[ibody][5] += dx*f[i][1] - dy*f[i][0];
}
// extended particles add their torque to torque of body
if (extended) {
double **torque_one = atom->torque;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
if (eflags[i] & TORQUE) {
sum[ibody][3] += torque_one[i][0];
sum[ibody][4] += torque_one[i][1];
sum[ibody][5] += torque_one[i][2];
}
}
}
MPI_Allreduce(sum[0],all[0],6*nbody,MPI_DOUBLE,MPI_SUM,world);
// include Langevin thermostat forces
for (ibody = 0; ibody < nbody; ibody++) {
fcm[ibody][0] = all[ibody][0] + langextra[ibody][0];
fcm[ibody][1] = all[ibody][1] + langextra[ibody][1];
fcm[ibody][2] = all[ibody][2] + langextra[ibody][2];
torque[ibody][0] = all[ibody][3] + langextra[ibody][3];
torque[ibody][1] = all[ibody][4] + langextra[ibody][4];
torque[ibody][2] = all[ibody][5] + langextra[ibody][5];
}
// add gravity force to COM of each body
if (id_gravity) {
for (ibody = 0; ibody < nbody; ibody++) {
fcm[ibody][0] += gvec[0]*masstotal[ibody];
fcm[ibody][1] += gvec[1]*masstotal[ibody];
fcm[ibody][2] += gvec[2]*masstotal[ibody];
}
}
}
/* ---------------------------------------------------------------------- */
void FixRigid::post_force(int /*vflag*/)
{
if (langflag) apply_langevin_thermostat();
if (earlyflag) compute_forces_and_torques();
}
/* ---------------------------------------------------------------------- */
void FixRigid::final_integrate()
{
int ibody;
double dtfm;
if (!earlyflag) compute_forces_and_torques();
// update vcm and angmom
// fflag,tflag = 0 for some dimensions in 2d
for (ibody = 0; ibody < nbody; ibody++) {
// update vcm by 1/2 step
dtfm = dtf / masstotal[ibody];
vcm[ibody][0] += dtfm * fcm[ibody][0] * fflag[ibody][0];
vcm[ibody][1] += dtfm * fcm[ibody][1] * fflag[ibody][1];
vcm[ibody][2] += dtfm * fcm[ibody][2] * fflag[ibody][2];
// update angular momentum by 1/2 step
angmom[ibody][0] += dtf * torque[ibody][0] * tflag[ibody][0];
angmom[ibody][1] += dtf * torque[ibody][1] * tflag[ibody][1];
angmom[ibody][2] += dtf * torque[ibody][2] * tflag[ibody][2];
MathExtra::angmom_to_omega(angmom[ibody],ex_space[ibody],ey_space[ibody],
ez_space[ibody],inertia[ibody],omega[ibody]);
}
// set velocity/rotation of atoms in rigid bodies
// virial is already setup from initial_integrate
set_v();
}
/* ---------------------------------------------------------------------- */
void FixRigid::initial_integrate_respa(int vflag, int ilevel, int /*iloop*/)
{
dtv = step_respa[ilevel];
dtf = 0.5 * step_respa[ilevel] * force->ftm2v;
dtq = 0.5 * step_respa[ilevel];
if (ilevel == 0) initial_integrate(vflag);
else final_integrate();
}
/* ---------------------------------------------------------------------- */
void FixRigid::final_integrate_respa(int ilevel, int /*iloop*/)
{
dtf = 0.5 * step_respa[ilevel] * force->ftm2v;
final_integrate();
}
/* ----------------------------------------------------------------------
remap xcm of each rigid body back into periodic simulation box
done during pre_neighbor so will be after call to pbc()
and after fix_deform::pre_exchange() may have flipped box
use domain->remap() in case xcm is far away from box
due to first-time definition of rigid body in setup_bodies_static()
or due to box flip
also adjust imagebody = rigid body image flags, due to xcm remap
also reset body xcmimage flags of all atoms in bodies
xcmimage flags are relative to xcm so that body can be unwrapped
if don't do this, would need xcm to move with true image flags
then a body could end up very far away from box
set_xv() will then compute huge displacements every step to
reset coords of all body atoms to be back inside the box,
ditto for triclinic box flip, which causes numeric problems
------------------------------------------------------------------------- */
void FixRigid::pre_neighbor()
{
for (int ibody = 0; ibody < nbody; ibody++)
domain->remap(xcm[ibody],imagebody[ibody]);
image_shift();
}
/* ----------------------------------------------------------------------
reset body xcmimage flags of atoms in bodies
xcmimage flags are relative to xcm so that body can be unwrapped
xcmimage = true image flag - imagebody flag
------------------------------------------------------------------------- */
void FixRigid::image_shift()
{
int ibody;
imageint tdim,bdim,xdim[3];
imageint *image = atom->image;
int nlocal = atom->nlocal;
for (int i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
tdim = image[i] & IMGMASK;
bdim = imagebody[ibody] & IMGMASK;
xdim[0] = IMGMAX + tdim - bdim;
tdim = (image[i] >> IMGBITS) & IMGMASK;
bdim = (imagebody[ibody] >> IMGBITS) & IMGMASK;
xdim[1] = IMGMAX + tdim - bdim;
tdim = image[i] >> IMG2BITS;
bdim = imagebody[ibody] >> IMG2BITS;
xdim[2] = IMGMAX + tdim - bdim;
xcmimage[i] = (xdim[2] << IMG2BITS) | (xdim[1] << IMGBITS) | xdim[0];
}
}
/* ----------------------------------------------------------------------
count # of DOF removed by rigid bodies for atoms in igroup
return total count of DOF
------------------------------------------------------------------------- */
int FixRigid::dof(int tgroup)
{
// cannot count DOF correctly unless setup_bodies_static() has been called
if (!setupflag) {
if (comm->me == 0)
error->warning(FLERR,"Cannot count rigid body degrees-of-freedom "
"before bodies are initialized");
return 0;
}
int tgroupbit = group->bitmask[tgroup];
// nall = # of point particles in each rigid body
// mall = # of finite-size particles in each rigid body
// particles must also be in temperature group
int *mask = atom->mask;
int nlocal = atom->nlocal;
int *ncount = new int[nbody];
int *mcount = new int[nbody];
for (int ibody = 0; ibody < nbody; ibody++)
ncount[ibody] = mcount[ibody] = 0;
for (int i = 0; i < nlocal; i++)
if (body[i] >= 0 && mask[i] & tgroupbit) {
// do not count point particles or point dipoles as extended particles
// a spheroid dipole will be counted as extended
if (extended && (eflags[i] & ~(POINT | DIPOLE))) mcount[body[i]]++;
else ncount[body[i]]++;
}
int *nall = new int[nbody];
int *mall = new int[nbody];
MPI_Allreduce(ncount,nall,nbody,MPI_INT,MPI_SUM,world);
MPI_Allreduce(mcount,mall,nbody,MPI_INT,MPI_SUM,world);
// warn if nall+mall != nrigid for any body included in temperature group
int flag = 0;
for (int ibody = 0; ibody < nbody; ibody++) {
if (nall[ibody]+mall[ibody] > 0 &&
nall[ibody]+mall[ibody] != nrigid[ibody]) flag = 1;
}
if (flag && me == 0)
error->warning(FLERR,"Computing temperature of portions of rigid bodies");
// remove appropriate DOFs for each rigid body wholly in temperature group
// N = # of point particles in body
// M = # of finite-size particles in body
// 3d body has 3N + 6M dof to start with
// 2d body has 2N + 3M dof to start with
// 3d point-particle body with all non-zero I should have 6 dof, remove 3N-6
// 3d point-particle body (linear) with a 0 I should have 5 dof, remove 3N-5
// 2d point-particle body should have 3 dof, remove 2N-3
// 3d body with any finite-size M should have 6 dof, remove (3N+6M) - 6
// 2d body with any finite-size M should have 3 dof, remove (2N+3M) - 3
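  // e.g. a 3d body of N = 5 point particles starts with 15 dof; 15 - 6 = 9 are
  // removed (15 - 5 = 10 if the body is linear), leaving the 6 (or 5) rigid-body
  // dof that the thermostat should see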
int n = 0;
nlinear = 0;
if (domain->dimension == 3) {
for (int ibody = 0; ibody < nbody; ibody++)
if (nall[ibody]+mall[ibody] == nrigid[ibody]) {
n += 3*nall[ibody] + 6*mall[ibody] - 6;
if (inertia[ibody][0] == 0.0 || inertia[ibody][1] == 0.0 ||
inertia[ibody][2] == 0.0) {
n++;
nlinear++;
}
}
} else if (domain->dimension == 2) {
for (int ibody = 0; ibody < nbody; ibody++)
if (nall[ibody]+mall[ibody] == nrigid[ibody])
n += 2*nall[ibody] + 3*mall[ibody] - 3;
}
delete [] ncount;
delete [] mcount;
delete [] nall;
delete [] mall;
return n;
}
/* ----------------------------------------------------------------------
adjust xcm of each rigid body due to box deformation
called by various fixes that change box size/shape
flag = 0/1 means map from box to lamda coords or vice versa
------------------------------------------------------------------------- */
void FixRigid::deform(int flag)
{
if (flag == 0)
for (int ibody = 0; ibody < nbody; ibody++)
domain->x2lamda(xcm[ibody],xcm[ibody]);
else
for (int ibody = 0; ibody < nbody; ibody++)
domain->lamda2x(xcm[ibody],xcm[ibody]);
}
/* ----------------------------------------------------------------------
set space-frame coords and velocity of each atom in each rigid body
set orientation and rotation of extended particles
x = Q displace + Xcm, mapped back to periodic box
v = Vcm + (W cross (x - Xcm))
------------------------------------------------------------------------- */
void FixRigid::set_xv()
{
int ibody;
int xbox,ybox,zbox;
double x0,x1,x2,v0,v1,v2,fc0,fc1,fc2,massone;
double xy,xz,yz;
double ione[3],exone[3],eyone[3],ezone[3],vr[6],p[3][3];
double **x = atom->x;
double **v = atom->v;
double **f = atom->f;
double *rmass = atom->rmass;
double *mass = atom->mass;
int *type = atom->type;
int nlocal = atom->nlocal;
double xprd = domain->xprd;
double yprd = domain->yprd;
double zprd = domain->zprd;
if (triclinic) {
xy = domain->xy;
xz = domain->xz;
yz = domain->yz;
}
// set x and v of each atom
for (int i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
xbox = (xcmimage[i] & IMGMASK) - IMGMAX;
ybox = (xcmimage[i] >> IMGBITS & IMGMASK) - IMGMAX;
zbox = (xcmimage[i] >> IMG2BITS) - IMGMAX;
// save old positions and velocities for virial
if (evflag) {
if (triclinic == 0) {
x0 = x[i][0] + xbox*xprd;
x1 = x[i][1] + ybox*yprd;
x2 = x[i][2] + zbox*zprd;
} else {
x0 = x[i][0] + xbox*xprd + ybox*xy + zbox*xz;
x1 = x[i][1] + ybox*yprd + zbox*yz;
x2 = x[i][2] + zbox*zprd;
}
v0 = v[i][0];
v1 = v[i][1];
v2 = v[i][2];
}
// x = displacement from center-of-mass, based on body orientation
// v = vcm + omega around center-of-mass
MathExtra::matvec(ex_space[ibody],ey_space[ibody],
ez_space[ibody],displace[i],x[i]);
v[i][0] = omega[ibody][1]*x[i][2] - omega[ibody][2]*x[i][1] +
vcm[ibody][0];
v[i][1] = omega[ibody][2]*x[i][0] - omega[ibody][0]*x[i][2] +
vcm[ibody][1];
v[i][2] = omega[ibody][0]*x[i][1] - omega[ibody][1]*x[i][0] +
vcm[ibody][2];
// add center of mass to displacement
// map back into periodic box via xbox,ybox,zbox
// for triclinic, add in box tilt factors as well
if (triclinic == 0) {
x[i][0] += xcm[ibody][0] - xbox*xprd;
x[i][1] += xcm[ibody][1] - ybox*yprd;
x[i][2] += xcm[ibody][2] - zbox*zprd;
} else {
x[i][0] += xcm[ibody][0] - xbox*xprd - ybox*xy - zbox*xz;
x[i][1] += xcm[ibody][1] - ybox*yprd - zbox*yz;
x[i][2] += xcm[ibody][2] - zbox*zprd;
}
// virial = unwrapped coords dotted into body constraint force
// body constraint force = implied force due to v change minus f external
// assume f does not include forces internal to body
// 1/2 factor b/c final_integrate contributes other half
// assume per-atom contribution is due to constraint force on that atom
if (evflag) {
if (rmass) massone = rmass[i];
else massone = mass[type[i]];
fc0 = massone*(v[i][0] - v0)/dtf - f[i][0];
fc1 = massone*(v[i][1] - v1)/dtf - f[i][1];
fc2 = massone*(v[i][2] - v2)/dtf - f[i][2];
vr[0] = 0.5*x0*fc0;
vr[1] = 0.5*x1*fc1;
vr[2] = 0.5*x2*fc2;
vr[3] = 0.5*x0*fc1;
vr[4] = 0.5*x0*fc2;
vr[5] = 0.5*x1*fc2;
v_tally(1,&i,1.0,vr);
}
}
// set orientation, omega, angmom of each extended particle
if (extended) {
double theta_body,theta;
double *shape,*quatatom,*inertiaatom;
AtomVecEllipsoid::Bonus *ebonus;
if (avec_ellipsoid) ebonus = avec_ellipsoid->bonus;
AtomVecLine::Bonus *lbonus;
if (avec_line) lbonus = avec_line->bonus;
AtomVecTri::Bonus *tbonus;
if (avec_tri) tbonus = avec_tri->bonus;
double **omega_one = atom->omega;
double **angmom_one = atom->angmom;
double **mu = atom->mu;
int *ellipsoid = atom->ellipsoid;
int *line = atom->line;
int *tri = atom->tri;
for (int i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
if (eflags[i] & SPHERE) {
omega_one[i][0] = omega[ibody][0];
omega_one[i][1] = omega[ibody][1];
omega_one[i][2] = omega[ibody][2];
} else if (eflags[i] & ELLIPSOID) {
shape = ebonus[ellipsoid[i]].shape;
quatatom = ebonus[ellipsoid[i]].quat;
MathExtra::quatquat(quat[ibody],orient[i],quatatom);
MathExtra::qnormalize(quatatom);
ione[0] = EINERTIA*rmass[i] * (shape[1]*shape[1] + shape[2]*shape[2]);
ione[1] = EINERTIA*rmass[i] * (shape[0]*shape[0] + shape[2]*shape[2]);
ione[2] = EINERTIA*rmass[i] * (shape[0]*shape[0] + shape[1]*shape[1]);
MathExtra::q_to_exyz(quatatom,exone,eyone,ezone);
MathExtra::omega_to_angmom(omega[ibody],exone,eyone,ezone,ione,
angmom_one[i]);
} else if (eflags[i] & LINE) {
if (quat[ibody][3] >= 0.0) theta_body = 2.0*acos(quat[ibody][0]);
else theta_body = -2.0*acos(quat[ibody][0]);
theta = orient[i][0] + theta_body;
while (theta <= -MY_PI) theta += MY_2PI;
while (theta > MY_PI) theta -= MY_2PI;
lbonus[line[i]].theta = theta;
omega_one[i][0] = omega[ibody][0];
omega_one[i][1] = omega[ibody][1];
omega_one[i][2] = omega[ibody][2];
} else if (eflags[i] & TRIANGLE) {
inertiaatom = tbonus[tri[i]].inertia;
quatatom = tbonus[tri[i]].quat;
MathExtra::quatquat(quat[ibody],orient[i],quatatom);
MathExtra::qnormalize(quatatom);
MathExtra::q_to_exyz(quatatom,exone,eyone,ezone);
MathExtra::omega_to_angmom(omega[ibody],exone,eyone,ezone,
inertiaatom,angmom_one[i]);
}
if (eflags[i] & DIPOLE) {
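        // rotate the stored body-frame dipole direction into the space frame,
        // then rescale it to the dipole magnitude carried in mu[i][3]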
MathExtra::quat_to_mat(quat[ibody],p);
MathExtra::matvec(p,dorient[i],mu[i]);
MathExtra::snormalize3(mu[i][3],mu[i],mu[i]);
}
}
}
}
/* ----------------------------------------------------------------------
set space-frame velocity of each atom in a rigid body
set omega and angmom of extended particles
v = Vcm + (W cross (x - Xcm))
------------------------------------------------------------------------- */
void FixRigid::set_v()
{
int xbox,ybox,zbox;
double x0,x1,x2,v0,v1,v2,fc0,fc1,fc2,massone;
double xy,xz,yz;
double ione[3],exone[3],eyone[3],ezone[3],delta[3],vr[6];
double **x = atom->x;
double **v = atom->v;
double **f = atom->f;
double *rmass = atom->rmass;
double *mass = atom->mass;
int *type = atom->type;
int nlocal = atom->nlocal;
double xprd = domain->xprd;
double yprd = domain->yprd;
double zprd = domain->zprd;
if (triclinic) {
xy = domain->xy;
xz = domain->xz;
yz = domain->yz;
}
// set v of each atom
for (int i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
const int ibody = body[i];
MathExtra::matvec(ex_space[ibody],ey_space[ibody],
ez_space[ibody],displace[i],delta);
// save old velocities for virial
if (evflag) {
v0 = v[i][0];
v1 = v[i][1];
v2 = v[i][2];
}
v[i][0] = omega[ibody][1]*delta[2] - omega[ibody][2]*delta[1] +
vcm[ibody][0];
v[i][1] = omega[ibody][2]*delta[0] - omega[ibody][0]*delta[2] +
vcm[ibody][1];
v[i][2] = omega[ibody][0]*delta[1] - omega[ibody][1]*delta[0] +
vcm[ibody][2];
// virial = unwrapped coords dotted into body constraint force
// body constraint force = implied force due to v change minus f external
// assume f does not include forces internal to body
// 1/2 factor b/c initial_integrate contributes other half
// assume per-atom contribution is due to constraint force on that atom
if (evflag) {
if (rmass) massone = rmass[i];
else massone = mass[type[i]];
fc0 = massone*(v[i][0] - v0)/dtf - f[i][0];
fc1 = massone*(v[i][1] - v1)/dtf - f[i][1];
fc2 = massone*(v[i][2] - v2)/dtf - f[i][2];
xbox = (xcmimage[i] & IMGMASK) - IMGMAX;
ybox = (xcmimage[i] >> IMGBITS & IMGMASK) - IMGMAX;
zbox = (xcmimage[i] >> IMG2BITS) - IMGMAX;
if (triclinic == 0) {
x0 = x[i][0] + xbox*xprd;
x1 = x[i][1] + ybox*yprd;
x2 = x[i][2] + zbox*zprd;
} else {
x0 = x[i][0] + xbox*xprd + ybox*xy + zbox*xz;
x1 = x[i][1] + ybox*yprd + zbox*yz;
x2 = x[i][2] + zbox*zprd;
}
vr[0] = 0.5*x0*fc0;
vr[1] = 0.5*x1*fc1;
vr[2] = 0.5*x2*fc2;
vr[3] = 0.5*x0*fc1;
vr[4] = 0.5*x0*fc2;
vr[5] = 0.5*x1*fc2;
v_tally(1,&i,1.0,vr);
}
}
// set omega, angmom of each extended particle
if (extended) {
double *shape,*quatatom,*inertiaatom;
AtomVecEllipsoid::Bonus *ebonus;
if (avec_ellipsoid) ebonus = avec_ellipsoid->bonus;
AtomVecTri::Bonus *tbonus;
if (avec_tri) tbonus = avec_tri->bonus;
double **omega_one = atom->omega;
double **angmom_one = atom->angmom;
int *ellipsoid = atom->ellipsoid;
int *tri = atom->tri;
for (int i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
const int ibody = body[i];
if (eflags[i] & SPHERE) {
omega_one[i][0] = omega[ibody][0];
omega_one[i][1] = omega[ibody][1];
omega_one[i][2] = omega[ibody][2];
} else if (eflags[i] & ELLIPSOID) {
shape = ebonus[ellipsoid[i]].shape;
quatatom = ebonus[ellipsoid[i]].quat;
ione[0] = EINERTIA*rmass[i] * (shape[1]*shape[1] + shape[2]*shape[2]);
ione[1] = EINERTIA*rmass[i] * (shape[0]*shape[0] + shape[2]*shape[2]);
ione[2] = EINERTIA*rmass[i] * (shape[0]*shape[0] + shape[1]*shape[1]);
MathExtra::q_to_exyz(quatatom,exone,eyone,ezone);
MathExtra::omega_to_angmom(omega[ibody],exone,eyone,ezone,ione,
angmom_one[i]);
} else if (eflags[i] & LINE) {
omega_one[i][0] = omega[ibody][0];
omega_one[i][1] = omega[ibody][1];
omega_one[i][2] = omega[ibody][2];
} else if (eflags[i] & TRIANGLE) {
inertiaatom = tbonus[tri[i]].inertia;
quatatom = tbonus[tri[i]].quat;
MathExtra::q_to_exyz(quatatom,exone,eyone,ezone);
MathExtra::omega_to_angmom(omega[ibody],exone,eyone,ezone,
inertiaatom,angmom_one[i]);
}
}
}
}
/* ----------------------------------------------------------------------
one-time initialization of static rigid body attributes
sets extended flags, masstotal, center-of-mass
sets Cartesian and diagonalized inertia tensor
sets body image flags
may read some properties from inpfile
------------------------------------------------------------------------- */
void FixRigid::setup_bodies_static()
{
int i,ibody;
// extended = 1 if any particle in a rigid body is finite size
// or has a dipole moment
extended = orientflag = dorientflag = 0;
AtomVecEllipsoid::Bonus *ebonus;
if (avec_ellipsoid) ebonus = avec_ellipsoid->bonus;
AtomVecLine::Bonus *lbonus;
if (avec_line) lbonus = avec_line->bonus;
AtomVecTri::Bonus *tbonus;
if (avec_tri) tbonus = avec_tri->bonus;
double **mu = atom->mu;
double *radius = atom->radius;
double *rmass = atom->rmass;
double *mass = atom->mass;
int *ellipsoid = atom->ellipsoid;
int *line = atom->line;
int *tri = atom->tri;
int *type = atom->type;
int nlocal = atom->nlocal;
if (atom->radius_flag || atom->ellipsoid_flag || atom->line_flag ||
atom->tri_flag || atom->mu_flag) {
int flag = 0;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
if (radius && radius[i] > 0.0) flag = 1;
if (ellipsoid && ellipsoid[i] >= 0) flag = 1;
if (line && line[i] >= 0) flag = 1;
if (tri && tri[i] >= 0) flag = 1;
if (mu && mu[i][3] > 0.0) flag = 1;
}
MPI_Allreduce(&flag,&extended,1,MPI_INT,MPI_MAX,world);
}
// grow extended arrays and set extended flags for each particle
// orientflag = 4 if any particle stores ellipsoid or tri orientation
// orientflag = 1 if any particle stores line orientation
// dorientflag = 1 if any particle stores dipole orientation
if (extended) {
if (atom->ellipsoid_flag) orientflag = 4;
if (atom->line_flag) orientflag = 1;
if (atom->tri_flag) orientflag = 4;
if (atom->mu_flag) dorientflag = 1;
grow_arrays(atom->nmax);
for (i = 0; i < nlocal; i++) {
eflags[i] = 0;
if (body[i] < 0) continue;
// set to POINT or SPHERE or ELLIPSOID or LINE
if (radius && radius[i] > 0.0) {
eflags[i] |= SPHERE;
eflags[i] |= OMEGA;
eflags[i] |= TORQUE;
} else if (ellipsoid && ellipsoid[i] >= 0) {
eflags[i] |= ELLIPSOID;
eflags[i] |= ANGMOM;
eflags[i] |= TORQUE;
} else if (line && line[i] >= 0) {
eflags[i] |= LINE;
eflags[i] |= OMEGA;
eflags[i] |= TORQUE;
} else if (tri && tri[i] >= 0) {
eflags[i] |= TRIANGLE;
eflags[i] |= ANGMOM;
eflags[i] |= TORQUE;
} else eflags[i] |= POINT;
// set DIPOLE if atom->mu and mu[3] > 0.0
if (atom->mu_flag && mu[i][3] > 0.0)
eflags[i] |= DIPOLE;
}
}
// set body xcmimage flags = true image flags
imageint *image = atom->image;
for (i = 0; i < nlocal; i++)
if (body[i] >= 0) xcmimage[i] = image[i];
else xcmimage[i] = 0;
// compute masstotal & center-of-mass of each rigid body
// error if image flag is not 0 in a non-periodic dim
double **x = atom->x;
int *periodicity = domain->periodicity;
double xprd = domain->xprd;
double yprd = domain->yprd;
double zprd = domain->zprd;
double xy = domain->xy;
double xz = domain->xz;
double yz = domain->yz;
for (ibody = 0; ibody < nbody; ibody++)
for (i = 0; i < 6; i++) sum[ibody][i] = 0.0;
int xbox,ybox,zbox;
double massone,xunwrap,yunwrap,zunwrap;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
xbox = (xcmimage[i] & IMGMASK) - IMGMAX;
ybox = (xcmimage[i] >> IMGBITS & IMGMASK) - IMGMAX;
zbox = (xcmimage[i] >> IMG2BITS) - IMGMAX;
if (rmass) massone = rmass[i];
else massone = mass[type[i]];
if ((xbox && !periodicity[0]) || (ybox && !periodicity[1]) ||
(zbox && !periodicity[2]))
error->one(FLERR,"Fix rigid atom has non-zero image flag "
"in a non-periodic dimension");
if (triclinic == 0) {
xunwrap = x[i][0] + xbox*xprd;
yunwrap = x[i][1] + ybox*yprd;
zunwrap = x[i][2] + zbox*zprd;
} else {
xunwrap = x[i][0] + xbox*xprd + ybox*xy + zbox*xz;
yunwrap = x[i][1] + ybox*yprd + zbox*yz;
zunwrap = x[i][2] + zbox*zprd;
}
sum[ibody][0] += xunwrap * massone;
sum[ibody][1] += yunwrap * massone;
sum[ibody][2] += zunwrap * massone;
sum[ibody][3] += massone;
}
MPI_Allreduce(sum[0],all[0],6*nbody,MPI_DOUBLE,MPI_SUM,world);
for (ibody = 0; ibody < nbody; ibody++) {
masstotal[ibody] = all[ibody][3];
xcm[ibody][0] = all[ibody][0]/masstotal[ibody];
xcm[ibody][1] = all[ibody][1]/masstotal[ibody];
xcm[ibody][2] = all[ibody][2]/masstotal[ibody];
}
// set vcm, angmom = 0.0 in case inpfile is used
// and doesn't overwrite all body's values
// since setup_bodies_dynamic() will not be called
for (ibody = 0; ibody < nbody; ibody++) {
vcm[ibody][0] = vcm[ibody][1] = vcm[ibody][2] = 0.0;
angmom[ibody][0] = angmom[ibody][1] = angmom[ibody][2] = 0.0;
}
// set rigid body image flags to default values
for (ibody = 0; ibody < nbody; ibody++)
imagebody[ibody] = ((imageint) IMGMAX << IMG2BITS) |
((imageint) IMGMAX << IMGBITS) | IMGMAX;
// overwrite masstotal, center-of-mass, image flags with file values
// inbody[i] = 0/1 if Ith rigid body is initialized by file
int *inbody;
if (inpfile) {
memory->create(inbody,nbody,"rigid:inbody");
for (ibody = 0; ibody < nbody; ibody++) inbody[ibody] = 0;
readfile(0,masstotal,xcm,vcm,angmom,imagebody,inbody);
}
// remap the xcm of each body back into simulation box
// and reset body and atom xcmimage flags via pre_neighbor()
pre_neighbor();
// compute 6 moments of inertia of each body in Cartesian reference frame
// dx,dy,dz = coords relative to center-of-mass
// symmetric 3x3 inertia tensor stored in Voigt notation as 6-vector
double dx,dy,dz;
for (ibody = 0; ibody < nbody; ibody++)
for (i = 0; i < 6; i++) sum[ibody][i] = 0.0;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
xbox = (xcmimage[i] & IMGMASK) - IMGMAX;
ybox = (xcmimage[i] >> IMGBITS & IMGMASK) - IMGMAX;
zbox = (xcmimage[i] >> IMG2BITS) - IMGMAX;
if (triclinic == 0) {
xunwrap = x[i][0] + xbox*xprd;
yunwrap = x[i][1] + ybox*yprd;
zunwrap = x[i][2] + zbox*zprd;
} else {
xunwrap = x[i][0] + xbox*xprd + ybox*xy + zbox*xz;
yunwrap = x[i][1] + ybox*yprd + zbox*yz;
zunwrap = x[i][2] + zbox*zprd;
}
dx = xunwrap - xcm[ibody][0];
dy = yunwrap - xcm[ibody][1];
dz = zunwrap - xcm[ibody][2];
if (rmass) massone = rmass[i];
else massone = mass[type[i]];
sum[ibody][0] += massone * (dy*dy + dz*dz);
sum[ibody][1] += massone * (dx*dx + dz*dz);
sum[ibody][2] += massone * (dx*dx + dy*dy);
sum[ibody][3] -= massone * dy*dz;
sum[ibody][4] -= massone * dx*dz;
sum[ibody][5] -= massone * dx*dy;
}
// extended particles may contribute extra terms to moments of inertia
if (extended) {
double ivec[6];
double *shape,*quatatom,*inertiaatom;
double length,theta;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
if (rmass) massone = rmass[i];
else massone = mass[type[i]];
if (eflags[i] & SPHERE) {
sum[ibody][0] += SINERTIA*massone * radius[i]*radius[i];
sum[ibody][1] += SINERTIA*massone * radius[i]*radius[i];
sum[ibody][2] += SINERTIA*massone * radius[i]*radius[i];
} else if (eflags[i] & ELLIPSOID) {
shape = ebonus[ellipsoid[i]].shape;
quatatom = ebonus[ellipsoid[i]].quat;
MathExtra::inertia_ellipsoid(shape,quatatom,massone,ivec);
sum[ibody][0] += ivec[0];
sum[ibody][1] += ivec[1];
sum[ibody][2] += ivec[2];
sum[ibody][3] += ivec[3];
sum[ibody][4] += ivec[4];
sum[ibody][5] += ivec[5];
} else if (eflags[i] & LINE) {
length = lbonus[line[i]].length;
theta = lbonus[line[i]].theta;
MathExtra::inertia_line(length,theta,massone,ivec);
sum[ibody][0] += ivec[0];
sum[ibody][1] += ivec[1];
sum[ibody][2] += ivec[2];
sum[ibody][3] += ivec[3];
sum[ibody][4] += ivec[4];
sum[ibody][5] += ivec[5];
} else if (eflags[i] & TRIANGLE) {
inertiaatom = tbonus[tri[i]].inertia;
quatatom = tbonus[tri[i]].quat;
MathExtra::inertia_triangle(inertiaatom,quatatom,massone,ivec);
sum[ibody][0] += ivec[0];
sum[ibody][1] += ivec[1];
sum[ibody][2] += ivec[2];
sum[ibody][3] += ivec[3];
sum[ibody][4] += ivec[4];
sum[ibody][5] += ivec[5];
}
}
}
MPI_Allreduce(sum[0],all[0],6*nbody,MPI_DOUBLE,MPI_SUM,world);
// overwrite Cartesian inertia tensor with file values
if (inpfile) readfile(1,NULL,all,NULL,NULL,NULL,inbody);
// diagonalize inertia tensor for each body via Jacobi rotations
// inertia = 3 eigenvalues = principal moments of inertia
// evectors and exzy_space = 3 evectors = principal axes of rigid body
int ierror;
double cross[3];
double tensor[3][3],evectors[3][3];
for (ibody = 0; ibody < nbody; ibody++) {
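    // all[ibody] holds the Voigt-ordered tensor: 0=Ixx, 1=Iyy, 2=Izz, 3=Iyz, 4=Ixz, 5=Ixy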
tensor[0][0] = all[ibody][0];
tensor[1][1] = all[ibody][1];
tensor[2][2] = all[ibody][2];
tensor[1][2] = tensor[2][1] = all[ibody][3];
tensor[0][2] = tensor[2][0] = all[ibody][4];
tensor[0][1] = tensor[1][0] = all[ibody][5];
ierror = MathExtra::jacobi(tensor,inertia[ibody],evectors);
if (ierror) error->all(FLERR,
"Insufficient Jacobi rotations for rigid body");
ex_space[ibody][0] = evectors[0][0];
ex_space[ibody][1] = evectors[1][0];
ex_space[ibody][2] = evectors[2][0];
ey_space[ibody][0] = evectors[0][1];
ey_space[ibody][1] = evectors[1][1];
ey_space[ibody][2] = evectors[2][1];
ez_space[ibody][0] = evectors[0][2];
ez_space[ibody][1] = evectors[1][2];
ez_space[ibody][2] = evectors[2][2];
// if any principal moment < scaled EPSILON, set to 0.0
double max;
max = MAX(inertia[ibody][0],inertia[ibody][1]);
max = MAX(max,inertia[ibody][2]);
if (inertia[ibody][0] < EPSILON*max) inertia[ibody][0] = 0.0;
if (inertia[ibody][1] < EPSILON*max) inertia[ibody][1] = 0.0;
if (inertia[ibody][2] < EPSILON*max) inertia[ibody][2] = 0.0;
// enforce 3 evectors as a right-handed coordinate system
// flip 3rd vector if needed
MathExtra::cross3(ex_space[ibody],ey_space[ibody],cross);
if (MathExtra::dot3(cross,ez_space[ibody]) < 0.0)
MathExtra::negate3(ez_space[ibody]);
// create initial quaternion
MathExtra::exyz_to_q(ex_space[ibody],ey_space[ibody],ez_space[ibody],
quat[ibody]);
}
// displace = initial atom coords in basis of principal axes
// set displace = 0.0 for atoms not in any rigid body
// for extended particles, set their orientation wrt to rigid body
double qc[4],delta[3];
double *quatatom;
double theta_body;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) {
displace[i][0] = displace[i][1] = displace[i][2] = 0.0;
continue;
}
ibody = body[i];
xbox = (xcmimage[i] & IMGMASK) - IMGMAX;
ybox = (xcmimage[i] >> IMGBITS & IMGMASK) - IMGMAX;
zbox = (xcmimage[i] >> IMG2BITS) - IMGMAX;
if (triclinic == 0) {
xunwrap = x[i][0] + xbox*xprd;
yunwrap = x[i][1] + ybox*yprd;
zunwrap = x[i][2] + zbox*zprd;
} else {
xunwrap = x[i][0] + xbox*xprd + ybox*xy + zbox*xz;
yunwrap = x[i][1] + ybox*yprd + zbox*yz;
zunwrap = x[i][2] + zbox*zprd;
}
delta[0] = xunwrap - xcm[ibody][0];
delta[1] = yunwrap - xcm[ibody][1];
delta[2] = zunwrap - xcm[ibody][2];
MathExtra::transpose_matvec(ex_space[ibody],ey_space[ibody],
ez_space[ibody],delta,displace[i]);
if (extended) {
if (eflags[i] & ELLIPSOID) {
quatatom = ebonus[ellipsoid[i]].quat;
MathExtra::qconjugate(quat[ibody],qc);
MathExtra::quatquat(qc,quatatom,orient[i]);
MathExtra::qnormalize(orient[i]);
} else if (eflags[i] & LINE) {
if (quat[ibody][3] >= 0.0) theta_body = 2.0*acos(quat[ibody][0]);
else theta_body = -2.0*acos(quat[ibody][0]);
orient[i][0] = lbonus[line[i]].theta - theta_body;
while (orient[i][0] <= -MY_PI) orient[i][0] += MY_2PI;
while (orient[i][0] > MY_PI) orient[i][0] -= MY_2PI;
if (orientflag == 4) orient[i][1] = orient[i][2] = orient[i][3] = 0.0;
} else if (eflags[i] & TRIANGLE) {
quatatom = tbonus[tri[i]].quat;
MathExtra::qconjugate(quat[ibody],qc);
MathExtra::quatquat(qc,quatatom,orient[i]);
MathExtra::qnormalize(orient[i]);
} else if (orientflag == 4) {
orient[i][0] = orient[i][1] = orient[i][2] = orient[i][3] = 0.0;
} else if (orientflag == 1)
orient[i][0] = 0.0;
if (eflags[i] & DIPOLE) {
MathExtra::transpose_matvec(ex_space[ibody],ey_space[ibody],
ez_space[ibody],mu[i],dorient[i]);
MathExtra::snormalize3(mu[i][3],dorient[i],dorient[i]);
} else if (dorientflag)
dorient[i][0] = dorient[i][1] = dorient[i][2] = 0.0;
}
}
// test for valid principal moments & axes
// recompute moments of inertia around new axes
// 3 diagonal moments should equal principal moments
// 3 off-diagonal moments should be 0.0
// extended particles may contribute extra terms to moments of inertia
for (ibody = 0; ibody < nbody; ibody++)
for (i = 0; i < 6; i++) sum[ibody][i] = 0.0;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
if (rmass) massone = rmass[i];
else massone = mass[type[i]];
sum[ibody][0] += massone *
(displace[i][1]*displace[i][1] + displace[i][2]*displace[i][2]);
sum[ibody][1] += massone *
(displace[i][0]*displace[i][0] + displace[i][2]*displace[i][2]);
sum[ibody][2] += massone *
(displace[i][0]*displace[i][0] + displace[i][1]*displace[i][1]);
sum[ibody][3] -= massone * displace[i][1]*displace[i][2];
sum[ibody][4] -= massone * displace[i][0]*displace[i][2];
sum[ibody][5] -= massone * displace[i][0]*displace[i][1];
}
if (extended) {
double ivec[6];
double *shape,*inertiaatom;
double length;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
if (rmass) massone = rmass[i];
else massone = mass[type[i]];
if (eflags[i] & SPHERE) {
sum[ibody][0] += SINERTIA*massone * radius[i]*radius[i];
sum[ibody][1] += SINERTIA*massone * radius[i]*radius[i];
sum[ibody][2] += SINERTIA*massone * radius[i]*radius[i];
} else if (eflags[i] & ELLIPSOID) {
shape = ebonus[ellipsoid[i]].shape;
MathExtra::inertia_ellipsoid(shape,orient[i],massone,ivec);
sum[ibody][0] += ivec[0];
sum[ibody][1] += ivec[1];
sum[ibody][2] += ivec[2];
sum[ibody][3] += ivec[3];
sum[ibody][4] += ivec[4];
sum[ibody][5] += ivec[5];
} else if (eflags[i] & LINE) {
length = lbonus[line[i]].length;
MathExtra::inertia_line(length,orient[i][0],massone,ivec);
sum[ibody][0] += ivec[0];
sum[ibody][1] += ivec[1];
sum[ibody][2] += ivec[2];
sum[ibody][3] += ivec[3];
sum[ibody][4] += ivec[4];
sum[ibody][5] += ivec[5];
} else if (eflags[i] & TRIANGLE) {
inertiaatom = tbonus[tri[i]].inertia;
MathExtra::inertia_triangle(inertiaatom,orient[i],massone,ivec);
sum[ibody][0] += ivec[0];
sum[ibody][1] += ivec[1];
sum[ibody][2] += ivec[2];
sum[ibody][3] += ivec[3];
sum[ibody][4] += ivec[4];
sum[ibody][5] += ivec[5];
}
}
}
MPI_Allreduce(sum[0],all[0],6*nbody,MPI_DOUBLE,MPI_SUM,world);
// error check that re-computed moments of inertia match diagonalized ones
// do not do test for bodies with params read from inpfile
double norm;
for (ibody = 0; ibody < nbody; ibody++) {
if (inpfile && inbody[ibody]) continue;
if (inertia[ibody][0] == 0.0) {
if (fabs(all[ibody][0]) > TOLERANCE)
error->all(FLERR,"Fix rigid: Bad principal moments");
} else {
if (fabs((all[ibody][0]-inertia[ibody][0])/inertia[ibody][0]) >
TOLERANCE) error->all(FLERR,"Fix rigid: Bad principal moments");
}
if (inertia[ibody][1] == 0.0) {
if (fabs(all[ibody][1]) > TOLERANCE)
error->all(FLERR,"Fix rigid: Bad principal moments");
} else {
if (fabs((all[ibody][1]-inertia[ibody][1])/inertia[ibody][1]) >
TOLERANCE) error->all(FLERR,"Fix rigid: Bad principal moments");
}
if (inertia[ibody][2] == 0.0) {
if (fabs(all[ibody][2]) > TOLERANCE)
error->all(FLERR,"Fix rigid: Bad principal moments");
} else {
if (fabs((all[ibody][2]-inertia[ibody][2])/inertia[ibody][2]) >
TOLERANCE) error->all(FLERR,"Fix rigid: Bad principal moments");
}
norm = (inertia[ibody][0] + inertia[ibody][1] + inertia[ibody][2]) / 3.0;
if (fabs(all[ibody][3]/norm) > TOLERANCE ||
fabs(all[ibody][4]/norm) > TOLERANCE ||
fabs(all[ibody][5]/norm) > TOLERANCE)
error->all(FLERR,"Fix rigid: Bad principal moments");
}
if (inpfile) memory->destroy(inbody);
}
/* ----------------------------------------------------------------------
one-time initialization of dynamic rigid body attributes
set vcm and angmom, computed explicitly from constituent particles
   not done if body properties read from file, e.g. for overlapping particles
------------------------------------------------------------------------- */
void FixRigid::setup_bodies_dynamic()
{
int i,ibody;
double massone,radone;
// vcm = velocity of center-of-mass of each rigid body
// angmom = angular momentum of each rigid body
double **x = atom->x;
double **v = atom->v;
double *rmass = atom->rmass;
double *mass = atom->mass;
int *type = atom->type;
int nlocal = atom->nlocal;
double dx,dy,dz;
double unwrap[3];
for (ibody = 0; ibody < nbody; ibody++)
for (i = 0; i < 6; i++) sum[ibody][i] = 0.0;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
if (rmass) massone = rmass[i];
else massone = mass[type[i]];
sum[ibody][0] += v[i][0] * massone;
sum[ibody][1] += v[i][1] * massone;
sum[ibody][2] += v[i][2] * massone;
domain->unmap(x[i],xcmimage[i],unwrap);
dx = unwrap[0] - xcm[ibody][0];
dy = unwrap[1] - xcm[ibody][1];
dz = unwrap[2] - xcm[ibody][2];
sum[ibody][3] += dy * massone*v[i][2] - dz * massone*v[i][1];
sum[ibody][4] += dz * massone*v[i][0] - dx * massone*v[i][2];
sum[ibody][5] += dx * massone*v[i][1] - dy * massone*v[i][0];
}
// extended particles add their rotation to angmom of body
if (extended) {
AtomVecLine::Bonus *lbonus;
if (avec_line) lbonus = avec_line->bonus;
double **omega_one = atom->omega;
double **angmom_one = atom->angmom;
double *radius = atom->radius;
int *line = atom->line;
for (i = 0; i < nlocal; i++) {
if (body[i] < 0) continue;
ibody = body[i];
if (eflags[i] & OMEGA) {
if (eflags[i] & SPHERE) {
radone = radius[i];
sum[ibody][3] += SINERTIA*rmass[i] * radone*radone * omega_one[i][0];
sum[ibody][4] += SINERTIA*rmass[i] * radone*radone * omega_one[i][1];
sum[ibody][5] += SINERTIA*rmass[i] * radone*radone * omega_one[i][2];
} else if (eflags[i] & LINE) {
radone = lbonus[line[i]].length;
sum[ibody][5] += LINERTIA*rmass[i] * radone*radone * omega_one[i][2];
}
}
if (eflags[i] & ANGMOM) {
sum[ibody][3] += angmom_one[i][0];
sum[ibody][4] += angmom_one[i][1];
sum[ibody][5] += angmom_one[i][2];
}
}
}
MPI_Allreduce(sum[0],all[0],6*nbody,MPI_DOUBLE,MPI_SUM,world);
// normalize velocity of COM
for (ibody = 0; ibody < nbody; ibody++) {
vcm[ibody][0] = all[ibody][0]/masstotal[ibody];
vcm[ibody][1] = all[ibody][1]/masstotal[ibody];
vcm[ibody][2] = all[ibody][2]/masstotal[ibody];
angmom[ibody][0] = all[ibody][3];
angmom[ibody][1] = all[ibody][4];
angmom[ibody][2] = all[ibody][5];
}
}
/* ----------------------------------------------------------------------
read per rigid body info from user-provided file
   which = 0 to read everything except 6 moments of inertia
which = 1 to read 6 moments of inertia
flag inbody = 0 for bodies whose info is read from file
nlines = # of lines of rigid body info
one line = rigid-ID mass xcm ycm zcm ixx iyy izz ixy ixz iyz
vxcm vycm vzcm lx ly lz ix iy iz
------------------------------------------------------------------------- */
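/* illustrative example of one body line (values are made up; the column order
   follows the format documented above):
     1 12.0 0.5 0.5 0.5  1.0 1.0 1.0 0.0 0.0 0.0  0.0 0.0 0.0  0.0 0.0 0.0  0 0 0 */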
void FixRigid::readfile(int which, double *vec,
double **array1, double **array2, double **array3,
imageint *ivec, int *inbody)
{
int j,nchunk,id,eofflag,xbox,ybox,zbox;
int nlines;
FILE *fp;
char *eof,*start,*next,*buf;
char line[MAXLINE];
if (me == 0) {
fp = fopen(inpfile,"r");
if (fp == NULL) {
char str[128];
snprintf(str,128,"Cannot open fix rigid inpfile %s",inpfile);
error->one(FLERR,str);
}
while (1) {
eof = fgets(line,MAXLINE,fp);
if (eof == NULL) error->one(FLERR,"Unexpected end of fix rigid file");
start = &line[strspn(line," \t\n\v\f\r")];
if (*start != '\0' && *start != '#') break;
}
sscanf(line,"%d",&nlines);
}
MPI_Bcast(&nlines,1,MPI_INT,0,world);
if (nlines == 0) error->all(FLERR,"Fix rigid file has no lines");
char *buffer = new char[CHUNK*MAXLINE];
char **values = new char*[ATTRIBUTE_PERBODY];
int nread = 0;
while (nread < nlines) {
nchunk = MIN(nlines-nread,CHUNK);
eofflag = comm->read_lines_from_file(fp,nchunk,MAXLINE,buffer);
if (eofflag) error->all(FLERR,"Unexpected end of fix rigid file");
buf = buffer;
next = strchr(buf,'\n');
*next = '\0';
int nwords = atom->count_words(buf);
*next = '\n';
if (nwords != ATTRIBUTE_PERBODY)
error->all(FLERR,"Incorrect rigid body format in fix rigid file");
// loop over lines of rigid body attributes
// tokenize the line into values
// id = rigid body ID
// use ID as-is for SINGLE, as mol-ID for MOLECULE, as-is for GROUP
// for which = 0, store all but inertia in vecs and arrays
// for which = 1, store inertia tensor array, invert 3,4,5 values to Voigt
for (int i = 0; i < nchunk; i++) {
next = strchr(buf,'\n');
values[0] = strtok(buf," \t\n\r\f");
for (j = 1; j < nwords; j++)
values[j] = strtok(NULL," \t\n\r\f");
id = atoi(values[0]);
if (rstyle == MOLECULE) {
if (id <= 0 || id > maxmol)
error->all(FLERR,"Invalid rigid body ID in fix rigid file");
id = mol2body[id];
} else id--;
if (id < 0 || id >= nbody)
error->all(FLERR,"Invalid rigid body ID in fix rigid file");
inbody[id] = 1;
if (which == 0) {
vec[id] = atof(values[1]);
array1[id][0] = atof(values[2]);
array1[id][1] = atof(values[3]);
array1[id][2] = atof(values[4]);
array2[id][0] = atof(values[11]);
array2[id][1] = atof(values[12]);
array2[id][2] = atof(values[13]);
array3[id][0] = atof(values[14]);
array3[id][1] = atof(values[15]);
array3[id][2] = atof(values[16]);
xbox = atoi(values[17]);
ybox = atoi(values[18]);
zbox = atoi(values[19]);
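        // pack the three per-dimension image counts into one imageint;
        // IMGMAX is added so each stored bitfield is non-negative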
ivec[id] = ((imageint) (xbox + IMGMAX) & IMGMASK) |
(((imageint) (ybox + IMGMAX) & IMGMASK) << IMGBITS) |
(((imageint) (zbox + IMGMAX) & IMGMASK) << IMG2BITS);
} else {
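        // file stores ixx iyy izz ixy ixz iyz; reorder the off-diagonal values
        // into the Voigt order (3=yz, 4=xz, 5=xy) used elsewhere in this fix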
array1[id][0] = atof(values[5]);
array1[id][1] = atof(values[6]);
array1[id][2] = atof(values[7]);
array1[id][3] = atof(values[10]);
array1[id][4] = atof(values[9]);
array1[id][5] = atof(values[8]);
}
buf = next + 1;
}
nread += nchunk;
}
if (me == 0) fclose(fp);
delete [] buffer;
delete [] values;
}
/* ----------------------------------------------------------------------
write out restart info for mass, COM, inertia tensor, image flags to file
identical format to inpfile option, so info can be read in when restarting
only proc 0 writes list of global bodies to file
------------------------------------------------------------------------- */
void FixRigid::write_restart_file(char *file)
{
if (me) return;
char outfile[128];
snprintf(outfile,128,"%s.rigid",file);
FILE *fp = fopen(outfile,"w");
if (fp == NULL) {
char str[192];
snprintf(str,192,"Cannot open fix rigid restart file %s",outfile);
error->one(FLERR,str);
}
fprintf(fp,"# fix rigid mass, COM, inertia tensor info for "
"%d bodies on timestep " BIGINT_FORMAT "\n\n",
nbody,update->ntimestep);
fprintf(fp,"%d\n",nbody);
// compute I tensor against xyz axes from diagonalized I and current quat
// Ispace = P Idiag P_transpose
// P is stored column-wise in exyz_space
int xbox,ybox,zbox;
double p[3][3],pdiag[3][3],ispace[3][3];
int id;
for (int i = 0; i < nbody; i++) {
if (rstyle == SINGLE || rstyle == GROUP) id = i;
else id = body2mol[i];
MathExtra::col2mat(ex_space[i],ey_space[i],ez_space[i],p);
MathExtra::times3_diag(p,inertia[i],pdiag);
MathExtra::times3_transpose(pdiag,p,ispace);
xbox = (imagebody[i] & IMGMASK) - IMGMAX;
ybox = (imagebody[i] >> IMGBITS & IMGMASK) - IMGMAX;
zbox = (imagebody[i] >> IMG2BITS) - IMGMAX;
fprintf(fp,"%d %-1.16e %-1.16e %-1.16e %-1.16e "
"%-1.16e %-1.16e %-1.16e %-1.16e %-1.16e %-1.16e "
"%-1.16e %-1.16e %-1.16e %-1.16e %-1.16e %-1.16e "
"%d %d %d\n",
id,masstotal[i],xcm[i][0],xcm[i][1],xcm[i][2],
ispace[0][0],ispace[1][1],ispace[2][2],
ispace[0][1],ispace[0][2],ispace[1][2],
vcm[i][0],vcm[i][1],vcm[i][2],
angmom[i][0],angmom[i][1],angmom[i][2],
xbox,ybox,zbox);
}
fclose(fp);
}
/* ----------------------------------------------------------------------
memory usage of local atom-based arrays
------------------------------------------------------------------------- */
double FixRigid::memory_usage()
{
int nmax = atom->nmax;
double bytes = nmax * sizeof(int);
bytes += nmax * sizeof(imageint);
bytes += nmax*3 * sizeof(double);
bytes += maxvatom*6 * sizeof(double); // vatom
if (extended) {
bytes += nmax * sizeof(int);
    if (orientflag) bytes += nmax*orientflag * sizeof(double);
    if (dorientflag) bytes += nmax*3 * sizeof(double);
}
return bytes;
}
/* ----------------------------------------------------------------------
allocate local atom-based arrays
------------------------------------------------------------------------- */
void FixRigid::grow_arrays(int nmax)
{
memory->grow(body,nmax,"rigid:body");
memory->grow(xcmimage,nmax,"rigid:xcmimage");
memory->grow(displace,nmax,3,"rigid:displace");
if (extended) {
memory->grow(eflags,nmax,"rigid:eflags");
if (orientflag) memory->grow(orient,nmax,orientflag,"rigid:orient");
if (dorientflag) memory->grow(dorient,nmax,3,"rigid:dorient");
}
// check for regrow of vatom
// must be done whether per-atom virial is accumulated on this step or not
  // b/c this is the only time grow_arrays() may be called
// need to regrow b/c vatom is calculated before and after atom migration
if (nmax > maxvatom) {
maxvatom = atom->nmax;
memory->grow(vatom,maxvatom,6,"fix:vatom");
}
}
/* ----------------------------------------------------------------------
copy values within local atom-based arrays
------------------------------------------------------------------------- */
void FixRigid::copy_arrays(int i, int j, int /*delflag*/)
{
body[j] = body[i];
xcmimage[j] = xcmimage[i];
displace[j][0] = displace[i][0];
displace[j][1] = displace[i][1];
displace[j][2] = displace[i][2];
if (extended) {
eflags[j] = eflags[i];
for (int k = 0; k < orientflag; k++)
orient[j][k] = orient[i][k];
if (dorientflag) {
dorient[j][0] = dorient[i][0];
dorient[j][1] = dorient[i][1];
dorient[j][2] = dorient[i][2];
}
}
// must also copy vatom if per-atom virial calculated on this timestep
// since vatom is calculated before and after atom migration
if (vflag_atom)
for (int k = 0; k < 6; k++)
vatom[j][k] = vatom[i][k];
}
/* ----------------------------------------------------------------------
initialize one atom's array values, called when atom is created
------------------------------------------------------------------------- */
void FixRigid::set_arrays(int i)
{
body[i] = -1;
xcmimage[i] = 0;
displace[i][0] = 0.0;
displace[i][1] = 0.0;
displace[i][2] = 0.0;
// must also zero vatom if per-atom virial calculated on this timestep
// since vatom is calculated before and after atom migration
if (vflag_atom)
for (int k = 0; k < 6; k++)
vatom[i][k] = 0.0;
}
/* ----------------------------------------------------------------------
pack values in local atom-based arrays for exchange with another proc
------------------------------------------------------------------------- */
int FixRigid::pack_exchange(int i, double *buf)
{
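  // ubuf lets integer values travel in the double-valued exchange buffer
  // with their bit patterns preserved exactly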
buf[0] = ubuf(body[i]).d;
buf[1] = ubuf(xcmimage[i]).d;
buf[2] = displace[i][0];
buf[3] = displace[i][1];
buf[4] = displace[i][2];
if (!extended) return 5;
int m = 5;
buf[m++] = eflags[i];
for (int j = 0; j < orientflag; j++)
buf[m++] = orient[i][j];
if (dorientflag) {
buf[m++] = dorient[i][0];
buf[m++] = dorient[i][1];
buf[m++] = dorient[i][2];
}
// must also pack vatom if per-atom virial calculated on this timestep
// since vatom is calculated before and after atom migration
if (vflag_atom)
for (int k = 0; k < 6; k++)
buf[m++] = vatom[i][k];
return m;
}
/* ----------------------------------------------------------------------
unpack values in local atom-based arrays from exchange with another proc
------------------------------------------------------------------------- */
int FixRigid::unpack_exchange(int nlocal, double *buf)
{
body[nlocal] = (int) ubuf(buf[0]).i;
xcmimage[nlocal] = (imageint) ubuf(buf[1]).i;
displace[nlocal][0] = buf[2];
displace[nlocal][1] = buf[3];
displace[nlocal][2] = buf[4];
if (!extended) return 5;
int m = 5;
eflags[nlocal] = static_cast<int> (buf[m++]);
for (int j = 0; j < orientflag; j++)
orient[nlocal][j] = buf[m++];
if (dorientflag) {
dorient[nlocal][0] = buf[m++];
dorient[nlocal][1] = buf[m++];
dorient[nlocal][2] = buf[m++];
}
// must also unpack vatom if per-atom virial calculated on this timestep
// since vatom is calculated before and after atom migration
if (vflag_atom)
for (int k = 0; k < 6; k++)
vatom[nlocal][k] = buf[m++];
return m;
}
/* ---------------------------------------------------------------------- */
void FixRigid::reset_dt()
{
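  // dtv = full-step factor for positions, dtf = half-step force prefactor
  // (ftm2v converts force*time to mass*velocity units), dtq = half-step for quaternions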
dtv = update->dt;
dtf = 0.5 * update->dt * force->ftm2v;
dtq = 0.5 * update->dt;
}
/* ----------------------------------------------------------------------
zero linear momentum of each rigid body
set Vcm to 0.0, then reset velocities of particles via set_v()
------------------------------------------------------------------------- */
void FixRigid::zero_momentum()
{
for (int ibody = 0; ibody < nbody; ibody++)
vcm[ibody][0] = vcm[ibody][1] = vcm[ibody][2] = 0.0;
evflag = 0;
set_v();
}
/* ----------------------------------------------------------------------
zero angular momentum of each rigid body
set angmom/omega to 0.0, then reset velocities of particles via set_v()
------------------------------------------------------------------------- */
void FixRigid::zero_rotation()
{
for (int ibody = 0; ibody < nbody; ibody++) {
angmom[ibody][0] = angmom[ibody][1] = angmom[ibody][2] = 0.0;
omega[ibody][0] = omega[ibody][1] = omega[ibody][2] = 0.0;
}
evflag = 0;
set_v();
}
/* ---------------------------------------------------------------------- */
int FixRigid::modify_param(int narg, char **arg)
{
if (strcmp(arg[0],"bodyforces") == 0) {
if (narg < 2) error->all(FLERR,"Illegal fix_modify command");
if (strcmp(arg[1],"early") == 0) earlyflag = 1;
else if (strcmp(arg[1],"late") == 0) earlyflag = 0;
else error->all(FLERR,"Illegal fix_modify command");
// reset fix mask
// must do here and not in init,
// since modify.cpp::init() uses fix masks before calling fix::init()
for (int i = 0; i < modify->nfix; i++)
if (strcmp(modify->fix[i]->id,id) == 0) {
if (earlyflag) modify->fmask[i] |= POST_FORCE;
else if (!langflag) modify->fmask[i] &= ~POST_FORCE;
break;
}
return 2;
}
return 0;
}
/* ----------------------------------------------------------------------
return temperature of collection of rigid bodies
non-active DOF are removed by fflag/tflag and in tfactor
------------------------------------------------------------------------- */
double FixRigid::compute_scalar()
{
double wbody[3],rot[3][3];
double t = 0.0;
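  // accumulate 2*KE (translational + rotational); tfactor below converts the
  // sum to a temperature and accounts for the active degrees of freedom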
for (int i = 0; i < nbody; i++) {
t += masstotal[i] * (fflag[i][0]*vcm[i][0]*vcm[i][0] +
fflag[i][1]*vcm[i][1]*vcm[i][1] +
fflag[i][2]*vcm[i][2]*vcm[i][2]);
// wbody = angular velocity in body frame
MathExtra::quat_to_mat(quat[i],rot);
MathExtra::transpose_matvec(rot,angmom[i],wbody);
if (inertia[i][0] == 0.0) wbody[0] = 0.0;
else wbody[0] /= inertia[i][0];
if (inertia[i][1] == 0.0) wbody[1] = 0.0;
else wbody[1] /= inertia[i][1];
if (inertia[i][2] == 0.0) wbody[2] = 0.0;
else wbody[2] /= inertia[i][2];
t += tflag[i][0]*inertia[i][0]*wbody[0]*wbody[0] +
tflag[i][1]*inertia[i][1]*wbody[1]*wbody[1] +
tflag[i][2]*inertia[i][2]*wbody[2]*wbody[2];
}
t *= tfactor;
return t;
}
/* ---------------------------------------------------------------------- */
void *FixRigid::extract(const char *str, int &dim)
{
if (strcmp(str,"body") == 0) {
dim = 1;
return body;
}
if (strcmp(str,"masstotal") == 0) {
dim = 1;
return masstotal;
}
if (strcmp(str,"t_target") == 0) {
dim = 0;
return &t_target;
}
return NULL;
}
/* ----------------------------------------------------------------------
return translational KE for all rigid bodies
KE = 1/2 M Vcm^2
------------------------------------------------------------------------- */
double FixRigid::extract_ke()
{
double ke = 0.0;
for (int i = 0; i < nbody; i++)
ke += masstotal[i] *
(vcm[i][0]*vcm[i][0] + vcm[i][1]*vcm[i][1] + vcm[i][2]*vcm[i][2]);
return 0.5*ke;
}
/* ----------------------------------------------------------------------
return rotational KE for all rigid bodies
Erotational = 1/2 I wbody^2
------------------------------------------------------------------------- */
double FixRigid::extract_erotational()
{
double wbody[3],rot[3][3];
double erotate = 0.0;
for (int i = 0; i < nbody; i++) {
// wbody = angular velocity in body frame
MathExtra::quat_to_mat(quat[i],rot);
MathExtra::transpose_matvec(rot,angmom[i],wbody);
if (inertia[i][0] == 0.0) wbody[0] = 0.0;
else wbody[0] /= inertia[i][0];
if (inertia[i][1] == 0.0) wbody[1] = 0.0;
else wbody[1] /= inertia[i][1];
if (inertia[i][2] == 0.0) wbody[2] = 0.0;
else wbody[2] /= inertia[i][2];
erotate += inertia[i][0]*wbody[0]*wbody[0] +
inertia[i][1]*wbody[1]*wbody[1] + inertia[i][2]*wbody[2]*wbody[2];
}
return 0.5*erotate;
}
/* ----------------------------------------------------------------------
return attributes of a rigid body
15 values per body
xcm = 0,1,2; vcm = 3,4,5; fcm = 6,7,8; torque = 9,10,11; image = 12,13,14
------------------------------------------------------------------------- */
double FixRigid::compute_array(int i, int j)
{
if (j < 3) return xcm[i][j];
if (j < 6) return vcm[i][j-3];
if (j < 9) return fcm[i][j-6];
if (j < 12) return torque[i][j-9];
if (j == 12) return (imagebody[i] & IMGMASK) - IMGMAX;
if (j == 13) return (imagebody[i] >> IMGBITS & IMGMASK) - IMGMAX;
return (imagebody[i] >> IMG2BITS) - IMGMAX;
}
| |
// File: CreditTest.java
package unit.com.bitdubai.fermat_dmp_plugin.layer.basic_wallet.bitcoin_wallet.developer.bitdubai.version_1.structure.BitcoinWalletBasicWalletAvailableBalance;
import com.bitdubai.fermat_api.layer.dmp_basic_wallet.common.exceptions.CantRegisterCreditException;
import com.bitdubai.fermat_api.layer.dmp_basic_wallet.bitcoin_wallet.interfaces.BitcoinWalletTransactionRecord;
import com.bitdubai.fermat_api.layer.osa_android.database_system.Database;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTable;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTableRecord;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTransaction;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantLoadTableToMemoryException;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantOpenDatabaseException;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.DatabaseNotFoundException;
import com.bitdubai.fermat_dmp_plugin.layer.basic_wallet.bitcoin_wallet.developer.bitdubai.version_1.structure.BitcoinWalletBasicWalletAvailableBalance;
import com.bitdubai.fermat_dmp_plugin.layer.basic_wallet.bitcoin_wallet.developer.bitdubai.version_1.structure.BitcoinWalletDatabaseConstants;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.ArrayList;
import java.util.List;
import unit.com.bitdubai.fermat_dmp_plugin.layer.basic_wallet.bitcoin_wallet.developer.bitdubai.version_1.structure.mocks.MockBitcoinWalletTransactionRecord;
import unit.com.bitdubai.fermat_dmp_plugin.layer.basic_wallet.bitcoin_wallet.developer.bitdubai.version_1.structure.mocks.MockDatabaseTableRecord;
import static com.googlecode.catchexception.CatchException.catchException;
import static com.googlecode.catchexception.CatchException.caughtException;
import static org.fest.assertions.api.Assertions.*;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.when;
/**
* Created by jorgegonzalez on 2015.07.14..
*/
@RunWith(MockitoJUnitRunner.class)
public class CreditTest {
@Mock
private Database mockDatabase;
@Mock
private DatabaseTable mockWalletTable;
@Mock
private DatabaseTable mockBalanceTable;
@Mock
private DatabaseTransaction mockTransaction;
private List<DatabaseTableRecord> mockRecords;
private DatabaseTableRecord mockBalanceRecord;
private DatabaseTableRecord mockWalletRecord;
private BitcoinWalletTransactionRecord mockTransactionRecord;
private BitcoinWalletBasicWalletAvailableBalance testBalance;
@Before
public void setUpMocks(){
mockTransactionRecord = new MockBitcoinWalletTransactionRecord();
mockBalanceRecord = new MockDatabaseTableRecord();
        mockWalletRecord = new MockDatabaseTableRecord();

        mockRecords = new ArrayList<>();
        mockRecords.add(mockBalanceRecord);

        setUpMockitoRules();
    }
public void setUpMockitoRules(){
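        // stub the wallet database: both named tables and a fresh transaction resolve to the mocks built above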
when(mockDatabase.getTable(BitcoinWalletDatabaseConstants.BITCOIN_WALLET_TABLE_NAME)).thenReturn(mockWalletTable);
when(mockDatabase.getTable(BitcoinWalletDatabaseConstants.BITCOIN_WALLET_BALANCE_TABLE_NAME)).thenReturn(mockBalanceTable);
when(mockBalanceTable.getRecords()).thenReturn(mockRecords);
when(mockWalletTable.getEmptyRecord()).thenReturn(mockWalletRecord);
when(mockDatabase.newTransaction()).thenReturn(mockTransaction);
}
@Before
public void setUpAvailableBalance(){
testBalance = new BitcoinWalletBasicWalletAvailableBalance(mockDatabase);
}
@Test
public void Credit_SuccesfullyInvoked_ReturnsAvailableBalance() throws Exception{
catchException(testBalance).credit(mockTransactionRecord);
assertThat(caughtException()).isNull();
}
@Test
public void Credit_OpenDatabaseCantOpenDatabase_ThrowsCantRegisterCreditException() throws Exception{
doThrow(new CantOpenDatabaseException("MOCK", null, null, null)).when(mockDatabase).openDatabase();
catchException(testBalance).credit(mockTransactionRecord);
assertThat(caughtException())
.isNotNull()
.isInstanceOf(CantRegisterCreditException.class);
}
@Test
public void Credit_OpenDatabaseDatabaseNotFound_ThrowsCantRegisterCreditException() throws Exception{
doThrow(new DatabaseNotFoundException("MOCK", null, null, null)).when(mockDatabase).openDatabase();
catchException(testBalance).credit(mockTransactionRecord);
assertThat(caughtException())
.isNotNull()
.isInstanceOf(CantRegisterCreditException.class);
}
@Test
public void Credit_DaoCantCalculateBalanceException_ThrowsCantRegisterCreditException() throws Exception{
doThrow(new CantLoadTableToMemoryException("MOCK", null, null, null)).when(mockWalletTable).loadToMemory();
catchException(testBalance).credit(mockTransactionRecord);
assertThat(caughtException())
.isNotNull()
.isInstanceOf(CantRegisterCreditException.class);
}
@Test
public void Credit_GeneralException_ThrowsCantRegisterCreditException() throws Exception{
when(mockBalanceTable.getRecords()).thenReturn(null);
catchException(testBalance).credit(mockTransactionRecord);
assertThat(caughtException())
.isNotNull()
.isInstanceOf(CantRegisterCreditException.class);
}
}
# File: urls.py
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
from front.views import *
from front.views import views as front_views
from django.views.decorators.csrf import csrf_exempt
if not settings.DEBUG:
s = {'SSL': settings.ENABLE_SSL}
else:
s = {}
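# 's' is passed to the SSL-aware views below as extra kwargs so they can tell
# whether SSL should be enforced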
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'DisSoNet.views.home', name='home'),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^test/', front_views.test, name='test'),
url(r'^privacy/', front_views.privacy, name='privacy'),
url(r'^stream_debug/', front_views.stream_debug, name='stream'),
url(r'^admin/', include(admin.site.urls)),
url(r'^github/setup/', GitHubView.as_view(), s, name='initGithub'),
url(r'^accounts/login/', LoginView.as_view(), s, name='login'),
url(r'^accounts/logout/', LogoutView.as_view(), s, name='logout'),
url(r'^accounts/view/', UserView.as_view(), s, name='user_view'),
url(r'^accounts/register/', RegisterView.as_view(), s, name='register'),
url(r'^accounts/reset/$', front_views.reset, s, name='reset'),
url(r'^accounts/reset/e/(?P<email>[\w-]+)/$', front_views.reset,
s, name='reset'),
url(r'^accounts/reset/done/$', front_views.reset_done,
s, name='reset_done'),
url(r'^accounts/reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>[\w-]+)/$',
front_views.reset_confirm, s, name='reset_confirm'),
url(r'^accounts/reset/complete/$', front_views.reset_complete,
name='reset_complete'),
    # urls for post(s)/
    url(r'^post/?$', PublicPosts.as_view(), name='public_posts'),
    url(r'^posts/?$', PublicPosts.as_view(), name='public_posts'),

    # urls for post(s)/<post_id>/
url(r'^posts/(?P<post_id>[\w-]+)/?$', PostResource.as_view(), name='post_resource'),
# urls for post(s)/<post_id>/comments/
url(r'^post/(?P<post_id>[\w-]+)/comments/?$',
csrf_exempt(PostComments.as_view()), name='post_comments'),
url(r'^posts/(?P<post_id>[\w-]+)/comments/?$',
csrf_exempt(PostComments.as_view()), name='post_comments'),
url(r'^author/posts/?$', AuthorStream.as_view(), name='author_posts'),
url(r'^author/(?P<author_id>[\w-]+)/posts/?$', VisiblePostToUser.as_view(),
name='visibile_posts'),
url(r'^author/(?P<author_id>[\w-]+)/?$', AuthorProfile.as_view(),
name='author_profile'),
url(r'^friendrequest/$', csrf_exempt(FriendRequestView.as_view()),
name='friend_request'),
url(r'^friends/(?P<user_id_1>[\w-]+)/(?P<user_id_2>[\w-]+)/$',
AreFriends.as_view(), name='are_friends'),
url(r'^friends/?', FriendsView.as_view(),
s, name='friends_view'),
url(r'^test_rest/(?P<id>[\w-]+)/?$', front_views.test_rest, name="test_rest"),
)<|fim▁end|>
|
# urls for post(s)/
url(r'^post/?$', PublicPosts.as_view(), name='public_posts'),
url(r'^posts/?$', PublicPosts.as_view(), name='public_posts'),
# urls for post(s)/<post_id>/
|
// File: logger.rs
extern crate env_logger;
extern crate log_panics;
extern crate log;
#[cfg(target_os = "android")]
extern crate android_logger;
use self::env_logger::Builder as EnvLoggerBuilder;
use self::log::{LevelFilter, Level};
use std::env;
use std::io::Write;
#[cfg(target_os = "android")]
use self::android_logger::Filter;
use log::{Record, Metadata};
use libc::{c_void, c_char};
use std::ffi::CString;
use std::ptr;
use indy_api_types::errors::prelude::*;
use indy_utils::ctypes;
use indy_api_types::errors::IndyErrorKind::InvalidStructure;
pub static mut LOGGER_STATE: LoggerState = LoggerState::Default;
pub enum LoggerState {
Default,
Custom
}
impl LoggerState {
pub fn get(&self) -> (*const c_void, Option<EnabledCB>, Option<LogCB>, Option<FlushCB>) {
match self {
LoggerState::Default => (ptr::null(), Some(LibindyDefaultLogger::enabled), Some(LibindyDefaultLogger::log), Some(LibindyDefaultLogger::flush)),
LoggerState::Custom => unsafe { (CONTEXT, ENABLED_CB, LOG_CB, FLUSH_CB) },
}
}
}
pub type EnabledCB = extern fn(context: *const c_void,
level: u32,
target: *const c_char) -> bool;
pub type LogCB = extern fn(context: *const c_void,
level: u32,
target: *const c_char,
message: *const c_char,
module_path: *const c_char,
file: *const c_char,
line: u32);
pub type FlushCB = extern fn(context: *const c_void);
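// module-level state for the custom logger: written once by LibindyLogger::init
// and read back through LoggerState::get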
static mut CONTEXT: *const c_void = ptr::null();
static mut ENABLED_CB: Option<EnabledCB> = None;
static mut LOG_CB: Option<LogCB> = None;
static mut FLUSH_CB: Option<FlushCB> = None;
#[cfg(debug_assertions)]
const DEFAULT_MAX_LEVEL: LevelFilter = LevelFilter::Trace;
#[cfg(not(debug_assertions))]
const DEFAULT_MAX_LEVEL: LevelFilter = LevelFilter::Info;
pub struct LibindyLogger {
context: *const c_void,
enabled: Option<EnabledCB>,
log: LogCB,
flush: Option<FlushCB>,
}
impl LibindyLogger {
fn new(context: *const c_void, enabled: Option<EnabledCB>, log: LogCB, flush: Option<FlushCB>) -> Self {
LibindyLogger { context, enabled, log, flush }
}
}
impl log::Log for LibindyLogger {
fn enabled(&self, metadata: &Metadata) -> bool {
if let Some(enabled_cb) = self.enabled {
let level = metadata.level() as u32;
let target = CString::new(metadata.target()).unwrap();
enabled_cb(self.context,
level,
target.as_ptr(),
)
} else { true }
}
fn log(&self, record: &Record) {
let log_cb = self.log;
let level = record.level() as u32;
let target = CString::new(record.target()).unwrap();
let message = CString::new(record.args().to_string()).unwrap();
let module_path = record.module_path().map(|a| CString::new(a).unwrap());
let file = record.file().map(|a| CString::new(a).unwrap());
let line = record.line().unwrap_or(0);
log_cb(self.context,
level,
target.as_ptr(),
message.as_ptr(),
module_path.as_ref().map(|p| p.as_ptr()).unwrap_or(ptr::null()),
file.as_ref().map(|p| p.as_ptr()).unwrap_or(ptr::null()),
line,
)
}
fn flush(&self) {
if let Some(flush_cb) = self.flush {
flush_cb(self.context)
}
}
}
unsafe impl Sync for LibindyLogger {}
unsafe impl Send for LibindyLogger {}
impl LibindyLogger {
pub fn init(context: *const c_void, enabled: Option<EnabledCB>, log: LogCB, flush: Option<FlushCB>, max_lvl: Option<u32>) -> Result<(), IndyError> {
let logger = LibindyLogger::new(context, enabled, log, flush);
log::set_boxed_logger(Box::new(logger))?;
let max_lvl = match max_lvl {
Some(max_lvl) => LibindyLogger::map_u32_lvl_to_filter(max_lvl)?,
None => DEFAULT_MAX_LEVEL,
};
log::set_max_level(max_lvl);
unsafe {
LOGGER_STATE = LoggerState::Custom;
CONTEXT = context;
ENABLED_CB = enabled;
LOG_CB = Some(log);
FLUSH_CB = flush
};
Ok(())
}
let max_level = match max_level {
0 => LevelFilter::Off,
1 => LevelFilter::Error,
2 => LevelFilter::Warn,
3 => LevelFilter::Info,
4 => LevelFilter::Debug,
5 => LevelFilter::Trace,
_ => return Err(IndyError::from(InvalidStructure)),
};
Ok(max_level)
}
pub fn set_max_level(max_level: u32) -> IndyResult<LevelFilter> {
let max_level_filter = LibindyLogger::map_u32_lvl_to_filter(max_level)?;
log::set_max_level(max_level_filter);
Ok(max_level_filter)
}
}
pub struct LibindyDefaultLogger;
impl LibindyDefaultLogger {
pub fn init(pattern: Option<String>) -> Result<(), IndyError> {
let pattern = pattern.or_else(|| env::var("RUST_LOG").ok());
        log_panics::init(); // Logging of panics is essential for Android, as Android does not log native code's stdout
if cfg!(target_os = "android") {
#[cfg(target_os = "android")]
let log_filter = match pattern {
Some(val) => match val.to_lowercase().as_ref() {
"error" => Filter::default().with_min_level(log::Level::Error),
"warn" => Filter::default().with_min_level(log::Level::Warn),
"info" => Filter::default().with_min_level(log::Level::Info),
"debug" => Filter::default().with_min_level(log::Level::Debug),
"trace" => Filter::default().with_min_level(log::Level::Trace),
_ => Filter::default().with_min_level(log::Level::Error),
},
None => Filter::default().with_min_level(log::Level::Error)
};
//Set logging to off when deploying production android app.
#[cfg(target_os = "android")]
android_logger::init_once(log_filter);
info!("Logging for Android");
} else {
EnvLoggerBuilder::new()
.format(|buf, record| writeln!(buf, "{:>5}|{:<30}|{:>35}:{:<4}| {}", record.level(), record.target(), record.file().get_or_insert(""), record.line().get_or_insert(0), record.args()))
.filter(None, LevelFilter::Off)
.parse_filters(pattern.as_ref().map(String::as_str).unwrap_or(""))
.try_init()?;
}
unsafe { LOGGER_STATE = LoggerState::Default };
Ok(())
}
extern fn enabled(_context: *const c_void,
level: u32,
target: *const c_char) -> bool {
let level = get_level(level);
let target = ctypes::c_str_to_string(target).unwrap().unwrap();
let metadata: Metadata = Metadata::builder()
.level(level)
.target(&target)
.build();
log::logger().enabled(&metadata)
}
extern fn log(_context: *const c_void,
level: u32,
target: *const c_char,
args: *const c_char,
module_path: *const c_char,
file: *const c_char,
line: u32) {
let target = ctypes::c_str_to_string(target).unwrap().unwrap();
let args = ctypes::c_str_to_string(args).unwrap().unwrap();
let module_path = ctypes::c_str_to_string(module_path).unwrap();
let file = ctypes::c_str_to_string(file).unwrap();
let level = get_level(level);
log::logger().log(
&Record::builder()
.args(format_args!("{}", args))
.level(level)
.target(&target)
.module_path(module_path)
.file(file)
.line(Some(line))
.build(),
);
}
extern fn flush(_context: *const c_void) {
log::logger().flush()
}
}
fn get_level(level: u32) -> Level {
match level {
1 => Level::Error,
2 => Level::Warn,
3 => Level::Info,
4 => Level::Debug,
5 => Level::Trace,
_ => unreachable!(),
}
}
#[macro_export]
macro_rules! try_log {
($expr:expr) => (match $expr {
Ok(val) => val,
Err(err) => {
error!("try_log! | {}", err);
return Err(From::from(err))
}
})
}
macro_rules! _map_err {
($lvl:expr, $expr:expr) => (
|err| {
log!($lvl, "{} - {}", $expr, err);
err
}
);
($lvl:expr) => (
|err| {
log!($lvl, "{}", err);
err
}
)
}
#[macro_export]
macro_rules! map_err_err {
() => ( _map_err!(::log::Level::Error) );
($($arg:tt)*) => ( _map_err!(::log::Level::Error, $($arg)*) )
}
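// typical use (sketch): some_fallible_call().map_err(map_err_err!("failed to do X"))?;
// the generated closure logs the error at ERROR level and returns it unchanged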
#[macro_export]
macro_rules! map_err_trace {
() => ( _map_err!(::log::Level::Trace) );
($($arg:tt)*) => ( _map_err!(::log::Level::Trace, $($arg)*) )
}
#[macro_export]
macro_rules! map_err_info {
() => ( _map_err!(::log::Level::Info) );
($($arg:tt)*) => ( _map_err!(::log::Level::Info, $($arg)*) )
}
#[cfg(debug_assertions)]
#[macro_export]
macro_rules! secret {
($val:expr) => {{ $val }};
}
#[cfg(not(debug_assertions))]
#[macro_export]
macro_rules! secret {
($val:expr) => {{ "_" }};
}
/*! fingoCarousel.js © heoyunjee, 2016 */
(function(global, $){
'use strict';
/**
* width: carousel width
* height: carousel height
* margin: tabpanel margin
* count: how many tabpanels will move when you click button
* col: how many columns in carousel mask
* row: how many rows in carousel mask
* infinite: infinite carousel or not(true or false)
* index: index of active tabpanel
*/
// Default Options
var defaults = {
'width': 1240,
'height': 390,
'margin': 0,
'count': 1,
'col': 1,
'row': 1,
'infinite': false,
'index': 0
};
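    // construction sketch (the actual entry point may differ; options shown are illustrative):
    // var carousel = new Carousel(document.querySelector('.my-carousel'), { col: 3, count: 3, infinite: true });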
// Constructor Function
var Carousel = function(widget, options) {
// Public
this.$widget = $(widget);
this.settings = $.extend({}, defaults, options);
this.carousel_infinite = false;
this.carousel_row = 0;
this.carousel_width = 0;
this.carousel_height = 0;
this.carousel_count = 0;
this.carousel_col = 0;
this.carousel_content_margin = 0;
this.active_index = 0;
this.carousel_one_tab = 0;
this.carousel_content_width = 0;
this.carousel_content_height= 0;
this.$carousel = null;
this.$carousel_headline = null;
this.$carousel_tablist = null;
this.$carousel_tabs = null;
this.$carousel_button_group = null;
this.$carousel_mask = null;
this.$carousel_tabpanels = null;
this.$carousel_tabpanel_imgs = null;
this.$carousel_tabpanel_content_videos = null;
this.start_tabpanel_index = 0;
        // Initial setup
this.init();
        // Wire up event handlers
this.events();
};
// Prototype Object
Carousel.prototype = {
'init': function() {
var $this = this;
var $widget = this.$widget;
            // Cache references to the carousel's internal elements
this.$carousel = $widget;
this.$carousel_headline = this.$carousel.children(':header:first');
this.$carousel_tablist = this.$carousel.children('ul').wrap('<div/>').parent();
this.$carousel_tabs = this.$carousel_tablist.find('a');
this.$carousel_tabpanels = this.$carousel.children().find('figure');
this.$carousel_content = this.$carousel_tabpanels.children().parent();
this.$carousel_tabpanel_imgs = this.$carousel.children().last().find('img').not('.icon');
this.$carousel_tabpanel_content_videos = this.$carousel.children().last().find('iframe');
this.setResponsive();
this.carousel_width = this.settings.width;
this.carousel_height = this.settings.height;
this.carousel_infinite = this.settings.infinite;
this.carousel_row = this.settings.row;
this.carousel_count = this.settings.count;
this.carousel_col = this.settings.col;
this.carousel_content_margin = this.settings.margin;
this.start_tabpanel_index = this.settings.index;
            // Dynamically create and append the carousel structure
this.createPrevNextButtons();
this.createCarouselMask();
            // Assign the classes that style each role
this.settingClass();
this.settingSliding();
},
'createPrevNextButtons': function() {
var button_group = ['<div>',
'<button type="button"></button>',
'<button type="button"></button>',
'</div>'].join('');
this.$carousel_button_group = $(button_group).insertAfter( this.$carousel_tablist );
},
'createCarouselMask': function() {
this.$carousel_tabpanels.parent().closest('div').wrap('<div/>');
this.$carousel_mask = this.$carousel.children().last();
},
'settingClass': function() {
this.$carousel.addClass('ui-carousel');
this.$carousel_headline.addClass('ui-carousel-headline');
this.$carousel_tablist.addClass('ui-carousel-tablist');
this.$carousel_tabs.addClass('ui-carousel-tab');
this.$carousel_button_group.addClass('ui-carousel-button-group');
this.$carousel_button_group.children().first().addClass('ui-carousel-prev-button');
this.$carousel_button_group.children().last().addClass('ui-carousel-next-button');
this.$carousel_tabpanels.addClass('ui-carousel-tabpanel');
this.$carousel_tabpanels.parent().closest('div').addClass('ui-carousel-tabpanel-wrapper');
this.$carousel_mask.addClass('ui-carousel-mask');
this.$carousel_tabpanel_imgs.addClass('ui-carousel-image');
this.$carousel_tabpanel_content_videos.addClass('ui-carousel-video');
if(this.carousel_row === 2) {
var j = 1;
var j2 = 1;
for(var i = 0, l = this.$carousel_tabpanels.length; i < l; i++) {
if(i%2===1){
this.$carousel_tabpanels.eq(i).addClass('top-2');
this.$carousel_tabpanels.eq(i).addClass('left-' + j);
j++;
} else {
this.$carousel_tabpanels.eq(i).addClass('top-1');
this.$carousel_tabpanels.eq(i).addClass('left-' + j2);
j2++;
}
}
}
},
'settingSliding': function() {
var $carousel = this.$carousel;
var $tabpanel = this.$carousel_tabpanels;
var $tabpanel_wrapper = $tabpanel.parent();<|fim▁hole|>
this.carousel_content_width = this.$carousel_tabpanel_imgs.eq(0).width();
// Set carousel height
$carousel.height(this.carousel_height);
// Set carousel tabpanel(div or img) size and margin
if(this.settings.col === 1) {
$tabpanel.width($carousel.width());
} else {
$tabpanel
.width(this.carousel_content_width)
.css('margin-right', this.carousel_content_margin);
}
// Set carousel tabpanel wrapper width
$tabpanel_wrapper.width(($tabpanel.width() + this.carousel_content_margin) * ($tabpanel.length + 1));
// Set carousel one tab mask width
this.carousel_one_tab = ($tabpanel.width() + this.carousel_content_margin) * this.carousel_count;
if(this.start_tabpanel_index !== 0) {
for(var i = 0, l = this.start_tabpanel_index + 1; i < l; i++) {
this.$carousel_tabpanels.last().parent().prepend(this.$carousel_tabpanels.eq($tabpanel.length - (i + 1)));
}
}
// Initialize carousel state
if(this.carousel_infinite === true) {
// Initialize the active tabpanel state
this.$carousel_tabpanels.eq(this.active_index).radioClass('active');
// Initialize the tabpanel wrapper position
$tabpanel_wrapper.css('left', -this.carousel_one_tab);
} else if(this.carousel_col !== 1){
// Not an infinite carousel:
// disable the prev button
this.prevBtnDisable();
}
// Initialize the active indicator state
this.$carousel_tabs.eq(this.active_index).parent().radioClass('active');
},
'prevBtnActive': function() {
this.$carousel.find('.ui-carousel-prev-button')
.attr('aria-disabled', 'false')
.css({'opacity': 1, 'display': 'block'});
},
'prevBtnDisable': function() {
this.$carousel.find('.ui-carousel-prev-button')
.attr('aria-disabled', 'true')
.css({'opacity': 0, 'display': 'none'});
},
'events': function() {
var widget = this;
var $carousel = widget.$carousel;
var $tabs = widget.$carousel_tabs;
var $buttons = widget.$carousel_button_group.children();
// buttons event
$buttons.on('click', function() {
if ( this.className === 'ui-carousel-prev-button' ) {
widget.prevPanel();
} else {
widget.nextPanel();
}
});
// tabs event
$.each($tabs, function(index) {
var $tab = $tabs.eq(index);
$tab.on('click', $.proxy(widget.viewTabpanel, widget, index, null));
});
},
'setActiveIndex': function(index) {
// Change the active index to the index the user clicked
this.active_index = index;
// Maximum number of tabs
var carousel_tabs_max = (this.$carousel_tabpanels.length / (this.carousel_count * this.carousel_row)) - 1;
// If the panels do not completely fill one mask
if((this.$carousel_tabpanels.length % (this.carousel_count * this.carousel_row)) !== 0) {
carousel_tabs_max = carousel_tabs_max + 1;
}
// If we move past the first or last index, wrap around to the last or first
if ( this.active_index < 0 ) {
this.active_index = carousel_tabs_max;
}
if ( this.active_index > carousel_tabs_max ) {
this.active_index = 0;
}
return this.active_index;
},
'nextPanel': function() {
if(!this.$carousel_tabpanels.parent().is(':animated')) {
var active_index = this.setActiveIndex(this.active_index + 1);
this.viewTabpanel(active_index, 'next');
}
},
'prevPanel': function() {
if(!this.$carousel_tabpanels.parent().is(':animated')) {
var active_index = this.setActiveIndex(this.active_index - 1);
this.viewTabpanel(active_index, 'prev');
}
},
'viewTabpanel': function(index, btn, e) {
// When triggered by a user click we receive an event object,
// so check for it and block the browser's default action
if (e) { e.preventDefault(); }
this.active_index = index;
var $carousel_wrapper = this.$carousel_tabpanels.eq(index).parent();
var one_width = this.carousel_one_tab;
// Infinite Carousel
if(this.carousel_infinite === true) {
// Activate the tabpanel for the given index
this.$carousel_tabpanels.eq(index).radioClass('active');
// When the next button is pressed
if(btn === 'next') {
$carousel_wrapper.stop().animate({
'left': -one_width * 2
}, 500, 'easeOutExpo', function() {
$carousel_wrapper.append($carousel_wrapper.children().first());
$carousel_wrapper.css('left', -one_width);
this.animating = false;
});
// When the prev button is pressed
} else if(btn === 'prev') {
$carousel_wrapper.stop().animate({
'left': 0
}, 500, 'easeOutExpo', function() {
$carousel_wrapper.prepend($carousel_wrapper.children().last());
$carousel_wrapper.css('left', -one_width);
});
}
} else if(this.carousel_infinite === false) {
if(this.carousel_col !== 1) {
if(index === 0) {
this.prevBtnDisable();
} else {
this.prevBtnActive();
}
}
$carousel_wrapper.stop().animate({
'left': index * -this.carousel_one_tab
}, 600, 'easeOutExpo');
}
// Activate the indicator's radio class
this.$carousel_tabs.eq(index).parent().radioClass('active');
},
'setResponsive': function() {
if(global.innerWidth <= 750) {
this.settings.width = this.settings.width.mobile || this.settings.width;
this.settings.height = this.settings.height.mobile || this.settings.height;
this.settings.margin = this.settings.margin.mobile || this.settings.margin;
this.settings.count = this.settings.count.mobile || this.settings.count;
this.settings.col = this.settings.col.mobile || this.settings.col;
this.settings.row = this.settings.row.mobile || this.settings.row;
if(this.settings.infinite.mobile !== undefined) {
this.settings.infinite = this.settings.infinite.mobile;
}
this.settings.index = 0;
} else if(global.innerWidth <= 1024) {
this.settings.width = this.settings.width.tablet || this.settings.width;
this.settings.height = this.settings.height.tablet || this.settings.height;
this.settings.margin = this.settings.margin.tablet || this.settings.margin;
this.settings.count = this.settings.count.tablet || this.settings.count;
this.settings.col = this.settings.col.tablet || this.settings.col;
this.settings.row = this.settings.row.tablet || this.settings.row;
if(this.settings.infinite.tablet !== undefined) {
this.settings.infinite = this.settings.infinite.tablet;
}
this.settings.index = this.settings.index.tablet || this.settings.index;
} else {
this.settings.width = this.settings.width.desktop || this.settings.width;
this.settings.height = this.settings.height.desktop || this.settings.height;
this.settings.margin = this.settings.margin.desktop || this.settings.margin;
this.settings.count = this.settings.count.desktop || this.settings.count;
this.settings.col = this.settings.col.desktop || this.settings.col;
this.settings.row = this.settings.row.desktop || this.settings.row;
if(this.settings.infinite.desktop !== undefined) {
this.settings.infinite = this.settings.infinite.desktop;
}
this.settings.index = this.settings.index.desktop || this.settings.index;
}
}
};
// jQuery Plugin
$.fn.fingoCarousel = function(options){
var $collection = this; // jQuery {}
return $.each($collection, function(idx){
var $this = $collection.eq(idx);
var _instance = new Carousel( this, options ); // componentize this element
$this.data('fingoCarousel', _instance);
});
};
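// Illustrative usage sketch (assumed selector/markup, not part of the original file):
//
//   $('.carousel').fingoCarousel({
//     width: { desktop: 1240, tablet: 1024, mobile: 750 },
//     col: 4, count: 4, margin: 20, infinite: true
//   });
//
// Each matched element gets its own Carousel instance, retrievable later
// via $(element).data('fingoCarousel').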
})(this, this.jQuery);<|fim▁end|>
|
var $carousel_mask = this.$carousel_mask;
var carousel_tabpannel_width = ($carousel_mask.width() - (this.carousel_col - 1) * this.carousel_content_margin) / this.carousel_col;
|
<|file_name|>library.py<|end_file_name|><|fim▁begin|>import os
import re
import gettext
import locale
import threading # libsearchfilter_toggle starts thread libsearchfilter_loop
import operator
import gtk
import gobject
import pango
import ui
import misc
import formatting
import mpdhelper as mpdh
from consts import consts
import breadcrumbs
def library_set_data(album=None, artist=None, genre=None, year=None,
path=None):
if album is not None:
album = unicode(album)
if artist is not None:
artist = unicode(artist)
if genre is not None:
genre = unicode(genre)
if year is not None:
year = unicode(year)
if path is not None:
path = unicode(path)
return (album, artist, genre, year, path)
def library_get_data(data, *args):
name_to_index = {'album': 0, 'artist': 1, 'genre': 2, 'year': 3, 'path': 4}
# Data retrieved from the gtktreeview model is not in
# unicode anymore, so convert it.
retlist = [unicode(data[name_to_index[arg]]) if data[name_to_index[arg]] \
else None for arg in args]
if len(retlist) == 1:
return retlist[0]
else:
return retlist
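# Illustrative sketch (not part of the original module): library_set_data and
# library_get_data round-trip a fixed-order tuple
# (album, artist, genre, year, path), e.g.
#
#     data = library_set_data(artist=u'Foo', album=u'Bar')
#     artist, album = library_get_data(data, 'artist', 'album')  # u'Foo', u'Bar'
#     path = library_get_data(data, 'path')                      # None (never set)
#
# Fields that were never set come back as None.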
class Library(object):
def __init__(self, config, mpd, artwork, TAB_LIBRARY, album_filename,
settings_save, filtering_entry_make_red,
filtering_entry_revert_color, filter_key_pressed,
on_add_item, connected, on_library_button_press, new_tab,
get_multicd_album_root_dir):
self.artwork = artwork
self.config = config
self.mpd = mpd
self.librarymenu = None # cyclic dependency, set later
self.album_filename = album_filename
self.settings_save = settings_save
self.filtering_entry_make_red = filtering_entry_make_red
self.filtering_entry_revert_color = filtering_entry_revert_color
self.filter_key_pressed = filter_key_pressed
self.on_add_item = on_add_item
self.connected = connected
self.on_library_button_press = on_library_button_press
self.get_multicd_album_root_dir = get_multicd_album_root_dir
self.NOTAG = _("Untagged")
self.VAstr = _("Various Artists")
self.search_terms = [_('Artist'), _('Title'), _('Album'), _('Genre'),
_('Filename'), _('Everything')]
self.search_terms_mpd = ['artist', 'title', 'album', 'genre', 'file',
'any']
self.libfilterbox_cmd_buf = None
self.libfilterbox_cond = None
self.libfilterbox_source = None
self.prevlibtodo_base = None
self.prevlibtodo_base_results = None
self.prevlibtodo = None
self.save_timeout = None
self.libsearch_last_tooltip = None
self.lib_view_filesystem_cache = None
self.lib_view_artist_cache = None
self.lib_view_genre_cache = None
self.lib_view_album_cache = None
self.lib_list_genres = None
self.lib_list_artists = None
self.lib_list_albums = None
self.lib_list_years = None
self.view_caches_reset()
self.libraryvbox = gtk.VBox()
self.library = ui.treeview()
self.library_selection = self.library.get_selection()
self.breadcrumbs = breadcrumbs.CrumbBox()
self.breadcrumbs.props.spacing = 2
expanderwindow2 = ui.scrollwindow(add=self.library)
self.searchbox = gtk.HBox()
self.searchcombo = ui.combo(items=self.search_terms)
self.searchcombo.set_tooltip_text(_("Search terms"))
self.searchtext = ui.entry()
self.searchtext.set_tooltip_text(_("Search library"))
self.searchbutton = ui.button(img=ui.image(stock=gtk.STOCK_CANCEL),
h=self.searchcombo.size_request()[1])
self.searchbutton.set_no_show_all(True)
self.searchbutton.hide()
self.searchbutton.set_tooltip_text(_("End Search"))
self.libraryview = ui.button(relief=gtk.RELIEF_NONE)
self.libraryview.set_tooltip_text(_("Library browsing view"))
# disabled as breadcrumbs replace this:
# self.searchbox.pack_start(self.libraryview, False, False, 1)
# self.searchbox.pack_start(gtk.VSeparator(), False, False, 2)
self.searchbox.pack_start(ui.label(_("Search:")), False, False, 3)
self.searchbox.pack_start(self.searchtext, True, True, 2)
self.searchbox.pack_start(self.searchcombo, False, False, 2)
self.searchbox.pack_start(self.searchbutton, False, False, 2)
self.libraryvbox.pack_start(self.breadcrumbs, False, False, 2)
self.libraryvbox.pack_start(expanderwindow2, True, True)
self.libraryvbox.pack_start(self.searchbox, False, False, 2)
self.tab = new_tab(self.libraryvbox, gtk.STOCK_HARDDISK, TAB_LIBRARY,
self.library)
# Assign some pixbufs for use in self.library
self.openpb2 = self.library.render_icon(gtk.STOCK_OPEN,
gtk.ICON_SIZE_LARGE_TOOLBAR)
self.harddiskpb2 = self.library.render_icon(gtk.STOCK_HARDDISK,
gtk.ICON_SIZE_LARGE_TOOLBAR)
self.openpb = self.library.render_icon(gtk.STOCK_OPEN,
gtk.ICON_SIZE_MENU)
self.harddiskpb = self.library.render_icon(gtk.STOCK_HARDDISK,
gtk.ICON_SIZE_MENU)
self.albumpb = gtk.gdk.pixbuf_new_from_file_at_size(
album_filename, consts.LIB_COVER_SIZE, consts.LIB_COVER_SIZE)
self.genrepb = self.library.render_icon('gtk-orientation-portrait',
gtk.ICON_SIZE_LARGE_TOOLBAR)
self.artistpb = self.library.render_icon('artist',
gtk.ICON_SIZE_LARGE_TOOLBAR)
self.sonatapb = self.library.render_icon('sonata', gtk.ICON_SIZE_MENU)
# list of the library views: (id, name, icon name, label)
self.VIEWS = [
(consts.VIEW_FILESYSTEM, 'filesystem',
gtk.STOCK_HARDDISK, _("Filesystem")),
(consts.VIEW_ALBUM, 'album',
'album', _("Albums")),
(consts.VIEW_ARTIST, 'artist',
'artist', _("Artists")),
(consts.VIEW_GENRE, 'genre',
gtk.STOCK_ORIENTATION_PORTRAIT, _("Genres")),
]
self.library_view_assign_image()
self.library.connect('row_activated', self.on_library_row_activated)
self.library.connect('button_press_event',
self.on_library_button_press)
self.library.connect('key-press-event', self.on_library_key_press)
self.library.connect('query-tooltip', self.on_library_query_tooltip)
expanderwindow2.connect('scroll-event', self.on_library_scrolled)
self.libraryview.connect('clicked', self.library_view_popup)
self.searchtext.connect('key-press-event',
self.libsearchfilter_key_pressed)
self.searchtext.connect('activate', self.libsearchfilter_on_enter)
self.searchbutton.connect('clicked', self.on_search_end)
self.libfilter_changed_handler = self.searchtext.connect(
'changed', self.libsearchfilter_feed_loop)
searchcombo_changed_handler = self.searchcombo.connect(
'changed', self.on_library_search_combo_change)
# Initialize library data and widget
self.libraryposition = {}
self.libraryselectedpath = {}
self.searchcombo.handler_block(searchcombo_changed_handler)
self.searchcombo.set_active(self.config.last_search_num)
self.searchcombo.handler_unblock(searchcombo_changed_handler)
self.librarydata = gtk.ListStore(gtk.gdk.Pixbuf, gobject.TYPE_PYOBJECT,
str)
self.library.set_model(self.librarydata)
self.library.set_search_column(2)
self.librarycell = gtk.CellRendererText()
self.librarycell.set_property("ellipsize", pango.ELLIPSIZE_END)
self.libraryimg = gtk.CellRendererPixbuf()
self.librarycolumn = gtk.TreeViewColumn()
self.librarycolumn.pack_start(self.libraryimg, False)
self.librarycolumn.pack_start(self.librarycell, True)
self.librarycolumn.set_attributes(self.libraryimg, pixbuf=0)
self.librarycolumn.set_attributes(self.librarycell, markup=2)
self.librarycolumn.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
self.library.append_column(self.librarycolumn)
self.library_selection.set_mode(gtk.SELECTION_MULTIPLE)
def get_libraryactions(self):
return [(name + 'view', icon, label,
None, None, self.on_libraryview_chosen)
for _view, name, icon, label in self.VIEWS]
def get_model(self):
return self.librarydata
def get_widgets(self):
return self.libraryvbox
def get_treeview(self):
return self.library
def get_selection(self):
return self.library_selection
def set_librarymenu(self, librarymenu):
self.librarymenu = librarymenu
self.librarymenu.attach_to_widget(self.libraryview, None)
def library_view_popup(self, button):
self.librarymenu.popup(None, None, self.library_view_position_menu, 1,
0, button)
def library_view_position_menu(self, _menu, button):
x, y, _width, height = button.get_allocation()
return (self.config.x + x, self.config.y + y + height, True)
def on_libraryview_chosen(self, action):
if self.search_visible():
self.on_search_end(None)
if action.get_name() == 'filesystemview':
self.config.lib_view = consts.VIEW_FILESYSTEM
elif action.get_name() == 'artistview':
self.config.lib_view = consts.VIEW_ARTIST
elif action.get_name() == 'genreview':
self.config.lib_view = consts.VIEW_GENRE
elif action.get_name() == 'albumview':
self.config.lib_view = consts.VIEW_ALBUM
self.library.grab_focus()
self.library_view_assign_image()
self.libraryposition = {}
self.libraryselectedpath = {}
self.library_browse(self.library_set_data(path="/"))
try:
if len(self.librarydata) > 0:
self.library_selection.unselect_range((0,),
(len(self.librarydata)-1,))
except:
pass
gobject.idle_add(self.library.scroll_to_point, 0, 0)
def library_view_assign_image(self):
_view, _name, icon, label = [v for v in self.VIEWS
if v[0] == self.config.lib_view][0]
self.libraryview.set_image(ui.image(stock=icon))
self.libraryview.set_label(" " + label)
def view_caches_reset(self):
# We should call this on first load and whenever mpd is
# updated.
self.lib_view_filesystem_cache = None
self.lib_view_artist_cache = None
self.lib_view_genre_cache = None
self.lib_view_album_cache = None
self.lib_list_genres = None
self.lib_list_artists = None
self.lib_list_albums = None
self.lib_list_years = None
def on_library_scrolled(self, _widget, _event):
try:
# Use gobject.idle_add so that we can get the visible
# state of the treeview
gobject.idle_add(self._on_library_scrolled)
except:
pass
def _on_library_scrolled(self):
if not self.config.show_covers:
return
# This avoids a warning about a NULL node in get_visible_range
if not self.library.props.visible:
return
vis_range = self.library.get_visible_range()
if vis_range is None:
return
try:
start_row = int(vis_range[0][0])
end_row = int(vis_range[1][0])
except IndexError:
# get_visible_range failed
return
self.artwork.library_artwork_update(self.librarydata, start_row,
end_row, self.albumpb)
def library_browse(self, _widget=None, root=None):
# Populates the library list with entries
if not self.connected():
return
if root is None or (self.config.lib_view == consts.VIEW_FILESYSTEM \
and self.library_get_data(root, 'path') is None):
root = self.library_set_data(path="/")
if self.config.wd is None or (self.config.lib_view == \
consts.VIEW_FILESYSTEM and \
self.library_get_data(self.config.wd,
'path') is None):
self.config.wd = self.library_set_data(path="/")
prev_selection = []
prev_selection_root = False
prev_selection_parent = False
if root == self.config.wd:
# This will happen when the database is updated. So, lets save
# the current selection in order to try to re-select it after
# the update is over.
model, selected = self.library_selection.get_selected_rows()
for path in selected:
if model.get_value(model.get_iter(path), 2) == "/":
prev_selection_root = True
elif model.get_value(model.get_iter(path), 2) == "..":
prev_selection_parent = True
else:
prev_selection.append(model.get_value(model.get_iter(path),
1))
self.libraryposition[self.config.wd] = \
self.library.get_visible_rect()[1]
path_updated = True
else:
path_updated = False
new_level = self.library_get_data_level(root)
curr_level = self.library_get_data_level(self.config.wd)
# The logic below is more consistent with, e.g., thunar.
if new_level > curr_level:
# Save position and row for where we just were if we've
# navigated into a sub-directory:
self.libraryposition[self.config.wd] = \
self.library.get_visible_rect()[1]
model, rows = self.library_selection.get_selected_rows()
if len(rows) > 0:
data = self.librarydata.get_value(
self.librarydata.get_iter(rows[0]), 2)
if not data in ("..", "/"):
self.libraryselectedpath[self.config.wd] = rows[0]
elif (self.config.lib_view == consts.VIEW_FILESYSTEM and \
root != self.config.wd) \
or (self.config.lib_view != consts.VIEW_FILESYSTEM and new_level != \
curr_level):
# If we've navigated to a parent directory, don't save
# anything so that the user will enter that subdirectory
# again at the top position with nothing selected
self.libraryposition[self.config.wd] = 0
self.libraryselectedpath[self.config.wd] = None
# In case sonata is killed or crashes, we'll save the library state
# in 5 seconds (first removing any current settings_save timeouts)
if self.config.wd != root:
try:
gobject.source_remove(self.save_timeout)
except:
pass
self.save_timeout = gobject.timeout_add(5000, self.settings_save)
self.config.wd = root
self.library.freeze_child_notify()
self.librarydata.clear()
# Populate treeview with data:
bd = []
while len(bd) == 0:
if self.config.lib_view == consts.VIEW_FILESYSTEM:
bd = self.library_populate_filesystem_data(
self.library_get_data(self.config.wd, 'path'))
elif self.config.lib_view == consts.VIEW_ALBUM:
album, artist, year = self.library_get_data(self.config.wd,
'album', 'artist',
'year')
if album is not None:
bd = self.library_populate_data(artist=artist, album=album,
year=year)
else:
bd = self.library_populate_toplevel_data(albumview=True)
elif self.config.lib_view == consts.VIEW_ARTIST:
artist, album, year = self.library_get_data(self.config.wd,
'artist', 'album',
'year')
if artist is not None and album is not None:
bd = self.library_populate_data(artist=artist, album=album,
year=year)
elif artist is not None:
bd = self.library_populate_data(artist=artist)<|fim▁hole|> self.config.wd, 'genre', 'artist', 'album', 'year')
if genre is not None and artist is not None and album is \
not None:
bd = self.library_populate_data(genre=genre, artist=artist,
album=album, year=year)
elif genre is not None and artist is not None:
bd = self.library_populate_data(genre=genre, artist=artist)
elif genre is not None:
bd = self.library_populate_data(genre=genre)
else:
bd = self.library_populate_toplevel_data(genreview=True)
if len(bd) == 0:
# Nothing found; go up a level until we reach the top level
# or results are found
last_wd = self.config.wd
self.config.wd = self.library_get_parent()
if self.config.wd == last_wd:
break
for _sort, path in bd:
self.librarydata.append(path)
self.library.thaw_child_notify()
# Scroll back to set view for current dir:
self.library.realize()
gobject.idle_add(self.library_set_view, not path_updated)
if len(prev_selection) > 0 or prev_selection_root or \
prev_selection_parent:
# Retain pre-update selection:
self.library_retain_selection(prev_selection, prev_selection_root,
prev_selection_parent)
# Update library artwork as necessary
self.on_library_scrolled(None, None)
self.update_breadcrumbs()
def update_breadcrumbs(self):
# remove previous buttons
for b in self.breadcrumbs:
self.breadcrumbs.remove(b)
# add the views button first
b = ui.button(text=_(" v "), can_focus=False, relief=gtk.RELIEF_NONE)
b.connect('clicked', self.library_view_popup)
self.breadcrumbs.pack_start(b, False, False)
b.show()
# add the ellipsis explicitly XXX make this unnecessary
b = ui.label("...")
self.breadcrumbs.pack_start(b, False, False)
b.show()
# find info for current view
view, _name, icon, label = [v for v in self.VIEWS
if v[0] == self.config.lib_view][0]
# the first crumb is the root of the current view
crumbs = [(label, icon, None, self.library_set_data(path='/'))]
# rest of the crumbs are specific to the view
if view == consts.VIEW_FILESYSTEM:
path = self.library_get_data(self.config.wd, 'path')
if path and path != '/':
parts = path.split('/')
else:
parts = [] # no crumbs for /
# append a crumb for each part
for i, part in enumerate(parts):
partpath = '/'.join(parts[:i + 1])
target = self.library_set_data(path=partpath)
crumbs.append((part, gtk.STOCK_OPEN, None, target))
else:
if view == consts.VIEW_ALBUM:
# We don't want to show an artist button in album view
keys = 'genre', 'album'
nkeys = 2
else:
keys = 'genre', 'artist', 'album'
nkeys = 3
parts = self.library_get_data(self.config.wd, *keys)
# append a crumb for each part
for i, key, part in zip(range(nkeys), keys, parts):
if part is None:
continue
partdata = dict(zip(keys, parts)[:i + 1])
target = self.library_set_data(**partdata)
pb, icon = None, None
if key == 'album':
# Album artwork, with self.alumbpb as a backup:
artist, album, path = self.library_get_data(self.config.wd,
'artist', 'album', 'path')
cache_data = self.library_set_data(artist=artist,
album=album, path=path)
pb = self.artwork.get_library_artwork_cached_pb(cache_data,
None)
if pb is None:
icon = 'album'
elif key == 'artist':
icon = 'artist'
else:
icon = gtk.STOCK_ORIENTATION_PORTRAIT
crumbs.append((part, icon, pb, target))
# add a button for each crumb
for crumb in crumbs:
text, icon, pb, target = crumb
text = misc.escape_html(text)
if crumb is crumbs[-1]:
text = "<b>%s</b>" % text
label = ui.label(markup=text)
if icon:
image = ui.image(stock=icon)
elif pb:
pb = pb.scale_simple(16, 16, gtk.gdk.INTERP_HYPER)
image = ui.image(pb=pb)
b = breadcrumbs.CrumbButton(image, label)
if crumb is crumbs[-1]:
# FIXME makes the button request minimal space:
# label.props.ellipsize = pango.ELLIPSIZE_END
b.props.active = True
# FIXME why doesn't the tooltip show?
b.set_tooltip_text(label.get_label())
b.connect('toggled', self.library_browse, target)
self.breadcrumbs.pack_start(b, False, False)
b.show_all()
def library_populate_add_parent_rows(self):
return [] # disabled as breadcrumbs replace these
if self.config.lib_view == consts.VIEW_FILESYSTEM:
bd = [('0', [self.harddiskpb, self.library_set_data(path='/'),
'/'])]
bd += [('1', [self.openpb, self.library_set_data(path='..'),
'..'])]
else:
bd = [('0', [self.harddiskpb2, self.library_set_data(path='/'),
'/'])]
bd += [('1', [self.openpb2, self.library_set_data(path='..'),
'..'])]
return bd
def library_populate_filesystem_data(self, path):
# List all dirs/files at path
bd = []
if path == '/' and self.lib_view_filesystem_cache is not None:
# Use cache if possible...
bd = self.lib_view_filesystem_cache
else:
for item in self.mpd.lsinfo(path):
if 'directory' in item:
name = mpdh.get(item, 'directory').split('/')[-1]
data = self.library_set_data(path=mpdh.get(item,
'directory'))
bd += [('d' + unicode(name).lower(), [self.openpb, data,
misc.escape_html(name)])]
elif 'file' in item:
data = self.library_set_data(path=mpdh.get(item, 'file'))
bd += [('f' + unicode(mpdh.get(item, 'file')).lower(),
[self.sonatapb, data,
formatting.parse(self.config.libraryformat, item,
True)])]
bd.sort(key=operator.itemgetter(0))
if path != '/' and len(bd) > 0:
bd = self.library_populate_add_parent_rows() + bd
if path == '/':
self.lib_view_filesystem_cache = bd
return bd
def library_get_toplevel_cache(self, genreview=False, artistview=False,
albumview=False):
if genreview and self.lib_view_genre_cache is not None:
bd = self.lib_view_genre_cache
elif artistview and self.lib_view_artist_cache is not None:
bd = self.lib_view_artist_cache
elif albumview and self.lib_view_album_cache is not None:
bd = self.lib_view_album_cache
else:
return None
# Check if we can update any artwork:
for _sort, info in bd:
pb = info[0]
if pb == self.albumpb:
artist, album, path = self.library_get_data(info[1], 'artist',
'album', 'path')
key = self.library_set_data(path=path, artist=artist,
album=album)
pb2 = self.artwork.get_library_artwork_cached_pb(key, None)
if pb2 is not None:
info[0] = pb2
return bd
def library_populate_toplevel_data(self, genreview=False, artistview=False,
albumview=False):
bd = self.library_get_toplevel_cache(genreview, artistview, albumview)
if bd is not None:
# We have our cached data, woot.
return bd
bd = []
if genreview or artistview:
# Only for artist/genre views, album view is handled differently
# since multiple artists can have the same album name
if genreview:
items = self.library_return_list_items('genre')
pb = self.genrepb
else:
items = self.library_return_list_items('artist')
pb = self.artistpb
if not (self.NOTAG in items):
items.append(self.NOTAG)
for item in items:
if genreview:
playtime, num_songs = self.library_return_count(genre=item)
data = self.library_set_data(genre=item)
else:
playtime, num_songs = self.library_return_count(
artist=item)
data = self.library_set_data(artist=item)
if num_songs > 0:
display = misc.escape_html(item)
display += self.add_display_info(num_songs,
int(playtime) / 60)
bd += [(misc.lower_no_the(item), [pb, data, display])]
elif albumview:
albums = []
untagged_found = False
for item in self.mpd.listallinfo('/'):
if 'file' in item and 'album' in item:
album = mpdh.get(item, 'album')
artist = mpdh.get(item, 'artist', self.NOTAG)
year = mpdh.get(item, 'date', self.NOTAG)
path = self.get_multicd_album_root_dir(
os.path.dirname(mpdh.get(item, 'file')))
data = self.library_set_data(album=album, artist=artist,
year=year, path=path)
albums.append(data)
if album == self.NOTAG:
untagged_found = True
if not untagged_found:
albums.append(self.library_set_data(album=self.NOTAG))
albums = misc.remove_list_duplicates(albums, case=False)
albums = self.list_identify_VA_albums(albums)
for item in albums:
album, artist, year, path = self.library_get_data(item,
'album',
'artist',
'year',
'path')
playtime, num_songs = self.library_return_count(artist=artist,
album=album,
year=year)
if num_songs > 0:
data = self.library_set_data(artist=artist, album=album,
year=year, path=path)
display = misc.escape_html(album)
if artist and year and len(artist) > 0 and len(year) > 0 \
and artist != self.NOTAG and year != self.NOTAG:
display += " <span weight='light'>(%s, %s)</span>" \
% (misc.escape_html(artist),
misc.escape_html(year))
elif artist and len(artist) > 0 and artist != self.NOTAG:
display += " <span weight='light'>(%s)</span>" \
% misc.escape_html(artist)
elif year and len(year) > 0 and year != self.NOTAG:
display += " <span weight='light'>(%s)</span>" \
% misc.escape_html(year)
display += self.add_display_info(num_songs,
int(playtime) / 60)
bd += [(misc.lower_no_the(album), [self.albumpb, data,
display])]
bd.sort(locale.strcoll, key=operator.itemgetter(0))
if genreview:
self.lib_view_genre_cache = bd
elif artistview:
self.lib_view_artist_cache = bd
elif albumview:
self.lib_view_album_cache = bd
return bd
def list_identify_VA_albums(self, albums):
for i in range(len(albums)):
if i + consts.NUM_ARTISTS_FOR_VA - 1 > len(albums)-1:
break
VA = False
for j in range(1, consts.NUM_ARTISTS_FOR_VA):
if unicode(self.library_get_data(albums[i], 'album')).lower() \
!= unicode(self.library_get_data(albums[i + j],
'album')).lower() or \
self.library_get_data(albums[i], 'year') != \
self.library_get_data(albums[i + j], 'year') or \
self.library_get_data(albums[i], 'path') != \
self.library_get_data(albums[i + j], 'path'):
break
if unicode(self.library_get_data(albums[i], 'artist')) == \
unicode(self.library_get_data(albums[i + j], 'artist')):
albums.pop(i + j)
break
if j == consts.NUM_ARTISTS_FOR_VA - 1:
VA = True
if VA:
album, year, path = self.library_get_data(albums[i], 'album',
'year', 'path')
artist = self.VAstr
albums[i] = self.library_set_data(album=album, artist=artist,
year=year, path=path)
j = 1
while i + j <= len(albums) - 1:
if unicode(self.library_get_data(albums[i],
'album')).lower() == \
unicode(self.library_get_data(albums[i + j],
'album')).lower() \
and self.library_get_data(albums[i], 'year') == \
self.library_get_data(albums[i + j], 'year'):
albums.pop(i + j)
else:
break
return albums
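# Sketch of the heuristic above (hypothetical data): if consts.NUM_ARTISTS_FOR_VA
# consecutive entries share the same album (case-insensitively), year and path
# but list different artists, they are collapsed into a single entry whose
# artist is self.VAstr ("Various Artists"); exact artist duplicates are simply
# dropped instead.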
def get_VAstr(self):
return self.VAstr
def library_populate_data(self, genre=None, artist=None, album=None,
year=None):
# Create treeview model info
bd = []
if genre is not None and artist is None and album is None:
# Artists within a genre
artists = self.library_return_list_items('artist', genre=genre)
if len(artists) > 0:
if not self.NOTAG in artists:
artists.append(self.NOTAG)
for artist in artists:
playtime, num_songs = self.library_return_count(
genre=genre, artist=artist)
if num_songs > 0:
display = misc.escape_html(artist)
display += self.add_display_info(num_songs,
int(playtime) / 60)
data = self.library_set_data(genre=genre,
artist=artist)
bd += [(misc.lower_no_the(artist),
[self.artistpb, data, display])]
elif artist is not None and album is None:
# Albums/songs within an artist and possibly genre
# Albums first:
if genre is not None:
albums = self.library_return_list_items('album', genre=genre,
artist=artist)
else:
albums = self.library_return_list_items('album', artist=artist)
for album in albums:
if genre is not None:
years = self.library_return_list_items('date', genre=genre,
artist=artist,
album=album)
else:
years = self.library_return_list_items('date',
artist=artist,
album=album)
if not self.NOTAG in years:
years.append(self.NOTAG)
for year in years:
if genre is not None:
playtime, num_songs = self.library_return_count(
genre=genre, artist=artist, album=album, year=year)
if num_songs > 0:
files = self.library_return_list_items(
'file', genre=genre, artist=artist,
album=album, year=year)
path = os.path.dirname(files[0])
data = self.library_set_data(genre=genre,
artist=artist,
album=album,
year=year, path=path)
else:
playtime, num_songs = self.library_return_count(
artist=artist, album=album, year=year)
if num_songs > 0:
files = self.library_return_list_items(
'file', artist=artist, album=album, year=year)
path = os.path.dirname(files[0])
data = self.library_set_data(artist=artist,
album=album,
year=year, path=path)
if num_songs > 0:
cache_data = self.library_set_data(artist=artist,
album=album,
path=path)
display = misc.escape_html(album)
if year and len(year) > 0 and year != self.NOTAG:
display += " <span weight='light'>(%s)</span>" \
% misc.escape_html(year)
display += self.add_display_info(num_songs,
int(playtime) / 60)
ordered_year = year
if ordered_year == self.NOTAG:
ordered_year = '9999'
pb = self.artwork.get_library_artwork_cached_pb(
cache_data, self.albumpb)
bd += [(ordered_year + misc.lower_no_the(album),
[pb, data, display])]
# Now, songs not in albums:
bd += self.library_populate_data_songs(genre, artist, self.NOTAG,
None)
else:
# Songs within an album, artist, year, and possibly genre
bd += self.library_populate_data_songs(genre, artist, album, year)
if len(bd) > 0:
bd = self.library_populate_add_parent_rows() + bd
bd.sort(locale.strcoll, key=operator.itemgetter(0))
return bd
def library_populate_data_songs(self, genre, artist, album, year):
bd = []
if genre is not None:
songs, _playtime, _num_songs = \
self.library_return_search_items(genre=genre, artist=artist,
album=album, year=year)
else:
songs, _playtime, _num_songs = self.library_return_search_items(
artist=artist, album=album, year=year)
for song in songs:
data = self.library_set_data(path=mpdh.get(song, 'file'))
track = mpdh.get(song, 'track', '99', False, 2)
disc = mpdh.get(song, 'disc', '99', False, 2)
try:
bd += [('f' + disc + track + misc.lower_no_the(
mpdh.get(song, 'title')), [self.sonatapb, data,
formatting.parse(
self.config.libraryformat,
song, True)])]
except:
bd += [('f' + disc + track + \
unicode(mpdh.get(song, 'file')).lower(),
[self.sonatapb, data,
formatting.parse(self.config.libraryformat, song,
True)])]
return bd
def library_return_list_items(self, itemtype, genre=None, artist=None,
album=None, year=None, ignore_case=True):
# Returns all items of tag 'itemtype', in alphabetical order,
# using mpd's 'list'. If searchtype is passed, use
# a case insensitive search, via additional 'list'
# queries, since using a single 'list' call will be
# case sensitive.
results = []
searches = self.library_compose_list_count_searchlist(genre, artist,
album, year)
if len(searches) > 0:
for s in searches:
# If we have untagged tags (''), use search instead
# of list because list will not return anything.
if '' in s:
items = []
songs, playtime, num_songs = \
self.library_return_search_items(genre, artist,
album, year)
for song in songs:
items.append(mpdh.get(song, itemtype))
else:
items = self.mpd.list(itemtype, *s)
for item in items:
if len(item) > 0:
results.append(item)
else:
if genre is None and artist is None and album is None and year \
is None:
for item in self.mpd.list(itemtype):
if len(item) > 0:
results.append(item)
if ignore_case:
results = misc.remove_list_duplicates(results, case=False)
results.sort(locale.strcoll)
return results
def library_return_count(self, genre=None, artist=None, album=None,
year=None):
# Because mpd's 'count' is case sensitive, we have to
# determine all equivalent items (case insensitive) and
# call 'count' for each of them. Using 'list' + 'count'
# involves much less data to be transferred back and
# forth than to use 'search' and count manually.
searches = self.library_compose_list_count_searchlist(genre, artist,
album, year)
playtime = 0
num_songs = 0
for s in searches:
if '' in s and self.mpd.version <= (0, 13):
# Can't return count for empty tags, use search instead:
_results, playtime, num_songs = \
self.library_return_search_items(
genre=genre, artist=artist, album=album, year=year)
else:
count = self.mpd.count(*s)
playtime += mpdh.get(count, 'playtime', 0, True)
num_songs += mpdh.get(count, 'songs', 0, True)
return (playtime, num_songs)
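# Expected call pattern (hypothetical tag values): a call such as
#
#     playtime, num = self.library_return_count(artist='Foo', album='Bar')
#
# issues self.mpd.count('artist', <variant>, 'album', <variant>) once per case
# variant produced by the searchlist composition and sums the 'playtime' and
# 'songs' fields of each reply.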
def library_compose_list_count_searchlist_single(self, search, typename,
cached_list, searchlist):
s = []
skip_type = (typename == 'artist' and search == self.VAstr)
if search is not None and not skip_type:
if search == self.NOTAG:
itemlist = [search, '']
else:
itemlist = []
if cached_list is None:
cached_list = self.library_return_list_items(typename,
ignore_case=False)
# This allows us to match untagged items
cached_list.append('')
for item in cached_list:
if unicode(item).lower() == unicode(search).lower():
itemlist.append(item)
if len(itemlist) == 0:
# There should be no results!
return None, cached_list
for item in itemlist:
if len(searchlist) > 0:
for item2 in searchlist:
s.append(item2 + (typename, item))
else:
s.append((typename, item))
else:
s = searchlist
return s, cached_list
def library_compose_list_count_searchlist(self, genre=None, artist=None,
album=None, year=None):
s = []
s, self.lib_list_genres = \
self.library_compose_list_count_searchlist_single(
genre, 'genre', self.lib_list_genres, s)
if s is None:
return []
s, self.lib_list_artists = \
self.library_compose_list_count_searchlist_single(
artist, 'artist', self.lib_list_artists, s)
if s is None:
return []
s, self.lib_list_albums = \
self.library_compose_list_count_searchlist_single(
album, 'album', self.lib_list_albums, s)
if s is None:
return []
s, self.lib_list_years = \
self.library_compose_list_count_searchlist_single(
year, 'date', self.lib_list_years, s)
if s is None:
return []
return s
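# Shape of the composed searchlist (illustrative, assumed tag values): for
# genre='Rock', artist='Foo' it is a list of flat argument tuples ready for
# mpd.list()/mpd.count(), one per case variant found in the cached tag lists,
# e.g.
#
#     [('genre', 'Rock', 'artist', 'Foo'),
#      ('genre', 'Rock', 'artist', 'FOO')]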
def library_compose_search_searchlist_single(self, search, typename,
searchlist):
s = []
skip_type = (typename == 'artist' and search == self.VAstr)
if search is not None and not skip_type:
if search == self.NOTAG:
itemlist = [search, '']
else:
itemlist = [search]
for item in itemlist:
if len(searchlist) > 0:
for item2 in searchlist:
s.append(item2 + (typename, item))
else:
s.append((typename, item))
else:
s = searchlist
return s
def library_compose_search_searchlist(self, genre=None, artist=None,
album=None, year=None):
s = []
s = self.library_compose_search_searchlist_single(genre, 'genre', s)
s = self.library_compose_search_searchlist_single(album, 'album', s)
s = self.library_compose_search_searchlist_single(artist, 'artist', s)
s = self.library_compose_search_searchlist_single(year, 'date', s)
return s
def library_return_search_items(self, genre=None, artist=None, album=None,
year=None):
# Returns all mpd items, using mpd's 'search', along with
# playtime and num_songs.
searches = self.library_compose_search_searchlist(genre, artist, album,
year)
for s in searches:
args_tuple = tuple(map(str, s))
playtime = 0
num_songs = 0
results = []
if '' in s and self.mpd.version <= (0, 13):
# Can't search for empty tags, search broader and
# filter instead:
# Strip empty tag args from tuple:
pos = list(args_tuple).index('')
strip_type = list(args_tuple)[pos-1]
new_lst = []
for i, item in enumerate(list(args_tuple)):
if i != pos and i != pos-1:
new_lst.append(item)
args_tuple = tuple(new_lst)
else:
strip_type = None
if len(args_tuple) == 0:
return None, 0, 0
items = self.mpd.search(*args_tuple)
if items is not None:
for item in items:
if strip_type is None or (strip_type is not None and not \
strip_type in item.keys()):
match = True
pos = 0
# Ensure that if, e.g., "foo" is searched,
# "foobar" isn't returned too
for arg in args_tuple[::2]:
if arg in item and \
unicode(mpdh.get(item, arg)).upper() != \
unicode(args_tuple[pos + 1]).upper():
match = False
break
pos += 2
if match:
results.append(item)
num_songs += 1
playtime += mpdh.get(item, 'time', 0, True)
return (results, int(playtime), num_songs)
def add_display_info(self, num_songs, playtime):
return "\n<small><span weight='light'>%s %s, %s %s</span></small>" \
% (num_songs, gettext.ngettext('song', 'songs', num_songs),
playtime, gettext.ngettext('minute', 'minutes', playtime))
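# Example of the markup produced (assuming num_songs=12, playtime=42 and an
# English locale):
#
#     "\n<small><span weight='light'>12 songs, 42 minutes</span></small>"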
def library_retain_selection(self, prev_selection, prev_selection_root,
prev_selection_parent):
# Unselect everything:
if len(self.librarydata) > 0:
self.library_selection.unselect_range((0,),
(len(self.librarydata) - 1,))
# Now attempt to retain the selection from before the update:
for value in prev_selection:
for row in self.librarydata:
if value == row[1]:
self.library_selection.select_path(row.path)
break
if prev_selection_root:
self.library_selection.select_path((0,))
if prev_selection_parent:
self.library_selection.select_path((1,))
def library_set_view(self, select_items=True):
# select_items should be false if the same directory has merely
# been refreshed (updated)
try:
if self.config.wd in self.libraryposition:
self.library.scroll_to_point(
-1, self.libraryposition[self.config.wd])
else:
self.library.scroll_to_point(0, 0)
except:
self.library.scroll_to_point(0, 0)
# Select and focus previously selected item
if select_items:
if self.config.wd in self.libraryselectedpath:
try:
if self.libraryselectedpath[self.config.wd]:
self.library_selection.select_path(
self.libraryselectedpath[self.config.wd])
self.library.grab_focus()
except:
pass
def library_set_data(self, *args, **kwargs):
return library_set_data(*args, **kwargs)
def library_get_data(self, data, *args):
return library_get_data(data, *args)
def library_get_data_level(self, data):
if self.config.lib_view == consts.VIEW_FILESYSTEM:
# Returns the number of directories down:
if library_get_data(data, 'path') == '/':
# Every other path doesn't start with "/", so
# start the level numbering at -1
return -1
else:
return library_get_data(data, 'path').count("/")
else:
# Returns the number of items stored in data, excluding
# the path:
level = 0
album, artist, genre, year = library_get_data(
data, 'album', 'artist', 'genre', 'year')
for item in [album, artist, genre, year]:
if item is not None:
level += 1
return level
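# Illustration (hypothetical values): in filesystem view a path of '/' gives
# level -1, 'dir' gives 0 and 'dir/sub' gives 1; in the tag-based views the
# level is simply how many of album/artist/genre/year are set in the data
# tuple.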
def on_library_key_press(self, widget, event):
if event.keyval == gtk.gdk.keyval_from_name('Return'):
self.on_library_row_activated(widget, widget.get_cursor()[0])
return True
def on_library_query_tooltip(self, widget, x, y, keyboard_mode, tooltip):
if keyboard_mode or not self.search_visible():
widget.set_tooltip_text(None)
return False
bin_x, bin_y = widget.convert_widget_to_bin_window_coords(x, y)
pathinfo = widget.get_path_at_pos(bin_x, bin_y)
if not pathinfo:
widget.set_tooltip_text(None)
# If the user hovers over an empty row and then back to
# a row with a search result, this will ensure the tooltip
# shows up again:
gobject.idle_add(self.library_search_tooltips_enable, widget, x, y,
keyboard_mode, None)
return False
treepath, _col, _x2, _y2 = pathinfo
i = self.librarydata.get_iter(treepath[0])
path = misc.escape_html(self.library_get_data(
self.librarydata.get_value(i, 1), 'path'))
song = self.librarydata.get_value(i, 2)
new_tooltip = "<b>%s:</b> %s\n<b>%s:</b> %s" \
% (_("Song"), song, _("Path"), path)
if new_tooltip != self.libsearch_last_tooltip:
self.libsearch_last_tooltip = new_tooltip
self.library.set_property('has-tooltip', False)
gobject.idle_add(self.library_search_tooltips_enable, widget, x, y,
keyboard_mode, tooltip)
gobject.idle_add(widget.set_tooltip_markup, new_tooltip)
return
self.libsearch_last_tooltip = new_tooltip
return False #api says we should return True, but this doesn't work?
def library_search_tooltips_enable(self, widget, x, y, keyboard_mode,
tooltip):
self.library.set_property('has-tooltip', True)
if tooltip is not None:
self.on_library_query_tooltip(widget, x, y, keyboard_mode, tooltip)
def on_library_row_activated(self, _widget, path, _column=0):
if path is None:
# Default to last item in selection:
_model, selected = self.library_selection.get_selected_rows()
if len(selected) >= 1:
path = selected[0]
else:
return
value = self.librarydata.get_value(self.librarydata.get_iter(path), 1)
icon = self.librarydata.get_value(self.librarydata.get_iter(path), 0)
if icon == self.sonatapb:
# Song found, add item
self.on_add_item(self.library)
elif value == self.library_set_data(path=".."):
self.library_browse_parent(None)
else:
self.library_browse(None, value)
def library_get_parent(self):
if self.config.lib_view == consts.VIEW_ALBUM:
value = self.library_set_data(path="/")
elif self.config.lib_view == consts.VIEW_ARTIST:
album, artist = self.library_get_data(self.config.wd, 'album',
'artist')
if album is not None:
value = self.library_set_data(artist=artist)
else:
value = self.library_set_data(path="/")
elif self.config.lib_view == consts.VIEW_GENRE:
album, artist, genre = self.library_get_data(
self.config.wd, 'album', 'artist', 'genre')
if album is not None:
value = self.library_set_data(genre=genre, artist=artist)
elif artist is not None:
value = self.library_set_data(genre=genre)
else:
value = self.library_set_data(path="/")
else:
newvalue = '/'.join(
self.library_get_data(self.config.wd, 'path').split('/')[:-1])\
or '/'
value = self.library_set_data(path=newvalue)
return value
def library_browse_parent(self, _action):
if not self.search_visible():
if self.library.is_focus():
value = self.library_get_parent()
self.library_browse(None, value)
return True
def not_parent_is_selected(self):
# Returns True if something is selected and it's not
# ".." or "/":
model, rows = self.library_selection.get_selected_rows()
for path in rows:
i = model.get_iter(path)
value = model.get_value(i, 2)
if value != ".." and value != "/":
return True
return False
def get_path_child_filenames(self, return_root, selected_only=True):
# If return_root=True, return main directories whenever possible
# instead of individual songs in order to reduce the number of
# mpd calls we need to make. We won't want this behavior in some
# instances, like when we want all end files for editing tags
items = []
if selected_only:
model, rows = self.library_selection.get_selected_rows()
else:
model = self.librarydata
rows = [(i,) for i in range(len(model))]
for path in rows:
i = model.get_iter(path)
pb = model.get_value(i, 0)
data = model.get_value(i, 1)
value = model.get_value(i, 2)
if value != ".." and value != "/":
album, artist, year, genre, path = self.library_get_data(
data, 'album', 'artist', 'year', 'genre', 'path')
if path is not None and album is None and artist is None and \
year is None and genre is None:
if pb == self.sonatapb:
# File
items.append(path)
else:
# Directory
if not return_root:
items += self.library_get_path_files_recursive(
path)
else:
items.append(path)
else:
results, _playtime, _num_songs = \
self.library_return_search_items(
genre=genre, artist=artist, album=album,
year=year)
for item in results:
items.append(mpdh.get(item, 'file'))
# Make sure we don't have any EXACT duplicates:
items = misc.remove_list_duplicates(items, case=True)
return items
def library_get_path_files_recursive(self, path):
results = []
for item in self.mpd.lsinfo(path):
if 'directory' in item:
results = results + self.library_get_path_files_recursive(
mpdh.get(item, 'directory'))
elif 'file' in item:
results.append(mpdh.get(item, 'file'))
return results
def on_library_search_combo_change(self, _combo=None):
self.config.last_search_num = self.searchcombo.get_active()
if not self.search_visible():
return
self.prevlibtodo = ""
self.prevlibtodo_base = "__"
self.libsearchfilter_feed_loop(self.searchtext)
def on_search_end(self, _button, move_focus=True):
if self.search_visible():
self.libsearchfilter_toggle(move_focus)
def search_visible(self):
return self.searchbutton.get_property('visible')
def libsearchfilter_toggle(self, move_focus):
if not self.search_visible() and self.connected():
self.library.set_property('has-tooltip', True)
ui.show(self.searchbutton)
self.prevlibtodo = 'foo'
self.prevlibtodo_base = "__"
self.prevlibtodo_base_results = []
# extra thread for background search work,
# synchronized with a condition and its internal mutex
self.libfilterbox_cond = threading.Condition()
self.libfilterbox_cmd_buf = self.searchtext.get_text()
qsearch_thread = threading.Thread(target=self.libsearchfilter_loop)
qsearch_thread.setDaemon(True)
qsearch_thread.start()
elif self.search_visible():
ui.hide(self.searchbutton)
self.searchtext.handler_block(self.libfilter_changed_handler)
self.searchtext.set_text("")
self.searchtext.handler_unblock(self.libfilter_changed_handler)
self.libsearchfilter_stop_loop()
self.library_browse(root=self.config.wd)
if move_focus:
self.library.grab_focus()
def libsearchfilter_feed_loop(self, editable):
if not self.search_visible():
self.libsearchfilter_toggle(None)
# Lets only trigger the searchfilter_loop if 200ms pass
# without a change in gtk.Entry
try:
gobject.source_remove(self.libfilterbox_source)
except:
pass
self.libfilterbox_source = gobject.timeout_add(
300, self.libsearchfilter_start_loop, editable)
def libsearchfilter_start_loop(self, editable):
self.libfilterbox_cond.acquire()
self.libfilterbox_cmd_buf = editable.get_text()
self.libfilterbox_cond.notifyAll()
self.libfilterbox_cond.release()
def libsearchfilter_stop_loop(self):
self.libfilterbox_cond.acquire()
self.libfilterbox_cmd_buf = '$$$QUIT###'
self.libfilterbox_cond.notifyAll()
self.libfilterbox_cond.release()
def libsearchfilter_loop(self):
while True:
# copy the last command or pattern safely
self.libfilterbox_cond.acquire()
try:
while(self.libfilterbox_cmd_buf == '$$$DONE###'):
self.libfilterbox_cond.wait()
todo = self.libfilterbox_cmd_buf
self.libfilterbox_cond.release()
except:
todo = self.libfilterbox_cmd_buf
searchby = self.search_terms_mpd[self.config.last_search_num]
if self.prevlibtodo != todo:
if todo == '$$$QUIT###':
gobject.idle_add(self.filtering_entry_revert_color,
self.searchtext)
return
elif len(todo) > 1:
gobject.idle_add(self.libsearchfilter_do_search,
searchby, todo)
elif len(todo) == 0:
gobject.idle_add(self.filtering_entry_revert_color,
self.searchtext)
self.libsearchfilter_toggle(False)
else:
gobject.idle_add(self.filtering_entry_revert_color,
self.searchtext)
self.libfilterbox_cond.acquire()
self.libfilterbox_cmd_buf = '$$$DONE###'
try:
self.libfilterbox_cond.release()
except:
pass
self.prevlibtodo = todo
def libsearchfilter_do_search(self, searchby, todo):
if not self.prevlibtodo_base in todo:
# Do library search based on first two letters:
self.prevlibtodo_base = todo[:2]
self.prevlibtodo_base_results = self.mpd.search(searchby,
self.prevlibtodo_base)
subsearch = False
else:
subsearch = True
# Now, use filtering similar to playlist filtering:
# this may take some seconds... and we'll escape the search text
# because we'll be searching for a match in items that are also escaped
#
# Note that the searching is not order specific. That is, "foo bar"
# will match on "fools bar" and "barstool foo".
todos = todo.split(" ")
regexps = []
for i in range(len(todos)):
todos[i] = misc.escape_html(todos[i])
todos[i] = re.escape(todos[i])
todos[i] = '.*' + todos[i].lower()
regexps.append(re.compile(todos[i]))
matches = []
if searchby != 'any':
for row in self.prevlibtodo_base_results:
is_match = True
for regexp in regexps:
if not regexp.match(unicode(mpdh.get(row,
searchby)).lower()):
is_match = False
break
if is_match:
matches.append(row)
else:
for row in self.prevlibtodo_base_results:
allstr = " ".join(mpdh.get(row, meta) for meta in row)
is_match = True
for regexp in regexps:
if not regexp.match(unicode(allstr).lower()):
is_match = False
break
if is_match:
matches.append(row)
if subsearch and len(matches) == len(self.librarydata):
# nothing changed..
return
self.library.freeze_child_notify()
currlen = len(self.librarydata)
bd = [[self.sonatapb,
self.library_set_data(path=mpdh.get(item, 'file')),
formatting.parse(self.config.libraryformat, item, True)]
for item in matches if 'file' in item]
bd.sort(locale.strcoll, key=operator.itemgetter(2))
for i, item in enumerate(bd):
if i < currlen:
j = self.librarydata.get_iter((i, ))
for index in range(len(item)):
if item[index] != self.librarydata.get_value(j, index):
self.librarydata.set_value(j, index, item[index])
else:
self.librarydata.append(item)
# Remove excess items...
newlen = len(bd)
if newlen == 0:
self.librarydata.clear()
else:
for i in range(currlen - newlen):
j = self.librarydata.get_iter((currlen - 1 - i,))
self.librarydata.remove(j)
self.library.thaw_child_notify()
if len(matches) == 0:
gobject.idle_add(self.filtering_entry_make_red, self.searchtext)
else:
gobject.idle_add(self.library.set_cursor, '0')
gobject.idle_add(self.filtering_entry_revert_color,
self.searchtext)
def libsearchfilter_key_pressed(self, widget, event):
self.filter_key_pressed(widget, event, self.library)
def libsearchfilter_on_enter(self, _entry):
self.on_library_row_activated(None, None)
def libsearchfilter_set_focus(self):
gobject.idle_add(self.searchtext.grab_focus)
def libsearchfilter_get_style(self):
return self.searchtext.get_style()<|fim▁end|>
|
else:
bd = self.library_populate_toplevel_data(artistview=True)
elif self.config.lib_view == consts.VIEW_GENRE:
genre, artist, album, year = self.library_get_data(
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import {Manager} from './manager'
export {Filterer} from './filter';
export {Logger} from './logger';<|fim▁hole|>var logging = new Manager();
export default logging;<|fim▁end|>
|
export {Manager};
|
<|file_name|>plot_label_foci.py<|end_file_name|><|fim▁begin|>"""
=======================
Generate Surface Labels
=======================
Define a label that is centered on a specific vertex in the surface mesh. Plot
that label and the focus that defines its center.
"""
print __doc__
from surfer import Brain, utils
subject_id = "fsaverage"
"""
Bring up the visualization.
"""
brain = Brain(subject_id, "lh", "inflated")
"""
First we'll identify a stereotaxic focus in the MNI coordinate system. This
might be a peak activation from a volume-based analysis.
"""
coord = [-43, 25, 24]
"""<|fim▁hole|>Next we grow a label along the surface around the neareset vertex to this
coordinate in the white surface mesh. The `n_steps` argument controls the size
of the resulting label.
"""
utils.coord_to_label(subject_id, coord, label='example_data/coord',
hemi='lh', n_steps=25, map_surface="white")
brain.add_label('example_data/coord-lh.label', color="darkseagreen", alpha=.8)
"""
Now we plot the focus on the inflated surface at the vertex identified in the
previous step.
"""
brain.add_foci([coord], map_surface="white", color="mediumseagreen")
"""
We can also do this using a vertex index, perhaps defined as the peak
activation in a surface analysis. This will be more accurate than using a
volume-based focus.
"""
coord = 0
utils.coord_to_label(subject_id, coord, label='example_data/coord',
hemi='lh', n_steps=40, map_surface="white",
coord_as_vert=True)
brain.add_label('example_data/coord-lh.label', color='royalblue', alpha=.8)
"""
Now we plot the foci on the inflated surface. We will map the foci onto the
surface by finding the vertex on the "white" mesh that is closest to the
coordinate of the point we want to display.
"""
brain.add_foci([coord], map_surface="white", coords_as_verts=True,
color="mediumblue")
"""
Set the camera position to show the extent of the labels.
"""
brain.show_view(dict(elevation=40, distance=430))<|fim▁end|>
| |
<|file_name|>marshal.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The gocql Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cassandra
import (
"bytes"
"fmt"
"math/big"
"reflect"
"time"
"errors"
"github.com/elastic/beats/libbeat/logp"
"gopkg.in/inf.v0"
"strings"
)
// TypeInfo describes a Cassandra specific data type.
type TypeInfo interface {
Type() Type
Version() byte
Custom() string
// New creates a pointer to an empty version of whatever type
// is referenced by the TypeInfo receiver
New() interface{}
}
type NativeType struct {
proto byte
typ Type
custom string // only used for TypeCustom
}
func (t NativeType) New() interface{} {
return reflect.New(goType(t)).Interface()
}
func (s NativeType) Type() Type {
return s.typ
}
func (s NativeType) Version() byte {
return s.proto
}
func (s NativeType) Custom() string {
return s.custom
}
func (s NativeType) String() string {
switch s.typ {
case TypeCustom:
return fmt.Sprintf("%s(%s)", s.typ, s.custom)
default:
return s.typ.String()
}
}
type CollectionType struct {
NativeType
Key TypeInfo // only used for TypeMap
Elem TypeInfo // only used for TypeMap, TypeList and TypeSet
}
func goType(t TypeInfo) reflect.Type {
switch t.Type() {
case TypeVarchar, TypeASCII, TypeInet, TypeText:
return reflect.TypeOf(*new(string))
case TypeBigInt, TypeCounter:
return reflect.TypeOf(*new(int64))
case TypeTimestamp:
return reflect.TypeOf(*new(time.Time))
case TypeBlob:
return reflect.TypeOf(*new([]byte))
case TypeBoolean:
return reflect.TypeOf(*new(bool))
case TypeFloat:
return reflect.TypeOf(*new(float32))
case TypeDouble:
return reflect.TypeOf(*new(float64))
case TypeInt:
return reflect.TypeOf(*new(int))
case TypeDecimal:
return reflect.TypeOf(*new(*inf.Dec))
case TypeUUID, TypeTimeUUID:
return reflect.TypeOf(*new(UUID))
case TypeList, TypeSet:
return reflect.SliceOf(goType(t.(CollectionType).Elem))
case TypeMap:
return reflect.MapOf(goType(t.(CollectionType).Key), goType(t.(CollectionType).Elem))
case TypeVarint:
return reflect.TypeOf(*new(*big.Int))
case TypeTuple:
// what can we do here? all there is to do is to make a list of interface{}
tuple := t.(TupleTypeInfo)
return reflect.TypeOf(make([]interface{}, len(tuple.Elems)))
case TypeUDT:
return reflect.TypeOf(make(map[string]interface{}))
default:
return nil
}
}
func (t CollectionType) New() interface{} {
return reflect.New(goType(t)).Interface()
}
func (c CollectionType) String() string {
switch c.typ {
case TypeMap:
return fmt.Sprintf("%s(%s, %s)", c.typ, c.Key, c.Elem)
case TypeList, TypeSet:
return fmt.Sprintf("%s(%s)", c.typ, c.Elem)
case TypeCustom:
return fmt.Sprintf("%s(%s)", c.typ, c.custom)
default:
return c.typ.String()
}
}
type TupleTypeInfo struct {
NativeType
Elems []TypeInfo
}
func (t TupleTypeInfo) New() interface{} {
return reflect.New(goType(t)).Interface()
}
type UDTField struct {
Name string
Type TypeInfo
}
type UDTTypeInfo struct {
NativeType
KeySpace string
Name string
Elements []UDTField
}
func (u UDTTypeInfo) New() interface{} {
return reflect.New(goType(u)).Interface()
}
func (u UDTTypeInfo) String() string {
buf := &bytes.Buffer{}
fmt.Fprintf(buf, "%s.%s{", u.KeySpace, u.Name)
first := true
for _, e := range u.Elements {
if !first {
fmt.Fprint(buf, ",")
} else {
first = false
}
fmt.Fprintf(buf, "%s=%v", e.Name, e.Type)
}
fmt.Fprint(buf, "}")
return buf.String()
}
// String returns a human readable name for the Cassandra datatype
// described by t.
// Type is the identifier of a Cassandra internal datatype.
type Type int
const (
TypeCustom Type = 0x0000
TypeASCII Type = 0x0001
TypeBigInt Type = 0x0002
TypeBlob Type = 0x0003
TypeBoolean Type = 0x0004
TypeCounter Type = 0x0005
TypeDecimal Type = 0x0006
TypeDouble Type = 0x0007
TypeFloat Type = 0x0008
TypeInt Type = 0x0009
TypeText Type = 0x000A
TypeTimestamp Type = 0x000B
TypeUUID Type = 0x000C
TypeVarchar Type = 0x000D
TypeVarint Type = 0x000E
TypeTimeUUID Type = 0x000F
TypeInet Type = 0x0010
TypeDate Type = 0x0011
TypeTime Type = 0x0012
TypeSmallInt Type = 0x0013
TypeTinyInt Type = 0x0014
TypeList Type = 0x0020
TypeMap Type = 0x0021
TypeSet Type = 0x0022
TypeUDT Type = 0x0030
TypeTuple Type = 0x0031
)
// String returns the name of the identifier.
func (t Type) String() string {
switch t {
case TypeCustom:
return "custom"
case TypeASCII:
return "ascii"
case TypeBigInt:
return "bigint"
case TypeBlob:
return "blob"
case TypeBoolean:
return "boolean"
case TypeCounter:
return "counter"
case TypeDecimal:
return "decimal"
case TypeDouble:
return "double"
case TypeFloat:
return "float"
case TypeInt:
return "int"
case TypeText:
return "text"
case TypeTimestamp:
return "timestamp"
case TypeUUID:
return "uuid"
case TypeVarchar:
return "varchar"
case TypeTimeUUID:
return "timeuuid"
case TypeInet:
return "inet"
case TypeDate:
return "date"
case TypeTime:
return "time"
case TypeSmallInt:
return "smallint"
case TypeTinyInt:
return "tinyint"
case TypeList:
return "list"
case TypeMap:
return "map"
case TypeSet:
return "set"
case TypeVarint:
return "varint"
case TypeTuple:
return "tuple"
default:
return fmt.Sprintf("unknown_type_%d", t)
}
}
const (
apacheCassandraTypePrefix = "org.apache.cassandra.db.marshal."
)
// getApacheCassandraType maps an Apache Cassandra marshal class name to its Type.
func getApacheCassandraType(class string) Type {
switch strings.TrimPrefix(class, apacheCassandraTypePrefix) {
case "AsciiType":
return TypeASCII
case "LongType":
return TypeBigInt
case "BytesType":
return TypeBlob
case "BooleanType":
return TypeBoolean
case "CounterColumnType":
return TypeCounter
case "DecimalType":
return TypeDecimal
case "DoubleType":
return TypeDouble
case "FloatType":
return TypeFloat
case "Int32Type":
return TypeInt
case "ShortType":
return TypeSmallInt
case "ByteType":
return TypeTinyInt
case "DateType", "TimestampType":
return TypeTimestamp
case "UUIDType", "LexicalUUIDType":
return TypeUUID
case "UTF8Type":
return TypeVarchar
case "IntegerType":
return TypeVarint
case "TimeUUIDType":
return TypeTimeUUID
case "InetAddressType":
return TypeInet
case "MapType":
return TypeMap
case "ListType":
return TypeList
case "SetType":
return TypeSet
case "TupleType":
return TypeTuple
default:
return TypeCustom
}
}
// error Types
type ErrType int
const (
errServer ErrType = 0x0000
errProtocol ErrType = 0x000A
errCredentials ErrType = 0x0100
errUnavailable ErrType = 0x1000
errOverloaded ErrType = 0x1001
errBootstrapping ErrType = 0x1002
errTruncate ErrType = 0x1003
errWriteTimeout ErrType = 0x1100
errReadTimeout ErrType = 0x1200
errReadFailure ErrType = 0x1300
errFunctionFailure ErrType = 0x1400
errWriteFailure ErrType = 0x1500
errSyntax ErrType = 0x2000
errUnauthorized ErrType = 0x2100
errInvalid ErrType = 0x2200
errConfig ErrType = 0x2300
errAlreadyExists ErrType = 0x2400
errUnprepared ErrType = 0x2500
)
func (this ErrType) String() string {
switch this {
case errUnavailable:
return "errUnavailable"
case errWriteTimeout:
return "errWriteTimeout"
case errReadTimeout:
return "errReadTimeout"
case errAlreadyExists:
return "errAlreadyExists"
case errUnprepared:
return "errUnprepared"
case errReadFailure:
return "errReadFailure"
case errWriteFailure:
return "errWriteFailure"
case errFunctionFailure:
return "errFunctionFailure"
case errInvalid:
return "errInvalid"
case errBootstrapping:
return "errBootstrapping"
case errConfig:
return "errConfig"
case errCredentials:
return "errCredentials"
case errOverloaded:
return "errOverloaded"
case errProtocol:
return "errProtocol"
case errServer:
return "errServer"
case errSyntax:
return "errSyntax"
case errTruncate:
return "errTruncate"
case errUnauthorized:
return "errUnauthorized"
}
return "ErrUnknown"
}
const (
protoDirectionMask = 0x80
protoVersionMask = 0x7F
protoVersion1 = 0x01
protoVersion2 = 0x02
protoVersion3 = 0x03
protoVersion4 = 0x04
maxFrameSize = 256 * 1024 * 1024
)
type protoVersion byte
func (p protoVersion) IsRequest() bool {
v := p.version()
if v < protoVersion1 || v > protoVersion4 {
logp.Err("unsupported request version: %x", v)
}
if v == protoVersion4 {
return p == 0x04
}
if v == protoVersion3 {
return p == 0x03
}
return p == 0x00
}
func (p protoVersion) IsResponse() bool {
v := p.version()
if v < protoVersion1 || v > protoVersion4 {
logp.Err("unsupported response version: %x", v)
}
if v == protoVersion4 {
return p == 0x84
}
if v == protoVersion3 {
return p == 0x83
}
return p == 0x80
}
func (p protoVersion) version() byte {
return byte(p) & protoVersionMask
}
func (p protoVersion) String() string {
dir := "REQ"
if p.IsResponse() {
dir = "RESP"
}
return fmt.Sprintf("[version=%d direction=%s]", p.version(), dir)
}
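// Illustrative note: the high bit of the version byte carries the direction
// (protoDirectionMask), so for protocol v4 protoVersion(0x04).IsRequest() and
// protoVersion(0x84).IsResponse() both report true.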
type FrameOp byte
const (
// header ops
opError FrameOp = 0x00
opStartup FrameOp = 0x01
opReady FrameOp = 0x02
opAuthenticate FrameOp = 0x03
opOptions FrameOp = 0x05
opSupported FrameOp = 0x06
opQuery FrameOp = 0x07
opResult FrameOp = 0x08
opPrepare FrameOp = 0x09
opExecute FrameOp = 0x0A
opRegister FrameOp = 0x0B
opEvent FrameOp = 0x0C
opBatch FrameOp = 0x0D
opAuthChallenge FrameOp = 0x0E
opAuthResponse FrameOp = 0x0F
opAuthSuccess FrameOp = 0x10
opUnknown FrameOp = 0xFF
)
func (f FrameOp) String() string {
switch f {
case opError:
return "ERROR"
case opStartup:
return "STARTUP"
case opReady:
return "READY"
case opAuthenticate:
return "AUTHENTICATE"
case opOptions:
return "OPTIONS"
case opSupported:
return "SUPPORTED"
case opQuery:
return "QUERY"
case opResult:
return "RESULT"
case opPrepare:
return "PREPARE"
case opExecute:
return "EXECUTE"
case opRegister:
return "REGISTER"
case opEvent:
return "EVENT"
case opBatch:
return "BATCH"
case opAuthChallenge:
return "AUTH_CHALLENGE"
case opAuthResponse:
return "AUTH_RESPONSE"
case opAuthSuccess:
return "AUTH_SUCCESS"
default:
return fmt.Sprintf("UNKNOWN_OP_%d", f)
}
}
var frameOps = map[string]FrameOp{
"ERROR": opError,
"STARTUP": opStartup,
"READY": opReady,
"AUTHENTICATE": opAuthenticate,
"OPTIONS": opOptions,
"SUPPORTED": opSupported,
"QUERY": opQuery,
"RESULT": opResult,
"PREPARE": opPrepare,
"EXECUTE": opExecute,
"REGISTER": opRegister,
"EVENT": opEvent,
"BATCH": opBatch,
"AUTH_CHALLENGE": opAuthChallenge,
"AUTH_RESPONSE": opAuthResponse,
"AUTH_SUCCESS": opAuthSuccess,
}
func FrameOpFromString(s string) (FrameOp, error) {
s = strings.ToUpper(strings.TrimSpace(s))
op, found := frameOps[s]
if !found {
return opUnknown, fmt.Errorf("unknown frame op: %v", s)
}
return op, nil
}
func (f *FrameOp) Unpack(in interface{}) error {
s, ok := in.(string)
if !ok {
return errors.New("expected string")
}
op, err := FrameOpFromString(s)
if err != nil {
return err
}
*f = op
return nil
}
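// Illustrative usage of the helpers above: the lookup is case- and
// whitespace-insensitive, so FrameOpFromString(" query ") yields opQuery,
// while an unrecognised name returns opUnknown together with an error.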
const (
// result kind
resultKindVoid = 1
resultKindRows = 2
resultKindSetKeyspace = 3
resultKindPrepared = 4
resultKindSchemaChanged = 5
// rows flags
flagGlobalTableSpec int = 0x01
flagHasMorePages int = 0x02
flagNoMetaData int = 0x04
// query flags
flagValues byte = 0x01<|fim▁hole|> flagWithPagingState byte = 0x08
flagWithSerialConsistency byte = 0x10
flagDefaultTimestamp byte = 0x20
flagWithNameValues byte = 0x40
// header flags
flagDefault byte = 0x00
flagCompress byte = 0x01
flagTracing byte = 0x02
flagCustomPayload byte = 0x04
flagWarning byte = 0x08
)
func getHeadFlagString(f byte) string {
switch f {
case flagDefault:
return "Default"
case flagCompress:
return "Compress"
case flagTracing:
return "Tracing"
case flagCustomPayload:
return "CustomPayload"
case flagWarning:
return "Warning"
default:
return fmt.Sprintf("UnknownFlag_%d", f)
}
}
func getRowFlagString(f int) string {
switch f {
case flagGlobalTableSpec:
return "GlobalTableSpec"
case flagHasMorePages:
return "HasMorePages"
case flagNoMetaData:
return "NoMetaData"
default:
return fmt.Sprintf("FLAG_%d", f)
}
}
type Consistency uint16
const (
Any Consistency = 0x00
One Consistency = 0x01
Two Consistency = 0x02
Three Consistency = 0x03
Quorum Consistency = 0x04
All Consistency = 0x05
LocalQuorum Consistency = 0x06
EachQuorum Consistency = 0x07
LocalOne Consistency = 0x0A
)
func (c Consistency) String() string {
switch c {
case Any:
return "ANY"
case One:
return "ONE"
case Two:
return "TWO"
case Three:
return "THREE"
case Quorum:
return "QUORUM"
case All:
return "ALL"
case LocalQuorum:
return "LOCAL_QUORUM"
case EachQuorum:
return "EACH_QUORUM"
case LocalOne:
return "LOCAL_ONE"
default:
return fmt.Sprintf("UNKNOWN_CONS_0x%x", uint16(c))
}
}
type SerialConsistency uint16
const (
Serial SerialConsistency = 0x08
LocalSerial SerialConsistency = 0x09
)
func (s SerialConsistency) String() string {
switch s {
case Serial:
return "SERIAL"
case LocalSerial:
return "LOCAL_SERIAL"
default:
return fmt.Sprintf("UNKNOWN_SERIAL_CONS_0x%x", uint16(s))
}
}
type UUID [16]byte
// Bytes returns the raw byte slice for this UUID. A UUID is always 128 bits
// (16 bytes) long.
func (u UUID) Bytes() []byte {
return u[:]
}
// String returns the UUID in its canonical form, a 32-digit hexadecimal
// number in the form of xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx.
func (u UUID) String() string {
var offsets = [...]int{0, 2, 4, 6, 9, 11, 14, 16, 19, 21, 24, 26, 28, 30, 32, 34}
const hexString = "0123456789abcdef"
r := make([]byte, 36)
for i, b := range u {
r[offsets[i]] = hexString[b>>4]
r[offsets[i]+1] = hexString[b&0xF]
}
r[8] = '-'
r[13] = '-'
r[18] = '-'
r[23] = '-'
return string(r)
}
// UUIDFromBytes converts a raw byte slice to an UUID.
func UUIDFromBytes(input []byte) (UUID, error) {
var u UUID
if len(input) != 16 {
return u, errors.New("UUIDs must be exactly 16 bytes long")
}
copy(u[:], input)
return u, nil
}
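// Illustrative example of the two helpers above: a 16-byte zero slice
// round-trips as
//	u, _ := UUIDFromBytes(make([]byte, 16))
//	u.String() // "00000000-0000-0000-0000-000000000000"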
type ColumnInfo struct {
Keyspace string
Table string
Name string
TypeInfo TypeInfo
}<|fim▁end|>
|
flagSkipMetaData byte = 0x02
flagPageSize byte = 0x04
|
<|file_name|>datepicker.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:e40a08695f05163cfb0eaeeef4588fcfad55a6576cfadfb079505495605beb33<|fim▁hole|>size 27133<|fim▁end|>
| |
<|file_name|>basic.rs<|end_file_name|><|fim▁begin|>// Generated by build.rs script in the amqp0-primitives crate.
// Pre-generated files are used by default. Generation is done with the amqp0-codegen crate.
//
// To regenerate, ignoring the pre-generated files, use: cargo --features="amqp0-build-primitives"
// To format and replace the pre-generated files, use: cargo --features="amqp0-pregen-primitives"
//
// EDITORS BEWARE: Your modifications may be overridden or removed.
pub trait AckMethod {
type Payload: Default + SetAckMethodFields;
} // pub trait AckMethod
pub trait SetAckMethodFields {
fn set_delivery_tag(&mut self, _: u64) {}
fn set_multiple(&mut self, _: bool) {}
} // pub trait SetAckMethodFields
pub struct AckBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct AckBuilder
impl<T> AckBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> AckBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> AckBuilder<T>
impl<T> Default for AckBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
AckBuilder { payload: Default::default() }
}
} // impl Default for AckBuilder
impl<T> AckBuilder<T>
where T: ::Encodable + SetAckMethodFields
{
pub fn delivery_tag(mut self, delivery_tag: u64) -> Self {
SetAckMethodFields::set_delivery_tag(&mut self.payload, delivery_tag);
self
} // set_delivery_tag()
pub fn multiple(mut self, multiple: bool) -> Self {
SetAckMethodFields::set_multiple(&mut self.payload, multiple);
self
} // set_multiple()
} // impl<T> AckBuilder<T>
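// Sketch of how the generated builders are intended to be chained, assuming a
// hypothetical payload type `AckPayload` (not part of this crate) that
// implements Default + ::Encodable + SetAckMethodFields:
//
//     let ack: AckPayload = AckBuilder::new()
//         .delivery_tag(42)
//         .multiple(false)
//         .build();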
pub trait CancelMethod<'a> {
type Payload: Default + SetCancelMethodFields<'a>;
} // pub trait CancelMethod<'a>
pub trait SetCancelMethodFields<'a> {
fn set_consumer_tag<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_no_wait(&mut self, _: bool) {}
} // pub trait SetCancelMethodFields<'a>
pub struct CancelBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct CancelBuilder
impl<T> CancelBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> CancelBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> CancelBuilder<T>
impl<T> Default for CancelBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
CancelBuilder { payload: Default::default() }
}
} // impl Default for CancelBuilder
impl<'a, T> CancelBuilder<T>
where T: ::Encodable + ::Content<'a> + SetCancelMethodFields<'a>
{
pub fn consumer_tag<V>(mut self, consumer_tag: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetCancelMethodFields::set_consumer_tag(&mut self.payload, consumer_tag.into());
self
} // set_consumer_tag()
pub fn no_wait(mut self, no_wait: bool) -> Self {
SetCancelMethodFields::set_no_wait(&mut self.payload, no_wait);
self
} // set_no_wait()
} // impl<'a, T> CancelBuilder<T>
pub trait CancelOkMethod<'a> {
type Payload: Default + SetCancelOkMethodFields<'a>;
} // pub trait CancelOkMethod<'a>
pub trait SetCancelOkMethodFields<'a> {
fn set_consumer_tag<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
} // pub trait SetCancelOkMethodFields<'a>
pub struct CancelOkBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct CancelOkBuilder
impl<T> CancelOkBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> CancelOkBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> CancelOkBuilder<T>
impl<T> Default for CancelOkBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
CancelOkBuilder { payload: Default::default() }
}
} // impl Default for CancelOkBuilder
impl<'a, T> CancelOkBuilder<T>
where T: ::Encodable + ::Content<'a> + SetCancelOkMethodFields<'a>
{
pub fn consumer_tag<V>(mut self, consumer_tag: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetCancelOkMethodFields::set_consumer_tag(&mut self.payload, consumer_tag.into());
self
} // set_consumer_tag()
} // impl<'a, T> CancelOkBuilder<T>
pub trait ConsumeMethod<'a> {
type Payload: Default + SetConsumeMethodFields<'a>;
} // pub trait ConsumeMethod<'a>
pub trait SetConsumeMethodFields<'a> {
fn set_arguments<V>(&mut self, _: V) where V: Into<::field::TableEntries<'a>> {}
fn set_consumer_tag<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_exclusive(&mut self, _: bool) {}
fn set_filter<V>(&mut self, _: V) where V: Into<::field::TableEntries<'a>> {}
fn set_no_ack(&mut self, _: bool) {}
fn set_no_local(&mut self, _: bool) {}
fn set_no_wait(&mut self, _: bool) {}
fn set_queue<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_ticket(&mut self, _: u16) {}
} // pub trait SetConsumeMethodFields<'a>
pub struct ConsumeBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct ConsumeBuilder
impl<T> ConsumeBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> ConsumeBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> ConsumeBuilder<T>
impl<T> Default for ConsumeBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
ConsumeBuilder { payload: Default::default() }
}
} // impl Default for ConsumeBuilder
impl<'a, T> ConsumeBuilder<T>
where T: ::Encodable + ::Content<'a> + SetConsumeMethodFields<'a>
{
pub fn arguments<V>(mut self, arguments: V) -> Self
where V: Into<::field::TableEntries<'a>>
{
SetConsumeMethodFields::set_arguments(&mut self.payload, arguments.into());
self
} // set_arguments()
pub fn consumer_tag<V>(mut self, consumer_tag: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetConsumeMethodFields::set_consumer_tag(&mut self.payload, consumer_tag.into());
self
} // set_consumer_tag()
pub fn exclusive(mut self, exclusive: bool) -> Self {
SetConsumeMethodFields::set_exclusive(&mut self.payload, exclusive);
self
} // set_exclusive()
pub fn filter<V>(mut self, filter: V) -> Self
where V: Into<::field::TableEntries<'a>>
{
SetConsumeMethodFields::set_filter(&mut self.payload, filter.into());
self
} // set_filter()
pub fn no_ack(mut self, no_ack: bool) -> Self {
SetConsumeMethodFields::set_no_ack(&mut self.payload, no_ack);
self
} // set_no_ack()
pub fn no_local(mut self, no_local: bool) -> Self {
SetConsumeMethodFields::set_no_local(&mut self.payload, no_local);
self
} // set_no_local()
pub fn no_wait(mut self, no_wait: bool) -> Self {
SetConsumeMethodFields::set_no_wait(&mut self.payload, no_wait);
self
} // set_no_wait()
pub fn queue<V>(mut self, queue: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetConsumeMethodFields::set_queue(&mut self.payload, queue.into());
self
} // set_queue()
pub fn ticket(mut self, ticket: u16) -> Self {
SetConsumeMethodFields::set_ticket(&mut self.payload, ticket);
self
} // set_ticket()
} // impl<'a, T> ConsumeBuilder<T>
pub trait ConsumeOkMethod<'a> {
type Payload: Default + SetConsumeOkMethodFields<'a>;
} // pub trait ConsumeOkMethod<'a>
pub trait SetConsumeOkMethodFields<'a> {
fn set_consumer_tag<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
} // pub trait SetConsumeOkMethodFields<'a>
pub struct ConsumeOkBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct ConsumeOkBuilder
impl<T> ConsumeOkBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> ConsumeOkBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> ConsumeOkBuilder<T>
impl<T> Default for ConsumeOkBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
ConsumeOkBuilder { payload: Default::default() }
}
} // impl Default for ConsumeOkBuilder
impl<'a, T> ConsumeOkBuilder<T>
where T: ::Encodable + ::Content<'a> + SetConsumeOkMethodFields<'a>
{
pub fn consumer_tag<V>(mut self, consumer_tag: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetConsumeOkMethodFields::set_consumer_tag(&mut self.payload, consumer_tag.into());
self
} // set_consumer_tag()
} // impl<'a, T> ConsumeOkBuilder<T>
pub trait DeliverMethod<'a> {
type Payload: Default + SetDeliverMethodFields<'a>;
} // pub trait DeliverMethod<'a>
pub trait SetDeliverMethodFields<'a> {
fn set_app_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_cluster_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_consumer_tag<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_content_encoding<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_content_type<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_correlation_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_delivery_mode(&mut self, _: u8) {}
fn set_delivery_tag(&mut self, _: u64) {}
fn set_exchange<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_expiration<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_headers<V>(&mut self, _: V) where V: Into<::field::TableEntries<'a>> {}
fn set_message_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_priority(&mut self, _: u8) {}
fn set_redelivered(&mut self, _: bool) {}
fn set_reply_to<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_routing_key<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_timestamp(&mut self, _: u64) {}
fn set_ty<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_user_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
} // pub trait SetDeliverMethodFields<'a>
pub struct DeliverBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct DeliverBuilder
impl<T> DeliverBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> DeliverBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> DeliverBuilder<T>
impl<T> Default for DeliverBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
DeliverBuilder { payload: Default::default() }
}
} // impl Default for DeliverBuilder
impl<'a, T> DeliverBuilder<T>
where T: ::Encodable + ::Content<'a> + SetDeliverMethodFields<'a>
{
pub fn app_id<V>(mut self, app_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_app_id(&mut self.payload, app_id.into());
self
} // set_app_id()
pub fn cluster_id<V>(mut self, cluster_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_cluster_id(&mut self.payload, cluster_id.into());
self
} // set_cluster_id()
pub fn consumer_tag<V>(mut self, consumer_tag: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_consumer_tag(&mut self.payload, consumer_tag.into());
self
} // set_consumer_tag()
pub fn content_encoding<V>(mut self, content_encoding: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_content_encoding(&mut self.payload, content_encoding.into());
self
} // set_content_encoding()
pub fn content_type<V>(mut self, content_type: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_content_type(&mut self.payload, content_type.into());
self
} // set_content_type()
pub fn correlation_id<V>(mut self, correlation_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_correlation_id(&mut self.payload, correlation_id.into());
self
} // set_correlation_id()
pub fn delivery_mode(mut self, delivery_mode: u8) -> Self {
SetDeliverMethodFields::set_delivery_mode(&mut self.payload, delivery_mode);
self
} // set_delivery_mode()
pub fn delivery_tag(mut self, delivery_tag: u64) -> Self {
SetDeliverMethodFields::set_delivery_tag(&mut self.payload, delivery_tag);
self
} // set_delivery_tag()
pub fn exchange<V>(mut self, exchange: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_exchange(&mut self.payload, exchange.into());
self
} // set_exchange()
pub fn expiration<V>(mut self, expiration: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_expiration(&mut self.payload, expiration.into());
self
} // set_expiration()
pub fn headers<V>(mut self, headers: V) -> Self
where V: Into<::field::TableEntries<'a>>
{
SetDeliverMethodFields::set_headers(&mut self.payload, headers.into());
self
} // set_headers()
pub fn message_id<V>(mut self, message_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_message_id(&mut self.payload, message_id.into());
self
} // set_message_id()
pub fn priority(mut self, priority: u8) -> Self {
SetDeliverMethodFields::set_priority(&mut self.payload, priority);
self
} // set_priority()
pub fn redelivered(mut self, redelivered: bool) -> Self {
SetDeliverMethodFields::set_redelivered(&mut self.payload, redelivered);
self
} // set_redelivered()
pub fn reply_to<V>(mut self, reply_to: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_reply_to(&mut self.payload, reply_to.into());
self
} // set_reply_to()
pub fn routing_key<V>(mut self, routing_key: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_routing_key(&mut self.payload, routing_key.into());
self
} // set_routing_key()
pub fn timestamp(mut self, timestamp: u64) -> Self {
SetDeliverMethodFields::set_timestamp(&mut self.payload, timestamp);
self
} // set_timestamp()
pub fn ty<V>(mut self, ty: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_ty(&mut self.payload, ty.into());
self
} // set_ty()
pub fn user_id<V>(mut self, user_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetDeliverMethodFields::set_user_id(&mut self.payload, user_id.into());
self
} // set_user_id()
pub fn set_headers<V>(self, _: V) -> Self
where V: Into<<T as ::Content<'a>>::Headers>
{
self
}
pub fn set_body<V>(self, _: V) -> Self
where V: Into<::std::borrow::Cow<'a, [u8]>>
{
self
}
} // impl<'a, T> DeliverBuilder<T>
pub trait GetMethod<'a> {
type Payload: Default + SetGetMethodFields<'a>;
} // pub trait GetMethod<'a>
pub trait SetGetMethodFields<'a> {
fn set_no_ack(&mut self, _: bool) {}
fn set_queue<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_ticket(&mut self, _: u16) {}
} // pub trait SetGetMethodFields<'a>
pub struct GetBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct GetBuilder
impl<T> GetBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> GetBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> GetBuilder<T>
<|fim▁hole|> GetBuilder { payload: Default::default() }
}
} // impl Default for GetBuilder
impl<'a, T> GetBuilder<T>
where T: ::Encodable + ::Content<'a> + SetGetMethodFields<'a>
{
pub fn no_ack(mut self, no_ack: bool) -> Self {
SetGetMethodFields::set_no_ack(&mut self.payload, no_ack);
self
} // set_no_ack()
pub fn queue<V>(mut self, queue: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetMethodFields::set_queue(&mut self.payload, queue.into());
self
} // set_queue()
pub fn ticket(mut self, ticket: u16) -> Self {
SetGetMethodFields::set_ticket(&mut self.payload, ticket);
self
} // set_ticket()
} // impl<'a, T> GetBuilder<T>
pub trait GetEmptyMethod<'a> {
type Payload: Default + SetGetEmptyMethodFields<'a>;
} // pub trait GetEmptyMethod<'a>
pub trait SetGetEmptyMethodFields<'a> {
fn set_cluster_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
} // pub trait SetGetEmptyMethodFields<'a>
pub struct GetEmptyBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct GetEmptyBuilder
impl<T> GetEmptyBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> GetEmptyBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> GetEmptyBuilder<T>
impl<T> Default for GetEmptyBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
GetEmptyBuilder { payload: Default::default() }
}
} // impl Default for GetEmptyBuilder
impl<'a, T> GetEmptyBuilder<T>
where T: ::Encodable + ::Content<'a> + SetGetEmptyMethodFields<'a>
{
pub fn cluster_id<V>(mut self, cluster_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetEmptyMethodFields::set_cluster_id(&mut self.payload, cluster_id.into());
self
} // set_cluster_id()
} // impl<'a, T> GetEmptyBuilder<T>
pub trait GetOkMethod<'a> {
type Payload: Default + SetGetOkMethodFields<'a>;
} // pub trait GetOkMethod<'a>
pub trait SetGetOkMethodFields<'a> {
fn set_app_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_cluster_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_content_encoding<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_content_type<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_correlation_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_delivery_mode(&mut self, _: u8) {}
fn set_delivery_tag(&mut self, _: u64) {}
fn set_exchange<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_expiration<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_headers<V>(&mut self, _: V) where V: Into<::field::TableEntries<'a>> {}
fn set_message_count(&mut self, _: u32) {}
fn set_message_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_priority(&mut self, _: u8) {}
fn set_redelivered(&mut self, _: bool) {}
fn set_reply_to<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_routing_key<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_timestamp(&mut self, _: u64) {}
fn set_ty<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_user_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
} // pub trait SetGetOkMethodFields<'a>
pub struct GetOkBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct GetOkBuilder
impl<T> GetOkBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> GetOkBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> GetOkBuilder<T>
impl<T> Default for GetOkBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
GetOkBuilder { payload: Default::default() }
}
} // impl Default for GetOkBuilder
impl<'a, T> GetOkBuilder<T>
where T: ::Encodable + ::Content<'a> + SetGetOkMethodFields<'a>
{
pub fn app_id<V>(mut self, app_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_app_id(&mut self.payload, app_id.into());
self
} // set_app_id()
pub fn cluster_id<V>(mut self, cluster_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_cluster_id(&mut self.payload, cluster_id.into());
self
} // set_cluster_id()
pub fn content_encoding<V>(mut self, content_encoding: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_content_encoding(&mut self.payload, content_encoding.into());
self
} // set_content_encoding()
pub fn content_type<V>(mut self, content_type: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_content_type(&mut self.payload, content_type.into());
self
} // set_content_type()
pub fn correlation_id<V>(mut self, correlation_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_correlation_id(&mut self.payload, correlation_id.into());
self
} // set_correlation_id()
pub fn delivery_mode(mut self, delivery_mode: u8) -> Self {
SetGetOkMethodFields::set_delivery_mode(&mut self.payload, delivery_mode);
self
} // set_delivery_mode()
pub fn delivery_tag(mut self, delivery_tag: u64) -> Self {
SetGetOkMethodFields::set_delivery_tag(&mut self.payload, delivery_tag);
self
} // set_delivery_tag()
pub fn exchange<V>(mut self, exchange: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_exchange(&mut self.payload, exchange.into());
self
} // set_exchange()
pub fn expiration<V>(mut self, expiration: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_expiration(&mut self.payload, expiration.into());
self
} // set_expiration()
pub fn headers<V>(mut self, headers: V) -> Self
where V: Into<::field::TableEntries<'a>>
{
SetGetOkMethodFields::set_headers(&mut self.payload, headers.into());
self
} // set_headers()
pub fn message_count(mut self, message_count: u32) -> Self {
SetGetOkMethodFields::set_message_count(&mut self.payload, message_count);
self
} // set_message_count()
pub fn message_id<V>(mut self, message_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_message_id(&mut self.payload, message_id.into());
self
} // set_message_id()
pub fn priority(mut self, priority: u8) -> Self {
SetGetOkMethodFields::set_priority(&mut self.payload, priority);
self
} // set_priority()
pub fn redelivered(mut self, redelivered: bool) -> Self {
SetGetOkMethodFields::set_redelivered(&mut self.payload, redelivered);
self
} // set_redelivered()
pub fn reply_to<V>(mut self, reply_to: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_reply_to(&mut self.payload, reply_to.into());
self
} // set_reply_to()
pub fn routing_key<V>(mut self, routing_key: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_routing_key(&mut self.payload, routing_key.into());
self
} // set_routing_key()
pub fn timestamp(mut self, timestamp: u64) -> Self {
SetGetOkMethodFields::set_timestamp(&mut self.payload, timestamp);
self
} // set_timestamp()
pub fn ty<V>(mut self, ty: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_ty(&mut self.payload, ty.into());
self
} // set_ty()
pub fn user_id<V>(mut self, user_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetGetOkMethodFields::set_user_id(&mut self.payload, user_id.into());
self
} // set_user_id()
pub fn set_headers<V>(self, _: V) -> Self
where V: Into<<T as ::Content<'a>>::Headers>
{
self
}
pub fn set_body<V>(self, _: V) -> Self
where V: Into<::std::borrow::Cow<'a, [u8]>>
{
self
}
} // impl<'a, T> GetOkBuilder<T>
pub trait NackMethod {
type Payload: Default + SetNackMethodFields;
} // pub trait NackMethod
pub trait SetNackMethodFields {
fn set_delivery_tag(&mut self, _: u64) {}
fn set_multiple(&mut self, _: bool) {}
fn set_requeue(&mut self, _: bool) {}
} // pub trait SetNackMethodFields
pub struct NackBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct NackBuilder
impl<T> NackBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> NackBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> NackBuilder<T>
impl<T> Default for NackBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
NackBuilder { payload: Default::default() }
}
} // impl Default for NackBuilder
impl<T> NackBuilder<T>
where T: ::Encodable + SetNackMethodFields
{
pub fn delivery_tag(mut self, delivery_tag: u64) -> Self {
SetNackMethodFields::set_delivery_tag(&mut self.payload, delivery_tag);
self
} // set_delivery_tag()
pub fn multiple(mut self, multiple: bool) -> Self {
SetNackMethodFields::set_multiple(&mut self.payload, multiple);
self
} // set_multiple()
pub fn requeue(mut self, requeue: bool) -> Self {
SetNackMethodFields::set_requeue(&mut self.payload, requeue);
self
} // set_requeue()
} // impl<T> NackBuilder<T>
pub trait PublishMethod<'a> {
type Payload: Default + SetPublishMethodFields<'a>;
} // pub trait PublishMethod<'a>
pub trait SetPublishMethodFields<'a> {
fn set_app_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_cluster_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_content_encoding<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_content_type<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_correlation_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_delivery_mode(&mut self, _: u8) {}
fn set_exchange<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_expiration<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_headers<V>(&mut self, _: V) where V: Into<::field::TableEntries<'a>> {}
fn set_immediate(&mut self, _: bool) {}
fn set_mandatory(&mut self, _: bool) {}
fn set_message_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_priority(&mut self, _: u8) {}
fn set_reply_to<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_routing_key<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_ticket(&mut self, _: u16) {}
fn set_timestamp(&mut self, _: u64) {}
fn set_ty<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_user_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
} // pub trait SetPublishMethodFields<'a>
pub struct PublishBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct PublishBuilder
impl<T> PublishBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> PublishBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> PublishBuilder<T>
impl<T> Default for PublishBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
PublishBuilder { payload: Default::default() }
}
} // impl Default for PublishBuilder
impl<'a, T> PublishBuilder<T>
where T: ::Encodable + ::Content<'a> + SetPublishMethodFields<'a>
{
pub fn app_id<V>(mut self, app_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_app_id(&mut self.payload, app_id.into());
self
} // set_app_id()
pub fn cluster_id<V>(mut self, cluster_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_cluster_id(&mut self.payload, cluster_id.into());
self
} // set_cluster_id()
pub fn content_encoding<V>(mut self, content_encoding: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_content_encoding(&mut self.payload, content_encoding.into());
self
} // set_content_encoding()
pub fn content_type<V>(mut self, content_type: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_content_type(&mut self.payload, content_type.into());
self
} // set_content_type()
pub fn correlation_id<V>(mut self, correlation_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_correlation_id(&mut self.payload, correlation_id.into());
self
} // set_correlation_id()
pub fn delivery_mode(mut self, delivery_mode: u8) -> Self {
SetPublishMethodFields::set_delivery_mode(&mut self.payload, delivery_mode);
self
} // set_delivery_mode()
pub fn exchange<V>(mut self, exchange: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_exchange(&mut self.payload, exchange.into());
self
} // set_exchange()
pub fn expiration<V>(mut self, expiration: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_expiration(&mut self.payload, expiration.into());
self
} // set_expiration()
pub fn headers<V>(mut self, headers: V) -> Self
where V: Into<::field::TableEntries<'a>>
{
SetPublishMethodFields::set_headers(&mut self.payload, headers.into());
self
} // set_headers()
pub fn immediate(mut self, immediate: bool) -> Self {
SetPublishMethodFields::set_immediate(&mut self.payload, immediate);
self
} // set_immediate()
pub fn mandatory(mut self, mandatory: bool) -> Self {
SetPublishMethodFields::set_mandatory(&mut self.payload, mandatory);
self
} // set_mandatory()
pub fn message_id<V>(mut self, message_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_message_id(&mut self.payload, message_id.into());
self
} // set_message_id()
pub fn priority(mut self, priority: u8) -> Self {
SetPublishMethodFields::set_priority(&mut self.payload, priority);
self
} // set_priority()
pub fn reply_to<V>(mut self, reply_to: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_reply_to(&mut self.payload, reply_to.into());
self
} // set_reply_to()
pub fn routing_key<V>(mut self, routing_key: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_routing_key(&mut self.payload, routing_key.into());
self
} // set_routing_key()
pub fn ticket(mut self, ticket: u16) -> Self {
SetPublishMethodFields::set_ticket(&mut self.payload, ticket);
self
} // set_ticket()
pub fn timestamp(mut self, timestamp: u64) -> Self {
SetPublishMethodFields::set_timestamp(&mut self.payload, timestamp);
self
} // set_timestamp()
pub fn ty<V>(mut self, ty: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_ty(&mut self.payload, ty.into());
self
} // set_ty()
pub fn user_id<V>(mut self, user_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetPublishMethodFields::set_user_id(&mut self.payload, user_id.into());
self
} // set_user_id()
pub fn set_headers<V>(self, _: V) -> Self
where V: Into<<T as ::Content<'a>>::Headers>
{
self
}
pub fn set_body<V>(self, _: V) -> Self
where V: Into<::std::borrow::Cow<'a, [u8]>>
{
self
}
} // impl<'a, T> PublishBuilder<T>
pub trait QosMethod {
type Payload: Default + SetQosMethodFields;
} // pub trait QosMethod
pub trait SetQosMethodFields {
fn set_global(&mut self, _: bool) {}
fn set_prefetch_count(&mut self, _: u16) {}
fn set_prefetch_size(&mut self, _: u32) {}
} // pub trait SetQosMethodFields
pub struct QosBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct QosBuilder
impl<T> QosBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> QosBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> QosBuilder<T>
impl<T> Default for QosBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
QosBuilder { payload: Default::default() }
}
} // impl Default for QosBuilder
impl<T> QosBuilder<T>
where T: ::Encodable + SetQosMethodFields
{
pub fn global(mut self, global: bool) -> Self {
SetQosMethodFields::set_global(&mut self.payload, global);
self
} // set_global()
pub fn prefetch_count(mut self, prefetch_count: u16) -> Self {
SetQosMethodFields::set_prefetch_count(&mut self.payload, prefetch_count);
self
} // set_prefetch_count()
pub fn prefetch_size(mut self, prefetch_size: u32) -> Self {
SetQosMethodFields::set_prefetch_size(&mut self.payload, prefetch_size);
self
} // set_prefetch_size()
} // impl<T> QosBuilder<T>
pub trait QosOkMethod {
type Payload: Default;
} // pub trait QosOkMethod
pub struct QosOkBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct QosOkBuilder
impl<T> QosOkBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> QosOkBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> QosOkBuilder<T>
impl<T> Default for QosOkBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
QosOkBuilder { payload: Default::default() }
}
} // impl Default for QosOkBuilder
pub trait RecoverMethod {
type Payload: Default + SetRecoverMethodFields;
} // pub trait RecoverMethod
pub trait SetRecoverMethodFields {
fn set_requeue(&mut self, _: bool) {}
} // pub trait SetRecoverMethodFields
pub struct RecoverBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct RecoverBuilder
impl<T> RecoverBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> RecoverBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> RecoverBuilder<T>
impl<T> Default for RecoverBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
RecoverBuilder { payload: Default::default() }
}
} // impl Default for RecoverBuilder
impl<T> RecoverBuilder<T>
where T: ::Encodable + SetRecoverMethodFields
{
pub fn requeue(mut self, requeue: bool) -> Self {
SetRecoverMethodFields::set_requeue(&mut self.payload, requeue);
self
} // set_requeue()
} // impl<T> RecoverBuilder<T>
pub trait RecoverAsyncMethod {
type Payload: Default + SetRecoverAsyncMethodFields;
} // pub trait RecoverAsyncMethod
pub trait SetRecoverAsyncMethodFields {
fn set_requeue(&mut self, _: bool) {}
} // pub trait SetRecoverAsyncMethodFields
pub struct RecoverAsyncBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct RecoverAsyncBuilder
impl<T> RecoverAsyncBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> RecoverAsyncBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> RecoverAsyncBuilder<T>
impl<T> Default for RecoverAsyncBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
RecoverAsyncBuilder { payload: Default::default() }
}
} // impl Default for RecoverAsyncBuilder
impl<T> RecoverAsyncBuilder<T>
where T: ::Encodable + SetRecoverAsyncMethodFields
{
pub fn requeue(mut self, requeue: bool) -> Self {
SetRecoverAsyncMethodFields::set_requeue(&mut self.payload, requeue);
self
} // set_requeue()
} // impl<T> RecoverAsyncBuilder<T>
pub trait RecoverOkMethod {
type Payload: Default;
} // pub trait RecoverOkMethod
pub struct RecoverOkBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct RecoverOkBuilder
impl<T> RecoverOkBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> RecoverOkBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> RecoverOkBuilder<T>
impl<T> Default for RecoverOkBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
RecoverOkBuilder { payload: Default::default() }
}
} // impl Default for RecoverOkBuilder
pub trait RecoverSyncMethod {
type Payload: Default + SetRecoverSyncMethodFields;
} // pub trait RecoverSyncMethod
pub trait SetRecoverSyncMethodFields {
fn set_requeue(&mut self, _: bool) {}
} // pub trait SetRecoverSyncMethodFields
pub struct RecoverSyncBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct RecoverSyncBuilder
impl<T> RecoverSyncBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> RecoverSyncBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> RecoverSyncBuilder<T>
impl<T> Default for RecoverSyncBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
RecoverSyncBuilder { payload: Default::default() }
}
} // impl Default for RecoverSyncBuilder
impl<T> RecoverSyncBuilder<T>
where T: ::Encodable + SetRecoverSyncMethodFields
{
pub fn requeue(mut self, requeue: bool) -> Self {
SetRecoverSyncMethodFields::set_requeue(&mut self.payload, requeue);
self
} // set_requeue()
} // impl<T> RecoverSyncBuilder<T>
pub trait RecoverSyncOkMethod {
type Payload: Default;
} // pub trait RecoverSyncOkMethod
pub struct RecoverSyncOkBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct RecoverSyncOkBuilder
impl<T> RecoverSyncOkBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> RecoverSyncOkBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> RecoverSyncOkBuilder<T>
impl<T> Default for RecoverSyncOkBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
RecoverSyncOkBuilder { payload: Default::default() }
}
} // impl Default for RecoverSyncOkBuilder
pub trait RejectMethod {
type Payload: Default + SetRejectMethodFields;
} // pub trait RejectMethod
pub trait SetRejectMethodFields {
fn set_delivery_tag(&mut self, _: u64) {}
fn set_requeue(&mut self, _: bool) {}
} // pub trait SetRejectMethodFields
pub struct RejectBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct RejectBuilder
impl<T> RejectBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> RejectBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> RejectBuilder<T>
impl<T> Default for RejectBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
RejectBuilder { payload: Default::default() }
}
} // impl Default for RejectBuilder
impl<T> RejectBuilder<T>
where T: ::Encodable + SetRejectMethodFields
{
pub fn delivery_tag(mut self, delivery_tag: u64) -> Self {
SetRejectMethodFields::set_delivery_tag(&mut self.payload, delivery_tag);
self
} // set_delivery_tag()
pub fn requeue(mut self, requeue: bool) -> Self {
SetRejectMethodFields::set_requeue(&mut self.payload, requeue);
self
} // set_requeue()
} // impl<T> RejectBuilder<T>
pub trait ReturnMethod<'a> {
type Payload: Default + SetReturnMethodFields<'a>;
} // pub trait ReturnMethod<'a>
pub trait SetReturnMethodFields<'a> {
fn set_app_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_cluster_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_content_encoding<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_content_type<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_correlation_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_delivery_mode(&mut self, _: u8) {}
fn set_exchange<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_expiration<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_headers<V>(&mut self, _: V) where V: Into<::field::TableEntries<'a>> {}
fn set_message_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_priority(&mut self, _: u8) {}
fn set_reply_code(&mut self, _: u16) {}
fn set_reply_text<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_reply_to<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_routing_key<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_timestamp(&mut self, _: u64) {}
fn set_ty<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
fn set_user_id<V>(&mut self, _: V) where V: Into<::std::borrow::Cow<'a, str>> {}
} // pub trait SetReturnMethodFields<'a>
pub struct ReturnBuilder<T>
where T: ::Encodable
{
payload: T,
} // struct ReturnBuilder
impl<T> ReturnBuilder<T>
where T: Default + ::Encodable
{
pub fn new() -> Self {
Default::default()
}
} // impl Builder (new)
impl<T> ReturnBuilder<T>
where T: ::Encodable
{
pub fn build(self) -> T {
self.payload
}
} // impl<T> ReturnBuilder<T>
impl<T> Default for ReturnBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
ReturnBuilder { payload: Default::default() }
}
} // impl Default for ReturnBuilder
impl<'a, T> ReturnBuilder<T>
where T: ::Encodable + ::Content<'a> + SetReturnMethodFields<'a>
{
pub fn app_id<V>(mut self, app_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_app_id(&mut self.payload, app_id.into());
self
} // set_app_id()
pub fn cluster_id<V>(mut self, cluster_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_cluster_id(&mut self.payload, cluster_id.into());
self
} // set_cluster_id()
pub fn content_encoding<V>(mut self, content_encoding: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_content_encoding(&mut self.payload, content_encoding.into());
self
} // set_content_encoding()
pub fn content_type<V>(mut self, content_type: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_content_type(&mut self.payload, content_type.into());
self
} // set_content_type()
pub fn correlation_id<V>(mut self, correlation_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_correlation_id(&mut self.payload, correlation_id.into());
self
} // set_correlation_id()
pub fn delivery_mode(mut self, delivery_mode: u8) -> Self {
SetReturnMethodFields::set_delivery_mode(&mut self.payload, delivery_mode);
self
} // set_delivery_mode()
pub fn exchange<V>(mut self, exchange: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_exchange(&mut self.payload, exchange.into());
self
} // set_exchange()
pub fn expiration<V>(mut self, expiration: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_expiration(&mut self.payload, expiration.into());
self
} // set_expiration()
pub fn headers<V>(mut self, headers: V) -> Self
where V: Into<::field::TableEntries<'a>>
{
SetReturnMethodFields::set_headers(&mut self.payload, headers.into());
self
} // set_headers()
pub fn message_id<V>(mut self, message_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_message_id(&mut self.payload, message_id.into());
self
} // set_message_id()
pub fn priority(mut self, priority: u8) -> Self {
SetReturnMethodFields::set_priority(&mut self.payload, priority);
self
} // set_priority()
pub fn reply_code(mut self, reply_code: u16) -> Self {
SetReturnMethodFields::set_reply_code(&mut self.payload, reply_code);
self
} // set_reply_code()
pub fn reply_text<V>(mut self, reply_text: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_reply_text(&mut self.payload, reply_text.into());
self
} // set_reply_text()
pub fn reply_to<V>(mut self, reply_to: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_reply_to(&mut self.payload, reply_to.into());
self
} // set_reply_to()
pub fn routing_key<V>(mut self, routing_key: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_routing_key(&mut self.payload, routing_key.into());
self
} // set_routing_key()
pub fn timestamp(mut self, timestamp: u64) -> Self {
SetReturnMethodFields::set_timestamp(&mut self.payload, timestamp);
self
} // set_timestamp()
pub fn ty<V>(mut self, ty: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_ty(&mut self.payload, ty.into());
self
} // set_ty()
pub fn user_id<V>(mut self, user_id: V) -> Self
where V: Into<::std::borrow::Cow<'a, str>>
{
SetReturnMethodFields::set_user_id(&mut self.payload, user_id.into());
self
} // set_user_id()
pub fn set_headers<V>(self, _: V) -> Self
where V: Into<<T as ::Content<'a>>::Headers>
{
self
}
pub fn set_body<V>(self, _: V) -> Self
where V: Into<::std::borrow::Cow<'a, [u8]>>
{
self
}
} // impl<'a, T> ReturnBuilder<T><|fim▁end|>
|
impl<T> Default for GetBuilder<T>
where T: ::Encodable + Default
{
fn default() -> Self {
|
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
This module defines serializers for the main API data objects:
.. autosummary::
:nosignatures:
DimensionSerializer
FilterSerializer
MessageSerializer
QuestionSerializer
"""
from django.core.paginator import Paginator
from rest_framework import serializers, pagination
import emoticonvis.apps.corpus.models as corpus_models
import emoticonvis.apps.enhance.models as enhance_models
# Assumed import path (not shown in the original excerpt) for the coding app models
# referenced below as coding_models.
import emoticonvis.apps.coding.models as coding_models
from django.contrib.auth.models import User
# A simple string field that looks up dimensions on deserialization
class MessageSerializer(serializers.ModelSerializer):
"""
JSON representation of :class:`.Message`
objects for the API.
Messages are provided in a simple format that is useful for displaying
examples:
::
{
"id": 52,
"dataset": 2,
"text": "Some sort of thing or other",
"sender": {
"id": 2,
"dataset": 1
"original_id": 2568434,
"username": "my_name",
"full_name": "My Name"
},
"time": "2010-02-25T00:23:53Z"
}
Additional fields may be added later.
"""
class Meta:
model = corpus_models.Message
fields = ('id', 'dataset', 'text', )
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, instance):
return instance.username
class Meta:
model = User
fields = ('username', )
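# Illustrative note: because of the to_representation override above, a User whose
# username is "alice" serializes to the bare string "alice" rather than a dict.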
class FeatureVectorSerializer(serializers.Serializer):
message = MessageSerializer()
tokens = serializers.ListField()
feature_vector = serializers.ListField(child=serializers.DictField())
class FeatureCodeDistributionSerializer(serializers.Serializer):
feature_index = serializers.IntegerField()
feature_text = serializers.CharField()
distribution = serializers.ListField(child=serializers.DictField())
class SVMResultSerializer(serializers.Serializer):
results = serializers.DictField()
messages = serializers.ListField(child=FeatureVectorSerializer(), required=True)
class FeatureSerializer(serializers.ModelSerializer):
token_list = serializers.ListField(child=serializers.CharField(), required=False)
class Meta:
model = enhance_models.Feature
fields = ('id', 'dictionary', 'index', 'text', 'document_frequency', 'token_list', )
read_only_fields = ('id', 'dictionary', 'index', 'text', 'document_frequency', )
class PaginatedMessageSerializer(pagination.PaginationSerializer):
class Meta:
object_serializer_class = MessageSerializer
class DatasetSerializer(serializers.ModelSerializer):
class Meta:
model = corpus_models.Dataset
fields = ('id', 'name', 'description', 'message_count', )
read_only_fields = ('id', 'name', 'description', 'message_count', )
class DictionarySerializer(serializers.ModelSerializer):
dataset = DatasetSerializer()<|fim▁hole|> fields = ('id', 'name', 'time', 'feature_count', 'dataset', )
read_only_fields = ('id', 'name', 'time', 'feature_count', 'dataset', )
class CodeAssignmentSerializer(serializers.ModelSerializer):
class Meta:
model = coding_models.CodeAssignment
fields = ('id', 'source', 'message', 'code', 'is_example', 'is_ambiguous', 'is_saved', )
read_only_fields = ('id', 'source', )
class CodeDefinitionSerializer(serializers.Serializer):
code = serializers.CharField(required=False)
source = UserSerializer(required=False)
text = serializers.CharField()
examples = MessageSerializer(many=True, required=False)
class CodeMessageSerializer(serializers.Serializer):
code = serializers.CharField()
source = UserSerializer()
messages = MessageSerializer(many=True)
class DisagreementIndicatorSerializer(serializers.ModelSerializer):
user_assignment = CodeAssignmentSerializer(required=False)
partner_assignment = CodeAssignmentSerializer(required=False)
class Meta:
model = coding_models.DisagreementIndicator
fields = ('id', 'message', 'user_assignment', 'partner_assignment', 'type', )
read_only_fields = ('id', 'message', 'user_assignment', 'partner_assignment', )
class PairwiseSerializer(serializers.Serializer):
user_code = serializers.CharField()
partner_code = serializers.CharField()
count = serializers.IntegerField()<|fim▁end|>
|
class Meta:
model = enhance_models.Dictionary
|
<|file_name|>export_all_courses.py<|end_file_name|><|fim▁begin|>"""
Script for exporting all courseware from Mongo to a directory and listing the courses which failed to export
"""
from django.core.management.base import BaseCommand
from six import text_type
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.xml_exporter import export_course_to_xml
class Command(BaseCommand):
"""
Export all courses from mongo to the specified data directory and list the courses which failed to export
"""
help = 'Export all courses from mongo to the specified data directory and list the courses which failed to export'
def add_arguments(self, parser):
parser.add_argument('output_path')
def handle(self, *args, **options):
"""
Execute the command
"""
courses, failed_export_courses = export_courses_to_output_path(options['output_path'])
print("=" * 80)
print("=" * 30 + "> Export summary")
print(u"Total number of courses to export: {0}".format(len(courses)))
print(u"Total number of courses which failed to export: {0}".format(len(failed_export_courses)))
print("List of export failed courses ids:")
print("\n".join(failed_export_courses))
print("=" * 80)
def export_courses_to_output_path(output_path):
"""<|fim▁hole|> root_dir = output_path
courses = module_store.get_courses()
course_ids = [x.id for x in courses]
failed_export_courses = []
for course_id in course_ids:
print("-" * 80)
print(u"Exporting course id = {0} to {1}".format(course_id, output_path))
try:
course_dir = text_type(course_id).replace('/', '...')
export_course_to_xml(module_store, content_store, course_id, root_dir, course_dir)
except Exception as err: # pylint: disable=broad-except
failed_export_courses.append(text_type(course_id))
print(u"=" * 30 + u"> Oops, failed to export {0}".format(course_id))
print("Error:")
print(err)
return courses, failed_export_courses<|fim▁end|>
|
Export all courses to target directory and return the list of courses which failed to export
"""
content_store = contentstore()
module_store = modulestore()
|
<|file_name|>engine.py<|end_file_name|><|fim▁begin|>/*Owner & Copyrights: Vance King Saxbe. A.*/""" Copyright (c) <2014> Author Vance King Saxbe. A, and contributors Power Dominion Enterprise, Precieux Consulting and other contributors. Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting and GoldSax Technologies email @[email protected]. Development teams from Power Dominion Enterprise, Precieux Consulting. Project sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager."""#!/usr/bin/env python3
import _thread
import os
import sys
import time
import gc
from src import googlequotemachine
from src import yahooquotemachine
from src import bloombergquotemachine
from src import createtablesgooglefinance
from src import createtablesyahoofinance
from src import createtablesbloomberg
from time import localtime, strftime
start1 = []
sys.setrecursionlimit(1000000)
database = "data/"
markettime = {}
with open("conf/MarketTimings.conf") as fillees:
mlist = fillees.read().splitlines()
fillees.close()
for line in mlist:
items = line.split(", ")
key, values = items[0], items[1]
markettime[key] = values
with open('conf/symbolname.conf') as fille:
synamelist = fille.read().splitlines()
fille.close()
timetorun = 1800
cycle = 1
while("TRUE"):
with open('conf/urls.conf') as openedfile:
fileaslist = openedfile.read().splitlines()
openedfile.close()
a_lock = _thread.allocate_lock()
thr = []<|fim▁hole|> print("locks placed and Market engine is running for the...", cycle)
for lines in fileaslist:
lisj = lines.split('", "')
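        # Each urls.conf line is expected to look like: "<url>", "<g|y|other>", "<MarketName.db>"
        # index 1 selects the quote source (google/yahoo/bloomberg), index 2 names the per-market database file.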
mtime = markettime[lisj[2].replace('"','')]
mktime = mtime.split("-")
if mktime[1] < mktime[0]:
righto = mktime[1].split(":")
close = str(str(int(righto[0])+24)+":"+righto[1])
else:
close = mktime[1]
rightnow = strftime("%H:%M", localtime())
if rightnow < strftime("04:00"):
right = rightnow.split(":")
rightnow = str(str(int(right[0])+24)+":"+right[1])
if (close > rightnow > mktime[0]):
print("Market ", lisj[2].replace('.db"',''), " is starting at cycle ", cycle)
if lisj[1] =='g':
thr.append(_thread.start_new_thread(googlequotemachine.actionking, (a_lock, start1, lisj[0].replace('"',''),database+lisj[2].replace('"',''),0,synamelist,1,0,timetorun) ))
elif lisj[1] =='y':
thr.append(_thread.start_new_thread(yahooquotemachine.actionking, (a_lock,start1, lisj[0].replace('"',''),database+lisj[2].replace('"',''),0,synamelist,1,0,timetorun) ))
else:
thr.append(_thread.start_new_thread(bloombergquotemachine.actionking, (a_lock,start1, lisj[0].replace('"',''),database+lisj[2].replace('"',''),0,) ))
time.sleep(0.00001)
print("locks placed and Market engine is running for the....", cycle, " time...with threads", thr )
time.sleep(timetorun)
gc.collect()
print("locks released and Market engine is restarting for the...", cycle, " time...")
cycle = cycle + 1
/*email to provide support at [email protected], [email protected], For donations please write to [email protected]*/<|fim▁end|>
|
with a_lock:
|
<|file_name|>petition-page-loader.ts<|end_file_name|><|fim▁begin|>/// <reference path="../node.d.ts" />
import loading = require('./loading');
import https = require('https');
import petitionUtil = require('../private/petition-util');
var getJsonOverHttps = petitionUtil.getJsonOverHttps;
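// Usage sketch (hypothetical calling code, not part of the original file):
//   var loader = new UkPetitions.PetitionPageLoader();
//   loader.load(1)
//       .on('loaded', function (data) { /* handle the page of petition summaries */ })
//       .on('error', function (err) { /* handle the failure */ });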
export module UkPetitions {
/**
* Loads a page of petition summaries. It is stateless.
*/
export class PetitionPageLoader {
private agent: https.Agent;
/**
* Constructor for petition page loaders.
     * @param agent The HTTP agent to use to make requests.
*/
constructor(agent?: https.Agent) {
this.agent = agent;
}
/**
* Load a page of petitions by number. Returns an emitter. Emits either 'loaded' or 'error' events.
* The 'loaded' event is passed the data of the petition.<|fim▁hole|> * @return The emitter
*/
load(page: any): loading.UkPetitions.Loading {
var emitter: loading.UkPetitions.Loading = new loading.UkPetitions.Loading();
var pathToLoad: string;
if (typeof page === 'number') {
pathToLoad = '/petitions.json?page=' + page;
} else if (typeof page === 'string') {
pathToLoad = page;
} else if (typeof page === 'object') {
if (page instanceof String) {
pathToLoad = page;
} else if (page instanceof Number) {
pathToLoad = '/petitions.json?page=' + page;
} else {
emitter.error('Problem parameter');
return emitter;
}
} else {
emitter.error('Problem parameter');
return emitter;
}
getJsonOverHttps({
hostname: 'petition.parliament.uk',
port: 443,
path: pathToLoad,
agent: this.agent
})
.on('error', emitter.error.bind(emitter))
.on('data', emitter.loaded.bind(emitter));
return emitter;
}
}
}<|fim▁end|>
|
* The 'error' event is passed the Error.
|
<|file_name|>Lexer.cpp<|end_file_name|><|fim▁begin|>// Lexer.cpp
// Copyright (c) 2014 - 2015, [email protected]
// Licensed under the Apache License Version 2.0.
#include <string>
#include <ctype.h>
#include <cassert>
#include <iostream>
#include "parser.h"
#include "../utf8rewind/include/utf8rewind/utf8rewind.h"
using string_view = std::experimental::string_view;
namespace Lexer
{
using namespace Parser;
static void skipWhitespace(string_view& line, Pin& pos, size_t* offset)
{
size_t skip = 0;
while(line.length() > skip && (line[skip] == '\t' || line[skip] == ' '))
{
(line[skip] == ' ' ? pos.col++ : pos.col += TAB_WIDTH);
skip++;
}
line.remove_prefix(skip);
(*offset) += skip;
}
template <size_t N>
static bool hasPrefix(const string_view& str, char const (&literal)[N])
{
if(str.length() < N - 1) return false;
for(size_t i = 0; i < N - 1; i++)
if(str[i] != literal[i]) return false;
return true;
}
template <size_t N>
static bool compare(const string_view& str, char const (&literal)[N])
{
if(str.length() != N - 1) return false;
for(size_t i = 0; i < N - 1; i++)
if(str[i] != literal[i]) return false;
return true;
}
static TType prevType = TType::Invalid;
static size_t prevID = 0;
static bool shouldConsiderUnaryLiteral(string_view& stream, Pin& pos)
{
// check the previous token
bool res = (prevType != TType::Invalid && prevID == pos.fileID && (prevType != TType::RParen && prevType != TType::RSquare
&& prevType != TType::Identifier && prevType != TType::Number));
if(!res) return false;
// check if the current char is a + or -
if(stream.length() == 0) return false;
if(stream[0] != '+' && stream[0] != '-') return false;
// check if there's only spaces between this and the number itself
for(size_t i = 1; i < stream.length(); i++)
{
if(isdigit(stream[i])) return true;
else if(stream[i] != ' ') return false;
}
return false;
}
TType getNextToken(const util::FastVector<string_view>& lines, size_t* line, size_t* offset, const string_view& whole,
Pin& pos, Token* out)
{
bool flag = true;
if(*line == lines.size())
{
out->pin = pos;
out->type = TType::EndOfFile;
return TType::EndOfFile;
}
string_view stream = lines[*line].substr(*offset);
size_t read = 0;
size_t unicodeLength = 0;
// first eat all whitespace
skipWhitespace(stream, pos, offset);
Token& tok = *out;
tok.pin = pos;
// check compound symbols first.
if(hasPrefix(stream, "//"))
{
tok.type = TType::Comment;
stream = stream.substr(0, 0);
(*line)++;
pos.line++;
(*offset) = 0;
// don't assign lines[line] = stream, since over here we've changed 'line' to be the next one.
flag = false;
tok.text = "";
}
else if(hasPrefix(stream, "=="))
{
tok.type = TType::EqualsTo;
tok.text = "==";
read = 2;
}
else if(hasPrefix(stream, ">="))
{
tok.type = TType::GreaterEquals;
tok.text = ">=";
read = 2;
}
else if(hasPrefix(stream, "<="))
{
tok.type = TType::LessThanEquals;
tok.text = "<=";
read = 2;
}
else if(hasPrefix(stream, "!="))
{
tok.type = TType::NotEquals;
tok.text = "!=";
read = 2;
}
else if(hasPrefix(stream, "||"))
{
tok.type = TType::LogicalOr;
tok.text = "||";
read = 2;
}
else if(hasPrefix(stream, "&&"))
{
tok.type = TType::LogicalAnd;
tok.text = "&&";
read = 2;
}
else if(hasPrefix(stream, "->"))
{
tok.type = TType::Arrow;
tok.text = "->";
read = 2;
}
else if(hasPrefix(stream, "++"))
{
tok.type = TType::DoublePlus;
tok.text = "++";
read = 2;
}
else if(hasPrefix(stream, "--"))
{
tok.type = TType::DoubleMinus;
tok.text = "--";
read = 2;
}
else if(hasPrefix(stream, "+="))
{
tok.type = TType::PlusEq;
tok.text = "+=";
read = 2;
}
else if(hasPrefix(stream, "-="))
{
tok.type = TType::MinusEq;
tok.text = "-=";
read = 2;
}
else if(hasPrefix(stream, "*="))
{
tok.type = TType::MultiplyEq;
tok.text = "*=";
read = 2;
}
else if(hasPrefix(stream, "/="))
{
tok.type = TType::DivideEq;
tok.text = "/=";
read = 2;
}
else if(hasPrefix(stream, "%="))
{
tok.type = TType::ModEq;
tok.text = "%=";
read = 2;
}
else if(hasPrefix(stream, "&="))
{
tok.type = TType::AmpersandEq;
tok.text = "&=";
read = 2;
}
else if(hasPrefix(stream, "|="))
{
tok.type = TType::PipeEq;
tok.text = "|=";
read = 2;
}
else if(hasPrefix(stream, "^="))
{
tok.type = TType::CaretEq;
tok.text = "^=";
read = 2;
}
else if(hasPrefix(stream, "..."))
{
tok.type = TType::Ellipsis;
tok.text = "...";
read = 3;
}
else if(hasPrefix(stream, "..<"))
{
tok.type = TType::HalfOpenEllipsis;
tok.text = "..<";
read = 3;
}
else if(hasPrefix(stream, "/*"))
{
int currentNest = 1;
// support nested, so basically we have to loop until we find either a /* or a */
stream.remove_prefix(2);
(*offset) += 2;
pos.col += 2;
Pin opening = pos;
Pin curpos = pos;
size_t k = 0;
while(currentNest > 0)
{
// we can do this, because we know the closing token (*/) is 2 chars long
// so if we have 1 char left, gg.
if(k + 1 == stream.size() || stream[k] == '\n')
{
if(*line + 1 == lines.size())
parserError(opening, "Expected closing */ (reached EOF), for block comment started here:");
// else, get the next line.
// also note: if we're in this loop, we're inside a block comment.
// since the ending token cannot be split across lines, we know that this last char
// must also be part of the comment. hence, just skip over it.
k = 0;
curpos.line++;
curpos.col = 0;
(*offset) = 0;
(*line)++;
stream = lines[*line];
continue;
}
if(stream[k] == '/' && stream[k + 1] == '*')
currentNest++, k++, curpos.col++, opening = curpos;
else if(stream[k] == '*' && stream[k + 1] == '/')
currentNest--, k++, curpos.col++;
k++;
curpos.col++;
}
if(currentNest != 0)
parserError(opening, "Expected closing */ (reached EOF), for block comment started here:");
pos = curpos;
// don't actually store the text, because it's pointless and memory-wasting
// tok.text = "/* I used to be a comment like you, until I took a memory-leak to the knee. */";
tok.type = TType::Comment;
tok.text = "";
read = k;
}
else if(hasPrefix(stream, "*/"))
{
parserError(tok, "Unexpected '*/'");
}
// unicode stuff
else if(hasPrefix(stream, "ƒ"))
{
tok.type = TType::Func;
read = std::string("ƒ").length();
tok.text = "ƒ";
unicodeLength = 1;
}
else if(hasPrefix(stream, "fi"))
{
tok.type = TType::ForeignFunc;
read = std::string("fi").length();
tok.text = "fi";
unicodeLength = 1;
}
else if(hasPrefix(stream, "÷"))
{
tok.type = TType::Divide;<|fim▁hole|> unicodeLength = 1;
}
else if(hasPrefix(stream, "≠"))
{
tok.type = TType::NotEquals;
read = std::string("≠").length();
tok.text = "≠";
unicodeLength = 1;
}
else if(hasPrefix(stream, "≤"))
{
tok.type = TType::LessThanEquals;
read = std::string("≤").length();
tok.text = "≤";
unicodeLength = 1;
}
else if(hasPrefix(stream, "≥"))
{
tok.type = TType::GreaterEquals;
read = std::string("≥").length();
tok.text = "≥";
unicodeLength = 1;
}
// note some special-casing is needed to differentiate between unary +/- and binary +/-
// cases where we want binary:
// ...) + 3
// ...] + 3
// ident + 3
// number + 3
// so in every other case we want unary +/-.
else if(!stream.empty() && (isdigit(stream[0]) || shouldConsiderUnaryLiteral(stream, pos)))
{
// copy it.
auto tmp = stream;
if(stream.find('-') == 0 || stream.find('+') == 0)
tmp.remove_prefix(1);
int base = 10;
if(tmp.find("0x") == 0 || tmp.find("0X") == 0)
base = 16, tmp.remove_prefix(2);
else if(tmp.find("0b") == 0 || tmp.find("0B") == 0)
base = 2, tmp.remove_prefix(2);
// find that shit
auto end = std::find_if_not(tmp.begin(), tmp.end(), [base](const char& c) -> bool {
if(base == 10) return isdigit(c);
if(base == 16) return isdigit(c) || (toupper(c) >= 'A' && toupper(c) <= 'F');
else return (c == '0' || c == '1');
});
tmp.remove_prefix((end - tmp.begin()));
// check if we have 'e' or 'E'
bool hadExp = false;
if(tmp.size() > 0 && (tmp[0] == 'e' || tmp[0] == 'E'))
{
if(base != 10)
parserError("Exponential form is supported with neither hexadecimal nor binary literals");
// find that shit
auto next = std::find_if_not(tmp.begin() + 1, tmp.end(), isdigit);
// this does the 'e' as well.
tmp.remove_prefix(next - tmp.begin());
hadExp = true;
}
size_t didRead = stream.size() - tmp.size();
auto post = stream.substr(didRead);
if(!post.empty() && post[0] == '.')
{
if(base != 10)
parserError("Invalid floating point literal; only valid in base 10");
else if(hadExp)
parserError("Invalid floating point literal; decimal point cannot occur after the exponent ('e' or 'E').");
// if the previous token was a '.' as well, then we're doing some tuple access
// eg. x.0.1 (we would be at '0', having a period both ahead and behind us)
// if the next token is not a number, then same thing, eg.
// x.0.z, where the first tuple element of 'x' is a struct or something.
// so -- lex a floating point *iff* the previous token was not '.', and the next token is a digit.
if(prevType != TType::Period && post.size() > 1 && isdigit(post[1]))
{
// yes, parse a floating point
post.remove_prefix(1), didRead++;
while(isdigit(post.front()))
post.remove_prefix(1), didRead++;
// ok.
}
else
{
// no, just return the integer token.
// (which we do below, so just do nothing here)
}
}
tok.text = stream.substr(0, didRead);
tok.type = TType::Number;
tok.pin.len = didRead;
read = didRead;
}
else if(!stream.empty() && (stream[0] == '_' || utf8iscategory(stream.data(), stream.size(), UTF8_CATEGORY_LETTER) > 0))
{
// get as many letters as possible first
size_t identLength = utf8iscategory(stream.data(), stream.size(),
UTF8_CATEGORY_LETTER | UTF8_CATEGORY_PUNCTUATION_CONNECTOR | UTF8_CATEGORY_NUMBER);
bool isExclamation = (stream.size() - identLength > 0) && stream.substr(identLength).front() == '!';
read = identLength;
tok.text = stream.substr(0, identLength);
// check for keywords
if(compare(tok.text, "class")) tok.type = TType::Class;
else if(compare(tok.text, "struct")) tok.type = TType::Struct;
else if(compare(tok.text, "fn")) tok.type = TType::Func;
else if(compare(tok.text, "import")) tok.type = TType::Import;
else if(compare(tok.text, "var")) tok.type = TType::Var;
else if(compare(tok.text, "let")) tok.type = TType::Val;
else if(compare(tok.text, "for")) tok.type = TType::For;
else if(compare(tok.text, "while")) tok.type = TType::While;
else if(compare(tok.text, "if")) tok.type = TType::If;
else if(compare(tok.text, "else")) tok.type = TType::Else;
else if(compare(tok.text, "return")) tok.type = TType::Return;
else if(compare(tok.text, "is")) tok.type = TType::Is;
else if(compare(tok.text, "switch")) tok.type = TType::Switch;
else if(compare(tok.text, "case")) tok.type = TType::Case;
else if(compare(tok.text, "enum")) tok.type = TType::Enum;
else if(compare(tok.text, "ffi")) tok.type = TType::ForeignFunc;
else if(compare(tok.text, "true")) tok.type = TType::True;
else if(compare(tok.text, "false")) tok.type = TType::False;
else if(compare(tok.text, "static")) tok.type = TType::Static;
else if(compare(tok.text, "break")) tok.type = TType::Break;
else if(compare(tok.text, "continue")) tok.type = TType::Continue;
else if(compare(tok.text, "do")) tok.type = TType::Do;
else if(compare(tok.text, "loop")) tok.type = TType::Loop;
else if(compare(tok.text, "defer")) tok.type = TType::Defer;
else if(compare(tok.text, "public")) tok.type = TType::Public;
else if(compare(tok.text, "private")) tok.type = TType::Private;
else if(compare(tok.text, "internal")) tok.type = TType::Internal;
else if(compare(tok.text, "alloc")) tok.type = TType::Alloc;
else if(compare(tok.text, "dealloc")) tok.type = TType::Dealloc;
else if(compare(tok.text, "typeof")) tok.type = TType::Typeof;
else if(compare(tok.text, "typeid")) tok.type = TType::Typeid;
else if(compare(tok.text, "sizeof")) tok.type = TType::Sizeof;
else if(compare(tok.text, "get")) tok.type = TType::Get;
else if(compare(tok.text, "set")) tok.type = TType::Set;
else if(compare(tok.text, "null")) tok.type = TType::Null;
else if(compare(tok.text, "module")) tok.type = TType::Module;
else if(compare(tok.text, "namespace")) tok.type = TType::Namespace;
else if(compare(tok.text, "extension")) tok.type = TType::Extension;
else if(compare(tok.text, "typealias")) tok.type = TType::TypeAlias;
else if(compare(tok.text, "protocol")) tok.type = TType::Protocol;
else if(compare(tok.text, "override")) tok.type = TType::Override;
else if(compare(tok.text, "operator")) tok.type = TType::Operator;
else if(compare(tok.text, "as")) { tok.type = TType::As; if(isExclamation) { read++; tok.type = TType::AsExclamation; } }
else tok.type = TType::Identifier;
}
else if(!stream.empty() && stream[0] == '"')
{
// string literal
// because we want to avoid using std::string (ie. copying) in the lexer (Token), we must send the string over verbatim.
// store the starting position
size_t start = stream.begin() - whole.begin() + 1;
// opening "
pos.col++;
size_t didRead = 0;
size_t i = 1;
for(; stream[i] != '"'; i++)
{
if(stream[i] == '\\')
{
if(i + 1 == stream.size() || *line + 1 == lines.size())
{
parserError("Unexpected end of input");
}
else if(stream[i + 1] == '"')
{
// add the quote and the backslash, and skip it.
didRead += 2;
pos.col += 2;
i++;
}
// breaking string over two lines
else if(stream[i + 1] == '\n')
{
// skip it, then move to the next line
pos.line++;
pos.col = 1;
(*line)++;
i = 0;
// just a fudge factor gotten from empirical evidence
// 3 extra holds for multiple lines, so all is well.
didRead += 3;
stream = lines[*line];
(*offset) = 0;
}
else
{
// just put the backslash in.
// and don't skip the next one.
didRead++;
pos.col++;
}
continue;
}
didRead++;
pos.col++;
if(i == stream.size() - 1 || stream[i] == '\n')
{
parserError(Pin(pos.fileID, pos.line, pos.col + i, pos.len), "Expected closing '\"' (%zu/%zu/%zu/%c/%zu)",
i, stream.size(), didRead, stream[i], *offset);
}
}
// closing "
pos.col++;
tok.type = TType::StringLiteral;
tok.text = whole.substr(start, didRead);
stream = stream.substr(i + 1);
(*offset) += i + 1;
read = 0;
flag = false;
}
else if(!stream.empty())
{
if(isascii(stream[0]))
{
// check the first char
switch(stream[0])
{
// for single-char things
case '\n': tok.type = TType::NewLine; pos.line++; break;
case '{': tok.type = TType::LBrace; break;
case '}': tok.type = TType::RBrace; break;
case '(': tok.type = TType::LParen; break;
case ')': tok.type = TType::RParen; break;
case '[': tok.type = TType::LSquare; break;
case ']': tok.type = TType::RSquare; break;
case '<': tok.type = TType::LAngle; break;
case '>': tok.type = TType::RAngle; break;
case '+': tok.type = TType::Plus; break;
case '-': tok.type = TType::Minus; break;
case '*': tok.type = TType::Asterisk; break;
case '/': tok.type = TType::Divide; break;
case '\'': tok.type = TType::SQuote; break;
case '.': tok.type = TType::Period; break;
case ',': tok.type = TType::Comma; break;
case ':': tok.type = TType::Colon; break;
case '=': tok.type = TType::Equal; break;
case '?': tok.type = TType::Question; break;
case '!': tok.type = TType::Exclamation; break;
case ';': tok.type = TType::Semicolon; break;
case '&': tok.type = TType::Ampersand; break;
case '%': tok.type = TType::Percent; break;
case '|': tok.type = TType::Pipe; break;
case '@': tok.type = TType::At; break;
case '#': tok.type = TType::Pound; break;
case '~': tok.type = TType::Tilde; break;
case '^': tok.type = TType::Caret; break;
default:
parserError(tok, "Unknown token '%c'", stream[0]);
}
tok.text = stream.substr(0, 1);
read = 1;
}
else if(utf8iscategory(stream.data(), stream.size(), UTF8_CATEGORY_SYMBOL_MATH | UTF8_CATEGORY_PUNCTUATION_OTHER) > 0)
{
read = utf8iscategory(stream.data(), stream.size(), UTF8_CATEGORY_SYMBOL_MATH | UTF8_CATEGORY_PUNCTUATION_OTHER);
tok.text = stream.substr(0, read);
tok.type = TType::UnicodeSymbol;
}
else
{
parserError(tok, "Unknown token '%s'", stream.substr(0, 10).to_string().c_str());
}
}
stream.remove_prefix(read);
if(flag)
(*offset) += read;
if(tok.type != TType::NewLine)
{
if(read > 0)
{
// note(debatable): put the actual "position" in the front of the token
pos.col += read;
// special handling -- things like ƒ, ≤ etc. are one character wide, but can be several *bytes* long.
pos.len = (unicodeLength > 0 ? unicodeLength : read);
tok.pin.len = read;
}
}
else
{
pos.col = 1;
(*line)++;
(*offset) = 0;
}
// printf("token %s: %d // %d\n", tok.text.to_string().c_str(), tok.pin.col, pos.col);
prevType = tok.type;
prevID = tok.pin.fileID;
return prevType;
}
}<|fim▁end|>
|
read = std::string("÷").length();
tok.text = "÷";
|
<|file_name|>bot.py<|end_file_name|><|fim▁begin|>from slackminion.bot import Bot
from slackminion.webserver import Webserver<|fim▁hole|>
class DummyBot(Bot):
def __init__(self, *args, **kwargs):
super(DummyBot, self).__init__(None, *args, **kwargs)
setattr(self, 'start', lambda: None)
setattr(self, 'send_message', lambda x, y, z, a: None)
self.webserver = Webserver('127.0.0.1', '9999')<|fim▁end|>
| |
<|file_name|>CollectionFilteringUnitTest.java<|end_file_name|><|fim▁begin|>package com.baeldung.collection.filtering;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
/**
* Various filtering examples.
*
* @author Rodolfo Felipe
*/
public class CollectionFilteringUnitTest {
private List<Employee> buildEmployeeList() {
return Arrays.asList(new Employee(1, "Mike", 1), new Employee(2, "John", 1), new Employee(3, "Mary", 1), new Employee(4, "Joe", 2), new Employee(5, "Nicole", 2), new Employee(6, "Alice", 2), new Employee(7, "Bob", 3), new Employee(8, "Scarlett", 3));
}<|fim▁hole|>
@Test
public void givenEmployeeList_andNameFilterList_thenObtainFilteredEmployeeList_usingForEachLoop() {
List<Employee> filteredList = new ArrayList<>();
List<Employee> originalList = buildEmployeeList();
List<String> nameFilter = employeeNameFilter();
for (Employee employee : originalList) {
for (String name : nameFilter) {
if (employee.getName()
.equalsIgnoreCase(name)) {
filteredList.add(employee);
}
}
}
Assert.assertThat(filteredList.size(), Matchers.is(nameFilter.size()));
}
@Test
public void givenEmployeeList_andNameFilterList_thenObtainFilteredEmployeeList_usingLambda() {
List<Employee> filteredList;
List<Employee> originalList = buildEmployeeList();
List<String> nameFilter = employeeNameFilter();
filteredList = originalList.stream()
.filter(employee -> nameFilter.contains(employee.getName()))
.collect(Collectors.toList());
Assert.assertThat(filteredList.size(), Matchers.is(nameFilter.size()));
}
@Test
public void givenEmployeeList_andNameFilterList_thenObtainFilteredEmployeeList_usingLambdaAndHashSet() {
List<Employee> filteredList;
List<Employee> originalList = buildEmployeeList();
Set<String> nameFilterSet = employeeNameFilter().stream()
.collect(Collectors.toSet());
filteredList = originalList.stream()
.filter(employee -> nameFilterSet.contains(employee.getName()))
.collect(Collectors.toList());
Assert.assertThat(filteredList.size(), Matchers.is(nameFilterSet.size()));
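        // Of the three variants in this class, the nested for-loop and the List.contains
        // stream both cost O(n * m) name comparisons; copying the filter into a HashSet
        // first gives O(1) lookups, i.e. roughly O(n + m) overall.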
}
}<|fim▁end|>
|
private List<String> employeeNameFilter() {
return Arrays.asList("Alice", "Mike", "Bob");
}
|
<|file_name|>expected.js<|end_file_name|><|fim▁begin|><|fim▁hole|>/*before*/"use strict";
/*after*/foo();<|fim▁end|>
| |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>define(function(require) {
var $ = require('./jquery');
var marked = require('./marked');
var prettify = require('./prettify');
var title = document.title;
function initPage() {
marked.setOptions({
highlight: function(code) {
return prettify.prettyPrintOne(escape(code));
}
});
$('.section').each(function() {
$(this).html(marked($(this).children('.markdown').val()));
});
$('.loading').remove();
initShare();
updateView();
}
function initEvent() {
$(document).bind({
WeixinJSBridgeReady: initShare
});
if ('onhashchange' in window) {
$(window).on({
hashchange: function() {
updateView();
}
});
} else {
$('body').on({
click: function() {
if (this.href.indexOf('#') >= 0) {
updateView(this.href.replace(/(?:.*(#\w+)|.*)/, '$1') || '#intro');
}
}
}, 'a');
}
$('.footer .top').on({
click: function() {
window.scrollTo(0, 0);
}
});
}
function initShare() {
if (!window.WeixinJSBridge) {
return;
}
try {
WeixinJSBridge.on('menu:share:appmessage', function(argv) {
WeixinJSBridge.invoke('sendAppMessage', getShareData());
});
WeixinJSBridge.on('menu:share:timeline', function(argv) {
WeixinJSBridge.invoke('shareTimeline', getShareData());
});
} catch (e) {}
}
<|fim▁hole|> desc: $('#intro p').eq(0).text(),
img_url: 'http://tp3.sinaimg.cn/1562087202/180/40038430931/1'
};
}
function updateView(id) {
id = id || location.href.replace(/(?:.*(#\w+)|.*)/, '$1') || '#intro';
$('.section').hide();
document.title = title + ' - ' + $(id).show().find('h2').eq(0).text();
setTimeout(window.scrollTo, 0, 0, 0);
ga('send', 'event', 'section', 'view', id);
}
function escape(code) {
return code
            .replace(/</g, '&lt;')
            .replace(/>/g, '&gt;')
            .replace(/"/g, '&quot;')
            .replace(/'/g, '&#39;');
}
initPage();
initEvent();
});<|fim▁end|>
|
function getShareData() {
return {
title: document.title,
link: document.location.href,
|
<|file_name|>GuiCreateWorld.java<|end_file_name|><|fim▁begin|>package net.minecraft.client.gui;
import java.io.IOException;
import java.util.Random;
import net.minecraft.client.resources.I18n;
import net.minecraft.util.ChatAllowedCharacters;
import net.minecraft.world.GameType;
import net.minecraft.world.WorldSettings;
import net.minecraft.world.WorldType;
import net.minecraft.world.storage.ISaveFormat;
import net.minecraft.world.storage.WorldInfo;
import org.apache.commons.lang3.StringUtils;
import org.lwjgl.input.Keyboard;
public class GuiCreateWorld extends GuiScreen
{
private final GuiScreen parentScreen;
private GuiTextField worldNameField;
private GuiTextField worldSeedField;
private String saveDirName;
private String gameMode = "survival";
/**
* Used to save away the game mode when the current "debug" world type is chosen (forcing it to spectator mode)
*/
private String savedGameMode;
private boolean generateStructuresEnabled = true;
/** If cheats are allowed */
private boolean allowCheats;
/**
* User explicitly clicked "Allow Cheats" at some point
* Prevents value changes due to changing game mode
*/
private boolean allowCheatsWasSetByUser;
private boolean bonusChestEnabled;
/** Set to true when "hardcore" is the currently-selected gamemode */
private boolean hardCoreMode;
private boolean alreadyGenerated;
private boolean inMoreWorldOptionsDisplay;
private GuiButton btnGameMode;
private GuiButton btnMoreOptions;
private GuiButton btnMapFeatures;
private GuiButton btnBonusItems;
private GuiButton btnMapType;
private GuiButton btnAllowCommands;
private GuiButton btnCustomizeType;
private String gameModeDesc1;
private String gameModeDesc2;
private String worldSeed;
private String worldName;
private int selectedIndex;
public String chunkProviderSettingsJson = "";
/** These filenames are known to be restricted on one or more OS's. */
private static final String[] DISALLOWED_FILENAMES = new String[] {"CON", "COM", "PRN", "AUX", "CLOCK$", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"};
public GuiCreateWorld(GuiScreen p_i46320_1_)
{
this.parentScreen = p_i46320_1_;
this.worldSeed = "";
this.worldName = I18n.format("selectWorld.newWorld", new Object[0]);
}
/**
* Called from the main game loop to update the screen.
*/
public void updateScreen()
{
this.worldNameField.updateCursorCounter();
this.worldSeedField.updateCursorCounter();
}
/**
* Adds the buttons (and other controls) to the screen in question. Called when the GUI is displayed and when the
* window resizes, the buttonList is cleared beforehand.
*/
public void initGui()
{
Keyboard.enableRepeatEvents(true);
this.buttonList.clear();
this.buttonList.add(new GuiButton(0, this.width / 2 - 155, this.height - 28, 150, 20, I18n.format("selectWorld.create", new Object[0])));
this.buttonList.add(new GuiButton(1, this.width / 2 + 5, this.height - 28, 150, 20, I18n.format("gui.cancel", new Object[0])));
this.btnGameMode = this.func_189646_b(new GuiButton(2, this.width / 2 - 75, 115, 150, 20, I18n.format("selectWorld.gameMode", new Object[0])));
this.btnMoreOptions = this.func_189646_b(new GuiButton(3, this.width / 2 - 75, 187, 150, 20, I18n.format("selectWorld.moreWorldOptions", new Object[0])));
this.btnMapFeatures = this.func_189646_b(new GuiButton(4, this.width / 2 - 155, 100, 150, 20, I18n.format("selectWorld.mapFeatures", new Object[0])));
this.btnMapFeatures.visible = false;
this.btnBonusItems = this.func_189646_b(new GuiButton(7, this.width / 2 + 5, 151, 150, 20, I18n.format("selectWorld.bonusItems", new Object[0])));
this.btnBonusItems.visible = false;
this.btnMapType = this.func_189646_b(new GuiButton(5, this.width / 2 + 5, 100, 150, 20, I18n.format("selectWorld.mapType", new Object[0])));
this.btnMapType.visible = false;
this.btnAllowCommands = this.func_189646_b(new GuiButton(6, this.width / 2 - 155, 151, 150, 20, I18n.format("selectWorld.allowCommands", new Object[0])));
this.btnAllowCommands.visible = false;
this.btnCustomizeType = this.func_189646_b(new GuiButton(8, this.width / 2 + 5, 120, 150, 20, I18n.format("selectWorld.customizeType", new Object[0])));
this.btnCustomizeType.visible = false;
this.worldNameField = new GuiTextField(9, this.fontRendererObj, this.width / 2 - 100, 60, 200, 20);
this.worldNameField.setFocused(true);
this.worldNameField.setText(this.worldName);
this.worldSeedField = new GuiTextField(10, this.fontRendererObj, this.width / 2 - 100, 60, 200, 20);
this.worldSeedField.setText(this.worldSeed);
this.showMoreWorldOptions(this.inMoreWorldOptionsDisplay);
this.calcSaveDirName();
this.updateDisplayState();
}
/**
* Determine a save-directory name from the world name
*/
private void calcSaveDirName()
{
this.saveDirName = this.worldNameField.getText().trim();
for (char c0 : ChatAllowedCharacters.ILLEGAL_FILE_CHARACTERS)
{
this.saveDirName = this.saveDirName.replace(c0, '_');
}
if (StringUtils.isEmpty(this.saveDirName))
{
this.saveDirName = "World";
}
this.saveDirName = getUncollidingSaveDirName(this.mc.getSaveLoader(), this.saveDirName);
}
/**
* Sets displayed GUI elements according to the current settings state
*/
private void updateDisplayState()
{
this.btnGameMode.displayString = I18n.format("selectWorld.gameMode", new Object[0]) + ": " + I18n.format("selectWorld.gameMode." + this.gameMode, new Object[0]);
this.gameModeDesc1 = I18n.format("selectWorld.gameMode." + this.gameMode + ".line1", new Object[0]);
this.gameModeDesc2 = I18n.format("selectWorld.gameMode." + this.gameMode + ".line2", new Object[0]);
this.btnMapFeatures.displayString = I18n.format("selectWorld.mapFeatures", new Object[0]) + " ";
if (this.generateStructuresEnabled)
{
this.btnMapFeatures.displayString = this.btnMapFeatures.displayString + I18n.format("options.on", new Object[0]);
}
else
{
this.btnMapFeatures.displayString = this.btnMapFeatures.displayString + I18n.format("options.off", new Object[0]);
}
this.btnBonusItems.displayString = I18n.format("selectWorld.bonusItems", new Object[0]) + " ";
if (this.bonusChestEnabled && !this.hardCoreMode)
{
this.btnBonusItems.displayString = this.btnBonusItems.displayString + I18n.format("options.on", new Object[0]);
}
else
{
this.btnBonusItems.displayString = this.btnBonusItems.displayString + I18n.format("options.off", new Object[0]);
}
this.btnMapType.displayString = I18n.format("selectWorld.mapType", new Object[0]) + " " + I18n.format(WorldType.WORLD_TYPES[this.selectedIndex].getTranslateName(), new Object[0]);
this.btnAllowCommands.displayString = I18n.format("selectWorld.allowCommands", new Object[0]) + " ";
if (this.allowCheats && !this.hardCoreMode)
{
this.btnAllowCommands.displayString = this.btnAllowCommands.displayString + I18n.format("options.on", new Object[0]);
}
else
{
this.btnAllowCommands.displayString = this.btnAllowCommands.displayString + I18n.format("options.off", new Object[0]);
}
}
/**
* Ensures that a proposed directory name doesn't collide with existing names.
* Returns the name, possibly modified to avoid collisions.
*/
public static String getUncollidingSaveDirName(ISaveFormat saveLoader, String name)
{
name = name.replaceAll("[\\./\"]", "_");
for (String s : DISALLOWED_FILENAMES)
{
if (name.equalsIgnoreCase(s))
{
name = "_" + name + "_";
}
}
while (saveLoader.getWorldInfo(name) != null)
{
name = name + "-";
}
return name;
}
/**
* Called when the screen is unloaded. Used to disable keyboard repeat events
*/
public void onGuiClosed()
{
Keyboard.enableRepeatEvents(false);
}
/**
* Called by the controls from the buttonList when activated. (Mouse pressed for buttons)
*/
protected void actionPerformed(GuiButton button) throws IOException
{
if (button.enabled)
{
if (button.id == 1)
{
this.mc.displayGuiScreen(this.parentScreen);
}
else if (button.id == 0)
{
this.mc.displayGuiScreen((GuiScreen)null);
if (this.alreadyGenerated)
{
return;
}
this.alreadyGenerated = true;
long i = (new Random()).nextLong();
String s = this.worldSeedField.getText();
if (!StringUtils.isEmpty(s))
{
try
{
long j = Long.parseLong(s);
if (j != 0L)
{
i = j;
}
}
catch (NumberFormatException var7)
{
i = (long)s.hashCode();
}
}
WorldSettings worldsettings = new WorldSettings(i, GameType.getByName(this.gameMode), this.generateStructuresEnabled, this.hardCoreMode, WorldType.WORLD_TYPES[this.selectedIndex]);
worldsettings.setGeneratorOptions(this.chunkProviderSettingsJson);
if (this.bonusChestEnabled && !this.hardCoreMode)
{
worldsettings.enableBonusChest();
}
if (this.allowCheats && !this.hardCoreMode)
{
worldsettings.enableCommands();
}
this.mc.launchIntegratedServer(this.saveDirName, this.worldNameField.getText().trim(), worldsettings);
}
else if (button.id == 3)
{
this.toggleMoreWorldOptions();
}
else if (button.id == 2)
{
if ("survival".equals(this.gameMode))
{
if (!this.allowCheatsWasSetByUser)
{
this.allowCheats = false;
}
this.hardCoreMode = false;
this.gameMode = "hardcore";
this.hardCoreMode = true;
this.btnAllowCommands.enabled = false;
this.btnBonusItems.enabled = false;
this.updateDisplayState();
}
else if ("hardcore".equals(this.gameMode))
{
if (!this.allowCheatsWasSetByUser)
{
this.allowCheats = true;
}
this.hardCoreMode = false;
this.gameMode = "creative";
this.updateDisplayState();
this.hardCoreMode = false;
this.btnAllowCommands.enabled = true;
this.btnBonusItems.enabled = true;
}
else
{
if (!this.allowCheatsWasSetByUser)
{
this.allowCheats = false;
}
this.gameMode = "survival";
this.updateDisplayState();
this.btnAllowCommands.enabled = true;
this.btnBonusItems.enabled = true;
this.hardCoreMode = false;
}
this.updateDisplayState();
}
else if (button.id == 4)
{
this.generateStructuresEnabled = !this.generateStructuresEnabled;
this.updateDisplayState();
}
else if (button.id == 7)
{
this.bonusChestEnabled = !this.bonusChestEnabled;
this.updateDisplayState();
}
else if (button.id == 5)
{
++this.selectedIndex;
if (this.selectedIndex >= WorldType.WORLD_TYPES.length)
{
this.selectedIndex = 0;
}
while (!this.canSelectCurWorldType())
{
++this.selectedIndex;
if (this.selectedIndex >= WorldType.WORLD_TYPES.length)
{
this.selectedIndex = 0;
}
}
this.chunkProviderSettingsJson = "";
this.updateDisplayState();
this.showMoreWorldOptions(this.inMoreWorldOptionsDisplay);
}
else if (button.id == 6)
{
this.allowCheatsWasSetByUser = true;
this.allowCheats = !this.allowCheats;
this.updateDisplayState();
}
else if (button.id == 8)<|fim▁hole|> {
this.mc.displayGuiScreen(new GuiCreateFlatWorld(this, this.chunkProviderSettingsJson));
}
else
{
this.mc.displayGuiScreen(new GuiCustomizeWorldScreen(this, this.chunkProviderSettingsJson));
}
}
}
}
/**
* Returns whether the currently-selected world type is actually acceptable for selection
* Used to hide the "debug" world type unless the shift key is depressed.
*/
private boolean canSelectCurWorldType()
{
WorldType worldtype = WorldType.WORLD_TYPES[this.selectedIndex];
return worldtype != null && worldtype.getCanBeCreated() ? (worldtype == WorldType.DEBUG_WORLD ? isShiftKeyDown() : true) : false;
}
/**
* Toggles between initial world-creation display, and "more options" display.
* Called when user clicks "More World Options..." or "Done" (same button, different labels depending on current
* display).
*/
private void toggleMoreWorldOptions()
{
this.showMoreWorldOptions(!this.inMoreWorldOptionsDisplay);
}
/**
* Shows additional world-creation options if toggle is true, otherwise shows main world-creation elements
*/
private void showMoreWorldOptions(boolean toggle)
{
this.inMoreWorldOptionsDisplay = toggle;
if (WorldType.WORLD_TYPES[this.selectedIndex] == WorldType.DEBUG_WORLD)
{
this.btnGameMode.visible = !this.inMoreWorldOptionsDisplay;
this.btnGameMode.enabled = false;
if (this.savedGameMode == null)
{
this.savedGameMode = this.gameMode;
}
this.gameMode = "spectator";
this.btnMapFeatures.visible = false;
this.btnBonusItems.visible = false;
this.btnMapType.visible = this.inMoreWorldOptionsDisplay;
this.btnAllowCommands.visible = false;
this.btnCustomizeType.visible = false;
}
else
{
this.btnGameMode.visible = !this.inMoreWorldOptionsDisplay;
this.btnGameMode.enabled = true;
if (this.savedGameMode != null)
{
this.gameMode = this.savedGameMode;
this.savedGameMode = null;
}
this.btnMapFeatures.visible = this.inMoreWorldOptionsDisplay && WorldType.WORLD_TYPES[this.selectedIndex] != WorldType.CUSTOMIZED;
this.btnBonusItems.visible = this.inMoreWorldOptionsDisplay;
this.btnMapType.visible = this.inMoreWorldOptionsDisplay;
this.btnAllowCommands.visible = this.inMoreWorldOptionsDisplay;
this.btnCustomizeType.visible = this.inMoreWorldOptionsDisplay && (WorldType.WORLD_TYPES[this.selectedIndex] == WorldType.FLAT || WorldType.WORLD_TYPES[this.selectedIndex] == WorldType.CUSTOMIZED);
}
this.updateDisplayState();
if (this.inMoreWorldOptionsDisplay)
{
this.btnMoreOptions.displayString = I18n.format("gui.done", new Object[0]);
}
else
{
this.btnMoreOptions.displayString = I18n.format("selectWorld.moreWorldOptions", new Object[0]);
}
}
/**
* Fired when a key is typed (except F11 which toggles full screen). This is the equivalent of
* KeyListener.keyTyped(KeyEvent e). Args : character (character on the key), keyCode (lwjgl Keyboard key code)
*/
protected void keyTyped(char typedChar, int keyCode) throws IOException
{
if (this.worldNameField.isFocused() && !this.inMoreWorldOptionsDisplay)
{
this.worldNameField.textboxKeyTyped(typedChar, keyCode);
this.worldName = this.worldNameField.getText();
}
else if (this.worldSeedField.isFocused() && this.inMoreWorldOptionsDisplay)
{
this.worldSeedField.textboxKeyTyped(typedChar, keyCode);
this.worldSeed = this.worldSeedField.getText();
}
if (keyCode == 28 || keyCode == 156)
{
this.actionPerformed((GuiButton)this.buttonList.get(0));
}
((GuiButton)this.buttonList.get(0)).enabled = !this.worldNameField.getText().isEmpty();
this.calcSaveDirName();
}
/**
* Called when the mouse is clicked. Args : mouseX, mouseY, clickedButton
*/
protected void mouseClicked(int mouseX, int mouseY, int mouseButton) throws IOException
{
super.mouseClicked(mouseX, mouseY, mouseButton);
if (this.inMoreWorldOptionsDisplay)
{
this.worldSeedField.mouseClicked(mouseX, mouseY, mouseButton);
}
else
{
this.worldNameField.mouseClicked(mouseX, mouseY, mouseButton);
}
}
/**
* Draws the screen and all the components in it.
*/
public void drawScreen(int mouseX, int mouseY, float partialTicks)
{
this.drawDefaultBackground();
this.drawCenteredString(this.fontRendererObj, I18n.format("selectWorld.create", new Object[0]), this.width / 2, 20, -1);
if (this.inMoreWorldOptionsDisplay)
{
this.drawString(this.fontRendererObj, I18n.format("selectWorld.enterSeed", new Object[0]), this.width / 2 - 100, 47, -6250336);
this.drawString(this.fontRendererObj, I18n.format("selectWorld.seedInfo", new Object[0]), this.width / 2 - 100, 85, -6250336);
if (this.btnMapFeatures.visible)
{
this.drawString(this.fontRendererObj, I18n.format("selectWorld.mapFeatures.info", new Object[0]), this.width / 2 - 150, 122, -6250336);
}
if (this.btnAllowCommands.visible)
{
this.drawString(this.fontRendererObj, I18n.format("selectWorld.allowCommands.info", new Object[0]), this.width / 2 - 150, 172, -6250336);
}
this.worldSeedField.drawTextBox();
if (WorldType.WORLD_TYPES[this.selectedIndex].showWorldInfoNotice())
{
this.fontRendererObj.drawSplitString(I18n.format(WorldType.WORLD_TYPES[this.selectedIndex].getTranslatedInfo(), new Object[0]), this.btnMapType.xPosition + 2, this.btnMapType.yPosition + 22, this.btnMapType.getButtonWidth(), 10526880);
}
}
else
{
this.drawString(this.fontRendererObj, I18n.format("selectWorld.enterName", new Object[0]), this.width / 2 - 100, 47, -6250336);
this.drawString(this.fontRendererObj, I18n.format("selectWorld.resultFolder", new Object[0]) + " " + this.saveDirName, this.width / 2 - 100, 85, -6250336);
this.worldNameField.drawTextBox();
this.drawString(this.fontRendererObj, this.gameModeDesc1, this.width / 2 - 100, 137, -6250336);
this.drawString(this.fontRendererObj, this.gameModeDesc2, this.width / 2 - 100, 149, -6250336);
}
super.drawScreen(mouseX, mouseY, partialTicks);
}
/**
* Set the initial values of a new world to create, from the values from an existing world.
*
* Called after construction when a user selects the "Recreate" button.
*/
public void recreateFromExistingWorld(WorldInfo original)
{
this.worldName = I18n.format("selectWorld.newWorld.copyOf", new Object[] {original.getWorldName()});
this.worldSeed = original.getSeed() + "";
this.selectedIndex = original.getTerrainType().getWorldTypeID();
this.chunkProviderSettingsJson = original.getGeneratorOptions();
this.generateStructuresEnabled = original.isMapFeaturesEnabled();
this.allowCheats = original.areCommandsAllowed();
if (original.isHardcoreModeEnabled())
{
this.gameMode = "hardcore";
}
else if (original.getGameType().isSurvivalOrAdventure())
{
this.gameMode = "survival";
}
else if (original.getGameType().isCreative())
{
this.gameMode = "creative";
}
}
}<|fim▁end|>
|
{
if (WorldType.WORLD_TYPES[this.selectedIndex] == WorldType.FLAT)
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2020-2021 Jason Ish
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
use std::sync::Arc;
use serde::Serialize;
pub(crate) use main::build_axum_server;
pub use main::build_context;
pub use main::main;
use session::SessionStore;
use crate::datastore::Datastore;
use crate::sqlite::configrepo::ConfigRepo;
pub mod api;
mod asset;
mod filters;
mod main;
mod rejection;
mod response;
pub mod session;
#[derive(Debug, Clone, PartialEq)]
pub enum AuthenticationType {
Anonymous,
Username,
UsernamePassword,
}
impl ToString for AuthenticationType {
fn to_string(&self) -> String {
let s = match self {
AuthenticationType::Anonymous => "anonymous",
AuthenticationType::Username => "username",
AuthenticationType::UsernamePassword => "usernamepassword",
};
s.to_string()
}
}
impl Default for AuthenticationType {
fn default() -> Self {
Self::Anonymous
}
}
#[derive(Serialize, Default, Debug)]
pub struct Features {
pub comments: bool,
pub reporting: bool,
}<|fim▁hole|> pub features: Features,
pub session_store: session::SessionStore,
pub config_repo: Arc<ConfigRepo>,
pub event_services: Option<serde_json::Value>,
}
impl ServerContext {
pub fn new(config: ServerConfig, config_repo: Arc<ConfigRepo>, datastore: Datastore) -> Self {
Self {
config: config,
datastore,
features: Features::default(),
session_store: SessionStore::new(),
config_repo: config_repo,
event_services: None,
}
}
}
#[derive(Debug, Default, Clone)]
pub struct ServerConfig {
pub host: String,
pub port: String,
pub no_check_certificate: bool,
pub datastore: String,
pub sqlite_filename: Option<String>,
pub tls_enabled: bool,
pub tls_cert_filename: Option<String>,
pub tls_key_filename: Option<String>,
pub elastic_url: String,
pub elastic_index: String,
pub elastic_no_index_suffix: bool,
pub elastic_username: Option<String>,
pub elastic_password: Option<String>,
pub elastic_ecs: bool,
pub data_directory: Option<String>,
pub authentication_required: bool,
pub authentication_type: AuthenticationType,
pub database_retention_period: Option<u64>,
pub http_reverse_proxy: bool,
pub http_request_logging: bool,
}<|fim▁end|>
|
pub struct ServerContext {
pub config: ServerConfig,
pub datastore: Datastore,
|
<|file_name|>MUHSetLong.java<|end_file_name|><|fim▁begin|>package sk.util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sk.mmap.Constants;
import sk.mmap.IUnsafeAllocator;
import java.util.Map;
import java.util.HashMap;
// Memory mapped hash set of Long
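// Usage sketch (hypothetical allocator setup, not part of the original file):
//   IUnsafeAllocator allocator = ...; // obtain a memory-mapped allocator
//   MUHSetLong set = new MUHSetLong(allocator, 1 << 16);
//   set.put(42L);
//   boolean present = set.get(42L) != Constants.NULL;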
public class MUHSetLong {
private static final Logger logger = LoggerFactory.getLogger(MUHSetLong.class);
private final int numBuckets;
private final IUnsafeAllocator allocator;
private final MBUArrayListL buckets;
// TODO: Implement rehashing when table is full to avoid adding lots of elements in list
public MUHSetLong(final IUnsafeAllocator allocator, final int numBuckets) {
this.allocator = allocator;
this.numBuckets = numBuckets;
buckets = new MBUArrayListL(allocator, numBuckets);
// Initialize all buckets with NULL
for (int i = 0; i < numBuckets; i++) {
buckets.add(Constants.NULL);
}
}
public void debug() {
logger.debug("Printing details of Hash table");
final Map<Integer, Integer> elems2buckets = new HashMap<>();
for (int i = 0; i < buckets.size(); i++) {
final long list = buckets.get(i);
int elems = 0;
if (list != Constants.NULL) {
long node = MULinkedListL.getFirst(allocator, list);<|fim▁hole|> // If list is non-NULL, search in the MULinkedListL
while (node != Constants.NULL) {
++elems;
node = MULinkedListL.getNext(allocator, node);
}
/*
if (elems > 1) {
logger.debug("Bucket " + i + " has " + elems + " elements");
}
*/
}
if (elems2buckets.containsKey(elems)) {
elems2buckets.put(elems, elems2buckets.get(elems) + 1);
} else {
elems2buckets.put(elems, 1);
}
}
elems2buckets.forEach((key, val) -> {
logger.debug(val + " buckets have " + key + " elements");
});
logger.debug("End printing details of Hash table");
}
private long getBucket(final long key, final boolean create) {
final long hash = JenkinsHash.hash64(key);
        // floorMod avoids a negative bucket index when the 64-bit hash is negative.
        final int index = (int) Math.floorMod(hash, (long) numBuckets);
if (create) {
// logger.debug("Key " + key + " : Hash " + hash + " : index " + index);
}
long list = buckets.get(index);
if (create && list == Constants.NULL) {
list = MULinkedListL.create(allocator);
buckets.put(index, list);
}
return list;
}
public void put(final long value) {
long list = getBucket(value, true);
MULinkedListL.add(allocator, list, value);
}
public long get(final long key) {
long value = Constants.NULL;
final long list = getBucket(key, false);
if (list != Constants.NULL) {
long node = MULinkedListL.getFirst(allocator, list);
// If list is non-NULL, search in the MULinkedListL
while (node != Constants.NULL) {
final long tmp = MULinkedListL.getL(allocator, node);
if (tmp == key) {
value = tmp;
break;
}
node = MULinkedListL.getNext(allocator, node);
}
}
return value;
}
}<|fim▁end|>
| |
<|file_name|>algo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Sat Apr 22 16:47:12 2017
@author: Jasmin
"""
from parser import *
#from parser import read_data
#from parser import read_params
#import parser
from Methode2 import EvalWeightedSumInteract
from Methode2 import *
from Methode1 import *
from algo3 import *
#oui
def algorithme_1(File) :
data_brute = read_data(File)
(utilite, poids, mu_i, mu_ij, columns, N, Lambda, norm, nbProfiles, maximiser) = read_params(File)
    # check that the parameters are valid
if (not param_Valide_Algo_1(poids)) :
print('poids non valides')
# return False
data_brute = traite_data(data_brute, poids)
# print(data_brute)
data = normalize(data_brute, normalisation=norm)
data['score'] = -1
for idx in data.index :
print("SHAPEEEEE", data.loc[idx].shape)
# if (data.loc[idx].shape[0] < 2) :
data.loc[idx, 'score'] = sommePonderee(data.loc[idx, data.columns[:-1]], poids)
data.sort_values('score', inplace=True, ascending=True)
data = data.reset_index()
# data.to_csv("resultat.csv")
res = []
for idx in data.index :
l = {}
line = data.loc[idx]
for x in data.columns :
l[x] = line[x]
res = res + [l]
print(data)
print('RES ')
print(res)
return res
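# Dispatch used by algorithme() below (inferred from the branches):
#   "1" -> weighted sum, "2" -> weighted sum with interaction terms,
#   "3" -> optimistic assignment to profiles, "4" -> pessimistic assignment.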
def algorithme(File) :
if (File["Method"] == "1") :
return algorithme_1(File)
if (File["Method"] == "2") :
return algorithme_2(File)
if (File["Method"] == "3") :
return algorithme_3_Optimiste(File)
if (File["Method"] == "4") :
return algorithme_3_Pessimiste(File)
def algorithme_2(File) :
data_brute = read_data(File)
(utilite, poids, mu_i, mu_ij, columns, N, Lambda, norm, nbProfiles, maximiser) = read_params(File)
V = v(mu_i, mu_ij, N)
I = i(mu_i, mu_ij, N)
    # check that the parameters are valid
if (not param_Valide_Algo_2(V, mu_i, mu_ij, I, poids, N)) :
print('parametres non valides')
# return False
#print(utilite, normalise)
data_brute = traite_data(data_brute, poids)
data = normalize(data_brute, normalisation=norm)
data['score'] = -1
for idx in data.index :
data.loc[idx, 'score'] = EvalWeightedSumInteract(data.loc[idx, data.columns[:-1]], poids, I, N)
data.sort_values('score', inplace=True, ascending=True)
data = data.reset_index()
# data.to_csv("resultat.csv")
res = []
for idx in data.index :
l = {}
line = data.loc[idx]
for x in data.columns :
l[x] = line[x]
res = res + [l]
print(res)
return res
def algorithme_3_Optimiste(File) :
data_brute = read_data(File)
profiles = read_profiles(File)
print("________PROFILES________", profiles)
(utilite, poids, mu_i, mu_ij, columns, N, Lambda, norm, nbProfiles, maximiser) = read_params(File)
    # check that the parameters are valid
if (not param_Valide_Algo_3(Lambda)) :
print('parametres non valides')
# return False
<|fim▁hole|>
data['classe'] = -1
for idx in data.index :
data.loc[idx, 'classe'] = EvalOptimiste(data.loc[idx, data.columns[:-1]], profiles, maximiser, poids, Lambda)
data.sort_values('classe', inplace=True, ascending=True)
data = data.reset_index()
# data.to_csv("resultat.csv")
res = []
for idx in data.index :
l = {}
line = data.loc[idx]
for x in data.columns :
l[x] = line[x]
res = res + [l]
print(res)
return res
def algorithme_3_Pessimiste(File) :
data_brute = read_data(File)
profiles = read_profiles(File)
(utilite, poids, mu_i, mu_ij, columns, N, Lambda, norm, nbProfiles, maximiser) = read_params(File)
    # check that the parameters are valid
if (not param_Valide_Algo_3(Lambda)) :
print('parametres non valides')
# return False
data_brute = traite_data(data_brute, poids)
profiles = traite_data(profiles, poids)
data = normalize(data_brute, normalisation=norm)
data['classe'] = -1
for idx in data.index :
data.loc[idx, 'classe'] = EvalPessimiste(data.loc[idx, data.columns[:-1]], profiles, maximiser, poids, Lambda)
data.sort_values('classe', inplace=True, ascending=True)
data = data.reset_index()
# data.to_csv("resultat.csv")
res = []
for idx in data.index :
l = {}
line = data.loc[idx]
for x in data.columns :
l[x] = line[x]
res = res + [l]
print(res)
return res
algorithme(File)<|fim▁end|>
|
profiles = traite_data(profiles, poids)
data_brute = traite_data(data_brute, poids)
data = normalize(data_brute, normalisation=norm)
|
<|file_name|>floatf.rs<|end_file_name|><|fim▁begin|>//! formatter for %f %F common-notation floating-point subs
use super::super::format_field::FormatField;
use super::super::formatter::{InPrefix, FormatPrimitive, Formatter};
use super::float_common::{FloatAnalysis, get_primitive_dec, primitive_to_str_common};
pub struct Floatf {
as_num: f64,
}
impl Floatf {
pub fn new() -> Floatf {
Floatf { as_num: 0.0 }
}<|fim▁hole|> field: &FormatField,
inprefix: &InPrefix,
str_in: &str)
-> Option<FormatPrimitive> {
let second_field = field.second_field.unwrap_or(6) + 1;
let analysis = FloatAnalysis::analyze(&str_in,
inprefix,
None,
Some(second_field as usize),
false);
let f = get_primitive_dec(inprefix,
&str_in[inprefix.offset..],
&analysis,
second_field as usize,
None);
Some(f)
}
fn primitive_to_str(&self, prim: &FormatPrimitive, field: FormatField) -> String {
primitive_to_str_common(prim, &field)
}
}<|fim▁end|>
|
}
impl Formatter for Floatf {
fn get_primitive(&self,
|
<|file_name|>bitcoin_et.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="et" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Unicorncoin</source>
<translation>Unicorncoinist</translation>
</message>
<message>
<location line="+39"/>
<source><b>Unicorncoin</b> version</source>
<translation><b>Unicorncoini</b> versioon</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>⏎
See on eksperimentaalne tarkvara.⏎
⏎
Levitatud MIT/X11 tarkvara litsentsi all, vaata kaasasolevat faili COPYING või http://www.opensource.org/licenses/mit-license.php⏎
⏎
Toode sisaldab OpenSSL Projekti all toodetud tarkvara, mis on kasutamiseks OpenSSL Toolkitis (http://www.openssl.org/) ja Eric Young'i poolt loodud krüptograafilist tarkvara ([email protected]) ning Thomas Bernard'i loodud UPnP tarkvara.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>Autoriõigus</translation>
</message>
<message>
<location line="+0"/>
<source>The Litecoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Aadressiraamat</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Topeltklõps aadressi või märgise muutmiseks</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Loo uus aadress</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopeeri märgistatud aadress vahemällu</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Uus aadress</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Unicorncoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Maksete saamiseks kasutatavad Unicorncoini aadressid. Maksjate paremaks jälgimiseks võib igaühele anda erineva.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Aadressi kopeerimine</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
        <translation>Kuva &QR kood</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Unicorncoin address</source>
        <translation>Allkirjasta sõnum, et tõestada Unicorncoini aadressi olemasolu.</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Allkirjasta &Sõnum</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Kustuta märgistatud aadress loetelust</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Unicorncoin address</source>
<translation>Kinnita sõnum tõestamaks selle allkirjastatust määratud Unicorncoini aadressiga.</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Kinnita Sõnum</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Kustuta</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Unicorncoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Need on sinu Unicorncoini aadressid maksete saatmiseks. Müntide saatmisel kontrolli alati summat ning saaja aadressi.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>&Märgise kopeerimine</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Muuda</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>Saada &Münte</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Ekspordi Aadressiraamat</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Komaeraldatud fail (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Viga eksportimisel</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Tõrge faili kirjutamisel %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Silt</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Aadress</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(silti pole)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Salafraasi dialoog</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Sisesta salafraas</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Uus salafraas</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Korda salafraasi</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Sisesta rahakotile uus salafraas.<br/>Palun kasuta salafraasina <b>vähemalt 10 tähte/numbrit/sümbolit</b>, või <b>vähemalt 8 sõna</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Krüpteeri rahakott</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>See toiming nõuab sinu rahakoti salafraasi.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Tee rahakott lukust lahti.</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>See toiming nõuab sinu rahakoti salafraasi.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dekrüpteeri rahakott.</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Muuda salafraasi</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Sisesta rahakoti vana ning uus salafraas.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Kinnita rahakoti krüpteering</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR LITECOINS</b>!</source>
<translation>Hoiatus: Kui sa kaotad oma, rahakoti krüpteerimisel kasutatud, salafraasi, siis <b>KAOTAD KA KÕIK OMA LITECOINID</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Kas soovid oma rahakoti krüpteerida?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>TÄHTIS: Kõik varasemad rahakoti varundfailid tuleks üle kirjutada äsja loodud krüpteeritud rahakoti failiga. Turvakaalutlustel tühistatakse krüpteerimata rahakoti failid alates uue, krüpteeritud rahakoti, kasutusele võtust.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Hoiatus: Caps Lock on sisse lülitatud!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Rahakott krüpteeritud</translation>
</message>
<message>
<location line="-56"/>
<source>Unicorncoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your unicorncoins from being stolen by malware infecting your computer.</source>
<translation>Unicorncoin sulgub krüpteeringu lõpetamiseks. Pea meeles, et rahakoti krüpteerimine ei välista unicorncoinide vargust, kui sinu arvuti on nakatunud pahavaraga.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Tõrge rahakoti krüpteerimisel</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Rahakoti krüpteering ebaõnnestus tõrke tõttu. Sinu rahakotti ei krüpteeritud.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>Salafraasid ei kattu.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Rahakoti avamine ebaõnnestus</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Rahakoti salafraas ei ole õige.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Rahakoti dekrüpteerimine ei õnnestunud</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Rahakoti salafraasi muutmine õnnestus.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>Signeeri &sõnum</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Võrgusünkimine...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Ülevaade</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Kuva rahakoti üld-ülevaade</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Tehingud</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Sirvi tehingute ajalugu</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Salvestatud aadresside ja märgiste loetelu muutmine</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Kuva saadud maksete aadresside loetelu</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>V&älju</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Väljumine</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Unicorncoin</source>
<translation>Kuva info Unicorncoini kohta</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Teave &Qt kohta</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Kuva Qt kohta käiv info</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Valikud...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Krüpteeri Rahakott</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Varunda Rahakott</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Salafraasi muutmine</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>Impordi blokid kettalt...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Kettal olevate blokkide re-indekseerimine...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Unicorncoin address</source>
<translation>Saada münte Unicorncoini aadressile</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Unicorncoin</source>
<translation>Muuda Unicorncoini seadeid</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Varunda rahakott teise asukohta</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Rahakoti krüpteerimise salafraasi muutmine</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>&Debugimise aken</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Ava debugimise ja diagnostika konsool</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Kontrolli sõnumit...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Unicorncoin</source>
<translation>Unicorncoin</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Rahakott</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>&Saada</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>&Saama</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>&Aadressid</translation>
</message>
<message>
<location line="+22"/>
<source>&About Unicorncoin</source>
        <translation>&Unicorncoinist</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Näita / Peida</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Näita või peida peaaken</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Krüpteeri oma rahakoti privaatvõtmed</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Unicorncoin addresses to prove you own them</source>
<translation>Omandi tõestamiseks allkirjasta sõnumid oma Unicorncoini aadressiga</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Unicorncoin addresses</source>
<translation>Kinnita sõnumid kindlustamaks et need allkirjastati määratud Unicorncoini aadressiga</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Fail</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Seaded</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Abi</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Vahelehe tööriistariba</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>Unicorncoin client</source>
<translation>Unicorncoini klient</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Unicorncoin network</source>
<translation><numerusform>%n aktiivne ühendus Unicorncoini võrku</numerusform><numerusform>%n aktiivset ühendust Unicorncoini võrku</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>Protsessitud %1 (arvutuslikult) tehingu ajaloo blokki %2-st.</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>Protsessitud %1 tehingute ajaloo blokki.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n tund</numerusform><numerusform>%n tundi</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n päev</numerusform><numerusform>%n päeva</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n nädal</numerusform><numerusform>%n nädalat</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>%1 maas</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>Viimane saabunud blokk loodi %1 tagasi.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Peale seda ei ole tehingud veel nähtavad.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>Tõrge</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>Hoiatus</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>Informatsioon</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>See tehing ületab mahupiirangu. Saatmine on võimalik %1, node'idele ning võrgustiku toetuseks, makstava lisatasu eest. Kas nõustud lisatasuga?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Ajakohane</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Jõuan...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Kinnita tehingu tasu</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Saadetud tehing</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Sisenev tehing</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Kuupäev: %1⏎
Summa: %2⏎
Tüüp: %3⏎
Aadress: %4⏎</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>URI käsitsemine</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Unicorncoin address or malformed URI parameters.</source>
<translation>URI ei suudeta parsida. Põhjuseks võib olla kehtetu Unicorncoini aadress või vigased URI parameetrid.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Rahakott on <b>krüpteeritud</b> ning hetkel <b>avatud</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Rahakott on <b>krüpteeritud</b> ning hetkel <b>suletud</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Unicorncoin can no longer continue safely and will quit.</source>
<translation>Ilmnes kriitiline tõrge. Unicorncoin suletakse turvakaalutluste tõttu.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Võrgu Häire</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Muuda aadressi</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Märgis</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>Selle aadressiraamatu kirjega seotud märgis</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Aadress</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Selle aadressiraamatu kirjega seotud aadress. Võimalik muuta ainult aadresside saatmiseks.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Uus sissetulev aadress</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Uus väljaminev aadress</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Sissetulevate aadresside muutmine</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Väljaminevate aadresside muutmine</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Selline aadress on juba olemas: "%1"</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Unicorncoin address.</source>
<translation>Sisestatud aadress "%1" ei ole Unicorncoinis kehtiv.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Rahakotti ei avatud</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Tõrge uue võtme loomisel.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Unicorncoin-Qt</source>
<translation>Unicorncoini-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versioon</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Kasutus:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>käsurea valikud</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>UI valikud</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Keele valik, nt "ee_ET" (vaikeväärtus: system locale)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Käivitu tegumiribale</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Käivitamisel teabeakna kuvamine (vaikeväärtus: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Valikud</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
        <translation>&Peamine</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Tasu tehingu &fee</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Unicorncoin after logging in to the system.</source>
<translation>Käivita Unicorncoin süsteemi logimisel.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Unicorncoin on system login</source>
<translation>&Start Unicorncoin sisselogimisel</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>Taasta kõik klientprogrammi seadete vaikeväärtused.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>&Lähtesta valikud</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>&Võrk</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Unicorncoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Unicorncoini kliendi pordi automaatne avamine ruuteris. Toimib, kui sinu ruuter aktsepteerib UPnP ühendust.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Suuna port &UPnP kaudu</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Unicorncoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Kasuta Unicorncoini võrgustikku ühendumiseks SOCKS turva proxy't (nt Tor'i kasutamisel).</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
        <translation>&Connect läbi turva proxi:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>Proxi &IP:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>Proxi IP (nt 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Proxi port (nt 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>Turva proxi SOCKS &Version:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Turva proxi SOCKS versioon (nt 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Aken</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Minimeeri systray alale.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimeeri systray alale</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Sulgemise asemel minimeeri aken. Selle valiku tegemisel suletakse programm Menüüst "Välju" käsuga.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimeeri sulgemisel</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Kuva</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>Kasutajaliidese &keel:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Unicorncoin.</source>
<translation>Kasutajaliidese keele valimise koht. Valik rakendub Unicorncoini käivitamisel.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>Summade kuvamise &Unit:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Vali liideses ning müntide saatmisel kuvatav vaikimisi alajaotus.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Unicorncoin addresses in the transaction list or not.</source>
<translation>Kuvada Unicorncoini aadress tehingute loetelus või mitte.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>Tehingute loetelu &Display aadress</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Katkesta</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Rakenda</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>vaikeväärtus</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>Kinnita valikute algseadistamine</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>Mõned seadete muudatused rakenduvad programmi käivitumisel.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>Kas soovid jätkata?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Hoiatus</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Unicorncoin.</source>
<translation>Tehtud valik rakendub Unicorncoini käivitamisel.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>Sisestatud kehtetu proxy aadress.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Vorm</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Unicorncoin network after a connection is established, but this process has not completed yet.</source>
        <translation>Kuvatav info ei pruugi olla ajakohane. Ühenduse loomisel süngitakse sinu rahakott automaatselt Unicorncoini võrgustikuga, kuid see toiming on hetkel lõpetamata.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Jääk:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Kinnitamata:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Rahakott</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>Ebaküps:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Mitte aegunud mine'itud jääk</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Uuesti saadetud tehingud</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Sinu jääk hetkel</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Kinnitamata tehingud kokku. Ei kajastu hetke jäägis</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>sünkimata</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start unicorncoin: click-to-pay handler</source>
<translation>Unicorncoin ei käivitu: vajuta-maksa toiming</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>QR koodi dialoog</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Makse taotlus</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Summa:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Märgis:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Sõnum:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Salvesta nimega...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>Tõrge URI'st QR koodi loomisel</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>Sisestatud summa on vale, palun kontrolli.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Tulemuseks on liiga pikk URL, püüa lühendada märgise/teate teksti.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Salvesta QR kood</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>PNG pildifail (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Kliendi nimi</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Kliendi versioon</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informatsioon</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Kasutan OpenSSL versiooni</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Käivitamise hetk</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Võrgustik</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Ühenduste arv</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>Testnetis</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Ploki jada</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Plokkide hetkearv</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Ligikaudne plokkide kogus</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Viimane ploki aeg</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Ava</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>Käsurea valikud</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Unicorncoin-Qt help message to get a list with possible Unicorncoin command-line options.</source>
<translation>Näita kehtivate käsurea valikute kuvamiseks Unicorncoini-Qt abiteksti</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&Kuva</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Konsool</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Valmistusaeg</translation>
</message>
<message>
<location line="-104"/>
<source>Unicorncoin - Debug window</source>
<translation>Unicorncoin - debugimise aken</translation>
</message>
<message>
<location line="+25"/>
<source>Unicorncoin Core</source>
<translation>Unicorncoini tuumik</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Debugimise logifail</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Unicorncoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Ava Unicorncoini logifail praegusest andmekaustast. Toiminguks võib kuluda kuni mõni sekund.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Puhasta konsool</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Unicorncoin RPC console.</source>
<translation>Teretulemast Unicorncoini RPC konsooli.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Ajaloo sirvimiseks kasuta üles ja alla nooli, ekraani puhastamiseks <b>Ctrl-L</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Ülevaateks võimalikest käsklustest trüki <b>help</b>.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Müntide saatmine</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Saatmine mitmele korraga</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Lisa &Saaja</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Eemalda kõik tehingu väljad</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Puhasta &Kõik</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Jääk:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123,456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Saatmise kinnitamine</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>S&aada</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> kuni %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Müntide saatmise kinnitamine</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Soovid kindlasti saata %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
        <translation> ja </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Saaja aadress ei ole kehtiv, palun kontrolli.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Makstav summa peab olema suurem kui 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Summa ületab jäägi.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Summa koos tehingu tasuga %1 ületab sinu jääki.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Ühe saatmisega topelt-adressaati olla ei tohi.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>Tõrge: Tehingu loomine ebaõnnestus!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Viga: Tehingust keelduti. Nt rahakoti koopia kasutamisel võivad selle põhjustada juba kulutatud mündid.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Vorm</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>S&umma:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Maksa &:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Tehingu saaja aadress (nt: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Aadressiraamatusse sisestamiseks märgista aadress</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Märgis</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Vali saaja aadressiraamatust</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Kleebi aadress vahemälust</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Saaja eemaldamine</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Unicorncoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Sisesta Unicorncoini aadress (nt: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Signatuurid - Allkirjasta / Kinnita Sõnum</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Allkirjastamise teade</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Omandiõigsuse tõestamiseks saad sõnumeid allkirjastada oma aadressiga. Ettevaatust petturitega, kes üritavad saada sinu allkirja endale saada. Allkirjasta ainult korralikult täidetud avaldusi, millega nõustud.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Sõnumi signeerimise aadress (nt: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Vali aadress aadressiraamatust</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Kleebi aadress vahemälust</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Sisesta siia allkirjastamise sõnum</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>Signatuur</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopeeri praegune signatuur vahemällu</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Unicorncoin address</source>
<translation>Allkirjasta sõnum Unicorncoini aadressi sulle kuulumise tõestamiseks</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Allkirjasta &Sõnum</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Tühjenda kõik sõnumi allkirjastamise väljad</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Puhasta &Kõik</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&Kinnita Sõnum</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Kinnitamiseks sisesta allkirjastamise aadress, sõnum (kindlasti kopeeri täpselt ka reavahetused, tühikud, tabulaatorid jms) ning allolev signatuur.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Aadress, millega sõnum allkirjastati (nt: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Unicorncoin address</source>
<translation>Kinnita sõnum tõestamaks selle allkirjastatust määratud Unicorncoini aadressiga.</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>Kinnita &Sõnum</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>Tühjenda kõik sõnumi kinnitamise väljad</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Unicorncoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Sisesta Unicorncoini aadress (nt: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Signatuuri genereerimiseks vajuta "Allkirjasta Sõnum"</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Unicorncoin signature</source>
<translation>Sisesta Unicorncoini allkiri</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>Sisestatud aadress ei kehti.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Palun kontrolli aadressi ning proovi uuesti.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>Sisestatud aadress ei viita võtmele.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Rahakoti avamine katkestati.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>Sisestatud aadressi privaatvõti ei ole saadaval.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Sõnumi signeerimine ebaõnnestus.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Sõnum signeeritud.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Signatuuri ei õnnestunud dekodeerida.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Palun kontrolli signatuuri ning proovi uuesti.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>Signatuur ei kattunud sõnumi kokkuvõttega.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Sõnumi kontroll ebaõnnestus.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Sõnum kontrollitud.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Litecoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Avatud kuni %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
        <translation>%1/offline'is</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/kinnitamata</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 kinnitust</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Staatus</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, levita läbi %n node'i</numerusform><numerusform>, levita läbi %n node'i</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Kuupäev</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Allikas</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Genereeritud</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Saatja</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Saaja</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>oma aadress</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>märgis</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Krediit</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>aegub %n bloki pärast</numerusform><numerusform>aegub %n bloki pärast</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>mitte aktsepteeritud</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Deebet</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Tehingu tasu</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Neto summa</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Sõnum</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Kommentaar</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>Tehingu ID</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Enne, kui loodud münte saab kulutama asuda, peavad need läbima 120 blokki. Kui sina selle bloki lõid, levitati see, bloki jadasse ühendamiseks, võrgustikku. Kui jadasse ühendamine ei õnnestu, muudetakse tema staatus "keeldutud" olekusse ning seda ei saa kulutada. Selline olukord võib juhtuda, kui mõni teine node loob bloki sinuga samal ajal.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Debug'imise info</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Tehing</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>Sisendid</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Kogus</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>õige</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>vale</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, veel esitlemata</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Avaneb %n bloki pärast</numerusform><numerusform>Avaneb %n bloki pärast</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>tundmatu</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Tehingu üksikasjad</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Paan kuvab tehingu detailid</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Kuupäev</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tüüp</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Aadress</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Kogus</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Avaneb %n bloki pärast</numerusform><numerusform>Avaneb %n bloki pärast</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Avatud kuni %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Ühenduseta (%1 kinnitust)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Kinnitamata (%1/%2 kinnitust)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Kinnitatud (%1 kinnitust)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>Mine'itud jääk muutub kättesaadavaks %n bloki läbimisel</numerusform><numerusform>Mine'itud jääk muutub kättesaadavaks %n bloki läbimisel</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Antud klotsi pole saanud ükski osapool ning tõenäoliselt seda ei aktsepteerita!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Loodud, kuid aktsepteerimata</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Saadud koos</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Kellelt saadud</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Saadetud</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Makse iseendale</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Mine'itud</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Tehingu staatus. Kinnituste arvu kuvamiseks liigu hiire noolega selle peale.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Tehingu saamise kuupäev ning kellaaeg.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tehingu tüüp.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Tehingu saaja aadress.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Jäägile lisatud või eemaldatud summa.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Kõik</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Täna</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Jooksev nädal</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Jooksev kuu</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Eelmine kuu</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Jooksev aasta</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Ulatus...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Saadud koos</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Saadetud</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Iseendale</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Mine'itud</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Muu</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Otsimiseks sisesta märgis või aadress</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Vähim summa</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Aadressi kopeerimine</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Märgise kopeerimine</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopeeri summa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Kopeeri tehingu ID</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Märgise muutmine</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Kuva tehingu detailid</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Tehinguandmete eksport</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Komaeraldatud fail (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Kinnitatud</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Kuupäev</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tüüp</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Silt</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Aadress</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Kogus</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Viga eksportimisel</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Tõrge faili kirjutamisel %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Ulatus:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>saaja</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>Varundatud Rahakott</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>Rahakoti andmed (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Varundamine nurjus</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Rahakoti andmete uude kohta salvestamine nurjus.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>Varundamine õnnestus</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>Rahakoti andmete uude kohta salvestamine õnnestus.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Unicorncoin version</source>
<translation>Unicorncoini versioon</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Kasutus:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or unicorncoind</source>
<translation>Saada käsklus -serverile või unicorncoindile</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Käskluste loetelu</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Käskluste abiinfo</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Valikud:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: unicorncoin.conf)</source>
<translation>Täpsusta sätete fail (vaikimisi: unicorncoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: unicorncoind.pid)</source>
<translation>Täpsusta PID fail (vaikimisi: unicorncoin.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Täpsusta andmekataloog</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Sea andmebaasi vahemälu suurus MB (vaikeväärtus: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9933 or testnet: 19933)</source>
<translation>Kuula ühendusi pordil <port> (vaikeväärtus: 9933 või testnet: 19933)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source><|fim▁hole|> <message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Peeri aadressi saamiseks ühendu korraks node'iga</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>Täpsusta enda avalik aadress</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Ulakate peeride valulävi (vaikeväärtus: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Mitme sekundi pärast ulakad peerid tagasi võivad tulla (vaikeväärtus: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>RPC pordi %u kuulamiseks seadistamisel ilmnes viga IPv4'l: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9332 or testnet: 19332)</source>
<translation>Kuula JSON-RPC ühendusel seda porti <port> (vaikeväärtus: 9332 või testnet: 19332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Luba käsurea ning JSON-RPC käsklusi</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Tööta taustal ning aktsepteeri käsklusi</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Testvõrgu kasutamine</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Luba välisühendusi (vaikeväärtus: 1 kui puudub -proxy või -connect)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=unicorncoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Unicorncoin Alert" [email protected]
</source>
<translation>%s, sul tuleb rpcpassword määrata seadete failis:
%s
Soovitatav on kasutada järgmist juhuslikku parooli:
rpcuser=unicorncoinrpc
rpcpassword=%s
(seda parooli ei pea meeles pidama)
Kasutajanimi ning parool EI TOHI kattuda.
Kui faili ei leita, loo see ainult-omaniku-loetavas failiõigustes .
Soovitatav on seadistada tõrgete puhul teavitus;
nt: alertnotify=echo %%s | email -s "Unicorncoin Alert" [email protected]
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>RPC pordi %u kuulamiseks seadistamisel ilmnes viga IPv6'l, lülitumine tagasi IPv4'le : %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Määratud aadressiga sidumine ning sellelt kuulamine. IPv6 jaoks kasuta vormingut [host]:port</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Unicorncoin is probably already running.</source>
<translation>Ei suuda määrata ainuõigust andmekaustale %s. Tõenäolisel on Unicorncoin juba avatud.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Tõrge: Tehingust keelduti! Põhjuseks võib olla juba kulutatud mündid, nt kui wallet.dat fail koopias kulutatid mündid, kuid ei märgitud neid siin vastavalt.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>Tõrge: Selle tehingu jaoks on nõutav lisatasu vähemalt %s. Põhjuseks võib olla summa suurus, keerukus või hiljuti saadud summade kasutamine!</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>Käivita käsklus, kui saabub tähtis hoiatus (%s cmd's asendatakse sõnumiga)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Käivita käsklus, kui rahakoti tehing muutub (%s cmd's muudetakse TxID'ks)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Sea "kõrge tähtsusega"/"madala tehingu lisatasuga" tehingute maksimumsuurus baitides (vaikeväärtus: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>See on test-versioon - kasutamine omal riisikol - ära kasuta mining'uks ega kaupmeeste programmides</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Hoiatus: -paytxfee on seatud väga kõrgeks! See on sinu poolt makstav tehingu lisatasu.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Hoiatus: Kuvatavad tehingud ei pruugi olla korrektsed! Sina või node'id peate tegema uuenduse.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Unicorncoin will not work properly.</source>
<translation>Hoiatus: Palun kontrolli oma arvuti kuupäeva/kellaaega! Kui arvuti kell on vale, siis Unicorncoin ei tööta korralikult</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Hoiatus: ilmnes tõrge wallet.dat faili lugemisel! Võtmed on terved, kuid tehingu andmed või aadressiraamatu kirjed võivad olla kadunud või vigased.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Hoiatus: toimus wallet.dat faili andmete päästmine! Originaal wallet.dat nimetati kaustas %s ümber wallet.{ajatempel}.bak'iks, jäägi või tehingute ebakõlade puhul tuleks teha backup'ist taastamine.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Püüa vigasest wallet.dat failist taastada turvavõtmed</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>Blokeeri loomise valikud:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Ühendu ainult määratud node'i(de)ga</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>Tuvastati vigane bloki andmebaas</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Leia oma IP aadress (vaikeväärtus: 1, kui kuulatakse ning puudub -externalip)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Kas soovid bloki andmebaasi taastada?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>Tõrge bloki andmebaasi käivitamisel</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>Tõrge rahakoti keskkonna %s käivitamisel!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>Tõrge bloki baasi lugemisel</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>Tõrge bloki andmebaasi avamisel</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>Tõrge: liiga vähe kettaruumi!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Tõrge: Rahakott on lukus, tehingu loomine ei ole võimalik!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>Tõrge: süsteemi tõrge:</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Pordi kuulamine nurjus. Soovikorral kasuta -listen=0.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>Tõrge bloki sisu lugemisel</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>Bloki lugemine ebaõnnestus</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>Bloki indeksi sünkimine ebaõnnestus</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>Bloki indeksi kirjutamine ebaõnnestus</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>Bloki sisu kirjutamine ebaõnnestus</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>Tõrge bloki sisu kirjutamisel</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>Tõrge faili info kirjutamisel</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>Tõrge mündi andmebaasi kirjutamisel</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>Tehingu indeksi kirjutamine ebaõnnestus</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>Tagasivõtmise andmete kirjutamine ebaõnnestus</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>Otsi DNS'i lookup'i kastavaid peere (vaikeväärtus: 1, kui mitte -connect)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>Käivitamisel kontrollitavate blokkide arv (vaikeväärtus: 288, 0=kõik)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>Blokkide kontrollimise põhjalikkus (0-4, vaikeväärtus: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Taasta bloki jada indeks blk000??.dat failist</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>Määra RPC kõnede haldurite arv (vaikeväärtus: 4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>Kontrollin blokke...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>Kontrollin rahakotti...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Impordi blokid välisest blk000??.dat failist</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>Informatsioon</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Vigane -tor aadress: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>Säilita kogu tehingu indeks (vaikeväärtus: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Maksimaalne saamise puhver -connection kohta , <n>*1000 baiti (vaikeväärtus: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Maksimaalne saatmise puhver -connection kohta , <n>*1000 baiti (vaikeväärtus: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>Tunnusta ainult sisseehitatud turvapunktidele vastavaid bloki jadu (vaikeväärtus: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Ühenda ainult node'idega <net> võrgus (IPv4, IPv6 või Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Väljund lisa debug'imise infoks. Tuleneb kõikidest teistest -debug* valikutest</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Lisa võrgu debug'imise info väljund</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Varusta debugi väljund ajatempliga</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Unicorncoin Wiki for SSL setup instructions)</source>
<translation>SSL valikud: (vaata Unicorncoini Wikist või SSL sätete juhendist)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Vali turva proxi SOCKS versioon (4-5, vaikeväärtus: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Saada jälitus/debug, debug.log faili asemel, konsooli</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Saada jälitus/debug info debuggerile</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Sea maksimaalne bloki suurus baitides (vaikeväärtus: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Sea minimaalne bloki suurus baitides (vaikeväärtus: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Kahanda programmi käivitamisel debug.log faili (vaikeväärtus: 1, kui ei ole -debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Sea ühenduse timeout millisekundites (vaikeväärtus: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>Süsteemi tõrge:</translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Kasuta kuulatava pordi määramiseks UPnP ühendust (vaikeväärtus: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Kasuta kuulatava pordi määramiseks UPnP ühendust (vaikeväärtus: 1, kui kuulatakse)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Kasuta varjatud teenustele ligipääsuks proxy't (vaikeväärtus: sama, mis -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>JSON-RPC ühenduste kasutajatunnus</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>Hoiatus</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Hoiatus: versioon on aegunud, uuendus on nõutav!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>Andmebaas tuleb taastada kasutades -reindex, et muuta -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat fail on katki, päästmine ebaõnnestus</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>JSON-RPC ühenduste salasõna</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>JSON-RPC ühenduste lubamine kindla IP pealt</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Saada käsklusi node'ile IP'ga <ip> (vaikeväärtus: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Käivita käsklus, kui parim plokk muutub (käskluse %s asendatakse ploki hash'iga)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Uuenda rahakott uusimasse vormingusse</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Sea võtmete hulgaks <n> (vaikeväärtus: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Otsi ploki jadast rahakoti kadunud tehinguid</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Kasuta JSON-RPC ühenduste jaoks OpenSSL'i (https)</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Serveri sertifikaadifail (vaikeväärtus: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Serveri privaatvõti (vaikeväärtus: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Lubatud šiffrid (vaikeväärtus: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Käesolev abitekst</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Selle arvutiga ei ole võimalik siduda %s külge (katse nurjus %d, %s tõttu)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Ühendu läbi turva proxi</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>-addnode, -seednode ja -connect tohivad kasutada DNS lookup'i</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Aadresside laadimine...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Viga wallet.dat käivitamisel. Vigane rahakkott</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Unicorncoin</source>
<translation>Viga wallet.dat käivitamisel: Rahakott nõuab Unicorncoini uusimat versiooni</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Unicorncoin to complete</source>
<translation>Rahakott tuli ümberkirjutada: toimingu lõpetamiseks taaskäivita Unicorncoin</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Viga wallet.dat käivitamisel</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Vigane -proxi aadress: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Kirjeldatud tundmatu võrgustik -onlynet'is: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Küsitud tundmatu -socks proxi versioon: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Tundmatu -bind aadress: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Tundmatu -externalip aadress: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>-paytxfee=<amount> jaoks vigane kogus: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Kehtetu summa</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Liiga suur summa</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Klotside indeksi laadimine...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Lisa node ning hoia ühendus avatud</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Unicorncoin is probably already running.</source>
<translation>%s'ga ei ole võimalik sellest arvutist siduda. Unicorncoin juba töötab.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Minu saadetavate tehingute lisatasu KB kohta</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Rahakoti laadimine...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>Rahakoti vanandamine ebaõnnestus</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>Tõrge vaikimisi aadressi kirjutamisel</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Üleskaneerimine...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Laetud</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>%s valiku kasutamine</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Tõrge</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>rpcpassword=<password> peab sätete failis olema seadistatud:⏎
%s⏎
Kui seda faili ei ole, loo see ainult-omanikule-lugemiseks faili õigustes.</translation>
</message>
</context>
</TS><|fim▁end|>
|
<translation>Säilita vähemalt <n> ühendust peeridega (vaikeväärtus: 125)</translation>
</message>
|
<|file_name|>issue-6596-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: unknown macro variable `nonexistent`
macro_rules! e {<|fim▁hole|> );
}
fn main() {
e!(foo);
}<|fim▁end|>
|
($inp:ident) => (
$nonexistent
|
<|file_name|>reference.rs<|end_file_name|><|fim▁begin|>use crate::real_std::{any::Any, fmt, marker::PhantomData, sync::Mutex};
use crate::{
api::{generic::A, Generic, Unrooted, Userdata, WithVM, IO},
gc::{CloneUnrooted, GcPtr, GcRef, Move, Trace},
thread::ThreadInternal,
value::{Cloner, Value},
vm::Thread,
ExternModule, Result,
};
#[derive(VmType)]
#[gluon(gluon_vm)]
#[gluon(vm_type = "std.reference.Reference")]
pub struct Reference<T> {
value: Mutex<Value>,
thread: GcPtr<Thread>,
_marker: PhantomData<T>,
}
impl<T> Userdata for Reference<T>
where
T: Any + Send + Sync,
{
fn deep_clone<'gc>(
&self,
deep_cloner: &'gc mut Cloner,
) -> Result<GcRef<'gc, Box<dyn Userdata>>> {
let value = self.value.lock().unwrap();
// SAFETY During the `alloc` call the unrooted values are scanned through the `DataDef`
unsafe {
let cloned_value = deep_cloner.deep_clone(&value)?.unrooted();
let data: Box<dyn Userdata> = Box::new(Reference {
value: Mutex::new(cloned_value),
thread: GcPtr::from_raw(deep_cloner.thread()),
_marker: PhantomData::<A>,
});
deep_cloner.gc().alloc(Move(data))
}
}
}
impl<T> fmt::Debug for Reference<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Ref({:?})", *self.value.lock().unwrap())
}
}
unsafe impl<T> Trace for Reference<T> {
impl_trace_fields! { self, gc; value }
}
fn set(r: &Reference<A>, a: Generic<A>) -> IO<()> {
match r.thread.deep_clone_value(&r.thread, a.get_value()) {
// SAFETY Rooted when stored in the reference
Ok(a) => unsafe {
*r.value.lock().unwrap() = a.get_value().clone_unrooted();
IO::Value(())
},
Err(err) => IO::Exception(format!("{}", err)),
}
}
fn get(r: &Reference<A>) -> IO<Unrooted<A>> {
// SAFETY The returned, unrooted value gets pushed immediately to the stack
IO::Value(unsafe { Unrooted::from(r.value.lock().unwrap().clone_unrooted()) })
}
fn make_ref(a: WithVM<Generic<A>>) -> IO<Reference<A>> {
// SAFETY The returned, unrooted value gets pushed immediately to the stack
unsafe {
IO::Value(Reference {
value: Mutex::new(a.value.get_value().clone_unrooted()),
thread: GcPtr::from_raw(a.vm),
_marker: PhantomData,
})
}
}
mod std {
pub mod reference {
pub use crate::reference as prim;
}
}
pub fn load(vm: &Thread) -> Result<ExternModule> {
let _ = vm.register_type::<Reference<A>>("std.reference.Reference", &["a"]);
ExternModule::new(
vm,
record! {
type Reference a => Reference<A>,
(store "<-") => primitive!(2, "std.reference.prim.(<-)", std::reference::prim::set),
load => primitive!(1, "std.reference.prim.load", std::reference::prim::get),
(ref_ "ref") => primitive!(1, "std.reference.prim.ref", std::reference::prim::make_ref),
},
)
}
pub mod st {
use super::*;
use crate::api::RuntimeResult;
fn set(r: &Reference<A>, a: Generic<A>) -> RuntimeResult<(), String> {
match r.thread.deep_clone_value(&r.thread, a.get_value()) {
// SAFETY Rooted when stored in the reference
Ok(a) => unsafe {
*r.value.lock().unwrap() = a.get_value().clone_unrooted();
RuntimeResult::Return(())
},
Err(err) => RuntimeResult::Panic(format!("{}", err)),
}
}
fn get(r: &Reference<A>) -> Unrooted<A> {
// SAFETY The returned, unrooted value gets pushed immediately to the stack
unsafe { Unrooted::from(r.value.lock().unwrap().clone_unrooted()) }
}
<|fim▁hole|> unsafe {
Reference {
value: Mutex::new(a.value.get_value().clone_unrooted()),
thread: GcPtr::from_raw(a.vm),
_marker: PhantomData,
}
}
}
mod std {
pub mod st {
pub mod reference {
pub use crate::reference::st as prim;
}
}
}
pub fn load(vm: &Thread) -> Result<ExternModule> {
ExternModule::new(
vm,
record! {
type Reference a => Reference<A>,
(store "<-") => primitive!(2, "std.st.reference.prim.(<-)", std::st::reference::prim::set),
load => primitive!(1, "std.st.reference.prim.load", std::st::reference::prim::get),
(ref_ "ref") => primitive!(1, "std.st.reference.prim.ref", std::st::reference::prim::make_ref),
},
)
}
}<|fim▁end|>
|
fn make_ref(a: WithVM<Generic<A>>) -> Reference<A> {
// SAFETY The returned, unrooted value gets pushed immediately to the stack
|
<|file_name|>generator.go<|end_file_name|><|fim▁begin|>package generator
import (
kapi "k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/rest"
"k8s.io/kubernetes/pkg/runtime"
buildapi "github.com/openshift/origin/pkg/build/api"
"github.com/openshift/origin/pkg/build/generator"<|fim▁hole|> "github.com/openshift/origin/pkg/build/registry/clone"
)
// NewStorage creates a new storage object for build generation
func NewStorage(generator *generator.BuildGenerator) *CloneREST {
return &CloneREST{generator: generator}
}
// CloneREST is a RESTStorage implementation for a BuildGenerator which supports only
// the Get operation (as the generator has no underlying storage object).
type CloneREST struct {
generator *generator.BuildGenerator
}
// New creates a new build clone request
func (s *CloneREST) New() runtime.Object {
return &buildapi.BuildRequest{}
}
// Create instantiates a new build from an existing build
func (s *CloneREST) Create(ctx kapi.Context, obj runtime.Object) (runtime.Object, error) {
if err := rest.BeforeCreate(clone.Strategy, ctx, obj); err != nil {
return nil, err
}
return s.generator.Clone(ctx, obj.(*buildapi.BuildRequest))
}<|fim▁end|>
| |
<|file_name|>Plan.py<|end_file_name|><|fim▁begin|>import sys
from collections import deque
from Catalog.Schema import DBSchema
from Query.Operators.TableScan import TableScan
from Query.Operators.Select import Select
from Query.Operators.Project import Project
from Query.Operators.Union import Union
from Query.Operators.Join import Join
from Query.Operators.GroupBy import GroupBy
from Query.Operators.Sort import Sort
class Plan:<|fim▁hole|> A data structure implementing query plans.
Query plans are tree data structures whose nodes are objects
inheriting from the Query.Operator class.
Our Query.Plan class tracks the root of the plan tree,
and provides basic accessors such as the ability to
retrieve the relations accessed by the query, the query's
output schema, and plan pretty printing facilities.
Plan instances delegate their iterator to the root operator,
enabling direct iteration over query results.
Plan instances should use the 'prepare' method prior to
iteration (as done with Database.processQuery), to initialize
all operators contained in the plan.
"""
def __init__(self, **kwargs):
other = kwargs.get("other", None)
if other:
self.fromOther(other)
elif "root" in kwargs:
self.root = kwargs["root"]
else:
raise ValueError("No root operator specified for query plan")
def fromOther(self):
self.root = other.root
# Returns the root operator in the query plan
def root(self):
return self.root
# Returns the query result schema.
def schema(self):
return self.root.schema()
# Returns the relations used by the query.
def relations(self):
return [op.relationId() for (_,op) in self.flatten() if isinstance(op, TableScan)]
# Pre-order depth-first flattening of the query tree.
def flatten(self):
if self.root:
result = []
queue = deque([(0, self.root)])
while queue:
(depth, operator) = queue.popleft()
children = operator.inputs()
result.append((depth, operator))
if children:
queue.extendleft([(depth+1, c) for c in children])
return result
# Plan preparation and execution
# Returns a prepared plan, where every operator has filled in
# internal parameters necessary for processing data.
def prepare(self, database):
if self.root:
for (_, operator) in self.flatten():
operator.prepare(database)
return self
else:
raise ValueError("Invalid query plan")
# Iterator abstraction for query processing.
# Thus, we can use: "for page in plan: ..."
def __iter__(self):
return iter(self.root)
# Plan and statistics information.
# Returns a description for the entire query plan, based on the
# description of each individual operator.
def explain(self):
if self.root:
planDesc = []
indent = ' ' * 2
for (depth, operator) in self.flatten():
planDesc.append(indent * depth + operator.explain())
return '\n'.join(planDesc)
# Returns the cost of this query plan. Each operator should determine
# its own local cost added to the cost of its children.
def cost(self):
return self.root.cost()
# Plan I/O, e.g., for query shipping.
def pack(self):
raise NotImplementedError
def unpack(self):
raise NotImplementedError
class PlanBuilder:
"""
A query plan builder class that can be used for LINQ-like construction of queries.
A plan builder consists of an operator field, as the running root of the query tree.
Each method returns a plan builder instance, that can be used to further
operators compose with additional builder methods.
A plan builder yields a Query.Plan instance through its finalize() method.
>>> import Database
>>> db = Database.Database()
>>> db.createRelation('employee', [('id', 'int'), ('age', 'int')])
>>> schema = db.relationSchema('employee')
# Populate relation
>>> for tup in [schema.pack(schema.instantiate(i, 2*i+20)) for i in range(20)]:
... _ = db.insertTuple(schema.name, tup)
...
### SELECT * FROM Employee WHERE age < 30
>>> query1 = db.query().fromTable('employee').where("age < 30").finalize()
>>> query1.relations()
['employee']
>>> print(query1.explain()) # doctest: +ELLIPSIS
Select[...,cost=...](predicate='age < 30')
TableScan[...,cost=...](employee)
>>> [schema.unpack(tup).age for page in db.processQuery(query1) for tup in page[1]]
[20, 22, 24, 26, 28]
### SELECT eid FROM Employee WHERE age < 30
>>> query2 = db.query().fromTable('employee').where("age < 30").select({'id': ('id', 'int')}).finalize()
>>> print(query2.explain()) # doctest: +ELLIPSIS
Project[...,cost=...](projections={'id': ('id', 'int')})
Select[...,cost=...](predicate='age < 30')
TableScan[...,cost=...](employee)
>>> [query2.schema().unpack(tup).id for page in db.processQuery(query2) for tup in page[1]]
[0, 1, 2, 3, 4]
### SELECT * FROM Employee UNION ALL Employee
>>> query3 = db.query().fromTable('employee').union(db.query().fromTable('employee')).finalize()
>>> print(query3.explain()) # doctest: +ELLIPSIS
UnionAll[...,cost=...]
TableScan[...,cost=...](employee)
TableScan[...,cost=...](employee)
>>> [query3.schema().unpack(tup).id for page in db.processQuery(query3) for tup in page[1]] # doctest:+ELLIPSIS
[0, 1, 2, ..., 19, 0, 1, 2, ..., 19]
### SELECT * FROM Employee E1 JOIN Employee E2 ON E1.id = E2.id
>>> e2schema = schema.rename('employee2', {'id':'id2', 'age':'age2'})
>>> query4 = db.query().fromTable('employee').join( \
db.query().fromTable('employee'), \
rhsSchema=e2schema, \
method='block-nested-loops', expr='id == id2').finalize()
>>> print(query4.explain()) # doctest: +ELLIPSIS
BNLJoin[...,cost=...](expr='id == id2')
TableScan[...,cost=...](employee)
TableScan[...,cost=...](employee)
>>> q4results = [query4.schema().unpack(tup) for page in db.processQuery(query4) for tup in page[1]]
>>> [(tup.id, tup.id2) for tup in q4results] # doctest:+ELLIPSIS
[(0, 0), (1, 1), (2, 2), ..., (18, 18), (19, 19)]
### Hash join test with the same query.
### SELECT * FROM Employee E1 JOIN Employee E2 ON E1.id = E2.id
>>> e2schema = schema.rename('employee2', {'id':'id2', 'age':'age2'})
>>> keySchema = DBSchema('employeeKey', [('id', 'int')])
>>> keySchema2 = DBSchema('employeeKey2', [('id2', 'int')])
>>> query5 = db.query().fromTable('employee').join( \
db.query().fromTable('employee'), \
rhsSchema=e2schema, \
method='hash', \
lhsHashFn='hash(id) % 4', lhsKeySchema=keySchema, \
rhsHashFn='hash(id2) % 4', rhsKeySchema=keySchema2, \
).finalize()
>>> print(query5.explain()) # doctest: +ELLIPSIS
HashJoin[...,cost=...](lhsKeySchema=employeeKey[(id,int)],rhsKeySchema=employeeKey2[(id2,int)],lhsHashFn='hash(id) % 4',rhsHashFn='hash(id2) % 4')
TableScan[...,cost=...](employee)
TableScan[...,cost=...](employee)
>>> q5results = [query5.schema().unpack(tup) for page in db.processQuery(query5) for tup in page[1]]
>>> [(tup.id, tup.id2) for tup in q5results] # doctest:+ELLIPSIS
[(0, 0), (1, 1), (2, 2), ..., (18, 18), (19, 19)]
>>> sorted([(tup.id, tup.id2) for tup in q5results]) # doctest:+ELLIPSIS
[(0, 0), (1, 1), (2, 2), ..., (18, 18), (19, 19)]
### Group by aggregate query
### SELECT id, max(age) FROM Employee GROUP BY id
>>> aggMinMaxSchema = DBSchema('minmax', [('minAge', 'int'), ('maxAge','int')])
>>> query6 = db.query().fromTable('employee').groupBy( \
groupSchema=keySchema, \
aggSchema=aggMinMaxSchema, \
groupExpr=(lambda e: e.id), \
aggExprs=[(sys.maxsize, lambda acc, e: min(acc, e.age), lambda x: x), \
(0, lambda acc, e: max(acc, e.age), lambda x: x)], \
groupHashFn=(lambda gbVal: hash(gbVal[0]) % 2) \
).finalize()
>>> print(query6.explain()) # doctest: +ELLIPSIS
GroupBy[...,cost=...](groupSchema=employeeKey[(id,int)], aggSchema=minmax[(minAge,int),(maxAge,int)])
TableScan[...,cost=...](employee)
>>> q6results = [query6.schema().unpack(tup) for page in db.processQuery(query6) for tup in page[1]]
>>> sorted([(tup.id, tup.minAge, tup.maxAge) for tup in q6results]) # doctest:+ELLIPSIS
[(0, 20, 20), (1, 22, 22), ..., (18, 56, 56), (19, 58, 58)]
### Order by query
### SELECT id FROM Employee ORDER by age
>>> query7 = db.query().fromTable('employee') \
.order(sortKeyFn=lambda x: x.age, sortKeyDesc='age') \
.select({'id': ('id', 'int')}).finalize()
>>> print(query7.explain()) # doctest: +ELLIPSIS
Project[...,cost=...](projections={'id': ('id', 'int')})
Sort[...,cost=...](sortKeyDesc='age')
TableScan[...,cost=...](employee)
"""
def __init__(self, **kwargs):
other = kwargs.get("other", None)
if other:
self.fromOther(other)
elif "operator" in kwargs:
self.operator = kwargs["operator"]
elif "db" in kwargs:
self.database = kwargs["db"]
else:
raise ValueError("No initial operator or database given for a plan builder")
def fromOther(self, other):
self.database = other.database
self.operator = other.operator
def fromTable(self, relId):
if self.database:
schema = self.database.relationSchema(relId)
return PlanBuilder(operator=TableScan(relId, schema))
def where(self, conditionExpr):
if self.operator:
return PlanBuilder(operator=Select(self.operator, conditionExpr))
else:
raise ValueError("Invalid where clause")
def select(self, projectExprs):
if self.operator:
return PlanBuilder(operator=Project(self.operator, projectExprs))
else:
raise ValueError("Invalid select list")
def join(self, rhsQuery, **kwargs):
if rhsQuery:
rhsPlan = rhsQuery.operator
else:
raise ValueError("Invalid Join RHS query")
lhsPlan = self.operator
return PlanBuilder(operator=Join(lhsPlan, rhsPlan, **kwargs))
def union(self, subQuery):
if self.operator:
return PlanBuilder(operator=Union(self.operator, subQuery.operator))
else:
raise ValueError("Invalid union clause")
def groupBy(self, **kwargs):
if self.operator:
return PlanBuilder(operator=GroupBy(self.operator, **kwargs))
else:
raise ValueError("Invalid group by operator")
def order(self, **kwargs):
if self.operator:
return PlanBuilder(operator=Sort(self.operator, **kwargs))
else:
raise ValueError("Invalid order by operator")
# Constructs a plan instance from the running plan tree.
def finalize(self):
if self.operator:
return Plan(root=self.operator)
else:
raise ValueError("Invalid query plan")
if __name__ == "__main__":
import doctest
doctest.testmod()<|fim▁end|>
|
"""
|
<|file_name|>lexical.py<|end_file_name|><|fim▁begin|>from ..io.importer import lexicon_data_to_csvs, import_lexicon_csvs
from ..io.enrichment.lexical import enrich_lexicon_from_csv, parse_file
from .spoken import SpokenContext
class LexicalContext(SpokenContext):
"""
Class that contains methods for dealing specifically with words
"""
def enrich_lexicon(self, lexicon_data, type_data=None, case_sensitive=False):
"""
adds properties to lexicon, adds properties to hierarchy
Parameters
----------
lexicon_data : dict
the data in the lexicon
type_data : dict
default to None
case_sensitive : bool<|fim▁hole|> type_data = {k: type(v) for k, v in next(iter(lexicon_data.values())).items()}
removed = [x for x in type_data.keys() if self.hierarchy.has_type_property(self.word_name, x)]
type_data = {k: v for k,v in type_data.items() if k not in removed}
if not type_data:
return
lexicon_data_to_csvs(self, lexicon_data, case_sensitive=case_sensitive)
import_lexicon_csvs(self, type_data, case_sensitive=case_sensitive)
self.hierarchy.add_type_properties(self, self.word_name, type_data.items())
self.encode_hierarchy()
def enrich_lexicon_from_csv(self, path, case_sensitive=False):
"""
Enriches lexicon from a CSV file
Parameters
----------
path : str
the path to the csv file
case_sensitive : boolean
Defaults to false
"""
enrich_lexicon_from_csv(self, path, case_sensitive)
def reset_lexicon_csv(self, path):
"""
Remove properties that were encoded via a CSV file
Parameters
----------
path : str
CSV file to get property names from
"""
data, type_data = parse_file(path, labels=[])
word = getattr(self, 'lexicon_' + self.word_name)
q = self.query_lexicon(word)
property_names = [x for x in type_data.keys()]
q.set_properties(**{x: None for x in property_names})
self.hierarchy.remove_type_properties(self, self.word_name, property_names)
self.encode_hierarchy()<|fim▁end|>
|
default to False
"""
if type_data is None:
|
<|file_name|>selection_manager.ts<|end_file_name|><|fim▁begin|>import {HasProps} from "./has_props"
import {Geometry} from "./geometry"
import {Selection} from "models/selections/selection"
import {Renderer, RendererView} from "models/renderers/renderer"
import {GlyphRendererView} from "models/renderers/glyph_renderer"
import {GraphRendererView} from "models/renderers/graph_renderer"
import * as p from "./properties"
import {ColumnarDataSource} from "models/sources/columnar_data_source"
export namespace SelectionManager {
export type Props = HasProps.Props & {
source: p.Property<ColumnarDataSource>
}
export type Attrs = p.AttrsOf<Props>
}
export interface SelectionManager extends SelectionManager.Attrs {}
export class SelectionManager extends HasProps {
properties: SelectionManager.Props
constructor(attrs?: Partial<SelectionManager.Attrs>) {
super(attrs)
}
static initClass(): void {
this.internal({
source: [ p.Any ],
})
}
inspectors: {[key: string]: Selection} = {}
select(renderer_views: RendererView[], geometry: Geometry, final: boolean, append: boolean = false): boolean {
// divide renderers into glyph_renderers or graph_renderers
const glyph_renderer_views: GlyphRendererView[] = []
const graph_renderer_views: GraphRendererView[] = []
for (const r of renderer_views) {
if (r instanceof GlyphRendererView)
glyph_renderer_views.push(r)
else if (r instanceof GraphRendererView)
graph_renderer_views.push(r)
}
let did_hit = false
// graph renderer case
for (const r of graph_renderer_views) {
const hit_test_result = r.model.selection_policy.hit_test(geometry, r)
did_hit = did_hit || r.model.selection_policy.do_selection(hit_test_result, r.model, final, append)
}
// glyph renderers
if (glyph_renderer_views.length > 0) {
const hit_test_result = this.source.selection_policy.hit_test(geometry, glyph_renderer_views)
did_hit = did_hit || this.source.selection_policy.do_selection(hit_test_result, this.source, final, append)
}
return did_hit
}
inspect(renderer_view: RendererView, geometry: Geometry): boolean {
let did_hit = false
if (renderer_view instanceof GlyphRendererView) {
const hit_test_result = renderer_view.hit_test(geometry)
if (hit_test_result != null) {
did_hit = !hit_test_result.is_empty()
const inspection = this.get_or_create_inspector(renderer_view.model)
inspection.update(hit_test_result, true, false)
this.source.setv({inspected: inspection}, {silent: true})
this.source.inspect.emit([renderer_view, {geometry}])
}
} else if (renderer_view instanceof GraphRendererView) {<|fim▁hole|> const hit_test_result = renderer_view.model.inspection_policy.hit_test(geometry, renderer_view)
did_hit = did_hit || renderer_view.model.inspection_policy.do_inspection(hit_test_result, geometry, renderer_view, false, false)
}
return did_hit
}
clear(rview?: RendererView): void {
this.source.selected.clear()
if (rview != null)
this.get_or_create_inspector(rview.model).clear()
}
get_or_create_inspector(rmodel: Renderer): Selection {
if (this.inspectors[rmodel.id] == null)
this.inspectors[rmodel.id] = new Selection()
return this.inspectors[rmodel.id]
}
}
SelectionManager.initClass()<|fim▁end|>
| |
<|file_name|>better-timeinput-polyfill.js<|end_file_name|><|fim▁begin|>(function(DOM, COMPONENT_CLASS) {
"use strict";
if ("orientation" in window) return; // skip mobile/tablet browsers
// polyfill timeinput for desktop browsers
var htmlEl = DOM.find("html"),
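        // parse "HH:MM" into numeric [hours, minutes]; malformed values yield an empty array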
timeparts = function(str) {
str = str.split(":");
if (str.length === 2) {
str[0] = parseFloat(str[0]);
str[1] = parseFloat(str[1]);
} else {
str = [];
}
return str;
},
zeropad = function(value) { return ("00" + value).slice(-2) },
ampm = function(pos, neg) { return htmlEl.get("lang") === "en-US" ? pos : neg },
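        // build a 24-hour "HH:MM" string, shifting the hour by 12 when PM is selected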
formatISOTime = function(hours, minutes, ampm) {
return zeropad(ampm === "PM" ? hours + 12 : hours) + ":" + zeropad(minutes);
};
DOM.extend("input[type=time]", {
constructor: function() {
var timeinput = DOM.create("input[type=hidden name=${name}]", {name: this.get("name")}),
ampmspan = DOM.create("span.${c}-meridian>(select>option>{AM}^option>{PM})+span>{AM}", {c: COMPONENT_CLASS}),
ampmselect = ampmspan.child(0);
this
// drop native implementation and clear name attribute
.set({type: "text", maxlength: 5, name: null})
.addClass(COMPONENT_CLASS)
.on("change", this.onChange.bind(this, timeinput, ampmselect))
.on("keydown", this.onKeydown, ["which", "shiftKey"])
.after(ampmspan, timeinput);
ampmselect.on("change", this.onMeridianChange.bind(this, timeinput, ampmselect));
// update value correctly on form reset
this.parent("form").on("reset", this.onFormReset.bind(this, timeinput, ampmselect));
// patch set method to update visible input as well
timeinput.set = this.onValueChanged.bind(this, timeinput.set, timeinput, ampmselect);
// update hidden input value and refresh all visible controls
timeinput.set(this.get()).data("defaultValue", timeinput.get());
// update default values to be formatted
this.set("defaultValue", this.get());
ampmselect.next().data("defaultValue", ampmselect.get());
if (this.matches(":focus")) timeinput.fire("focus");
},
onValueChanged: function(setter, timeinput, ampmselect) {
var parts, hours, minutes;
setter.apply(timeinput, Array.prototype.slice.call(arguments, 3));
if (arguments.length === 4) {
parts = timeparts(timeinput.get());
hours = parts[0];
minutes = parts[1];
// select appropriate AM/PM
ampmselect.child((hours -= 12) > 0 ? 1 : Math.min(hours += 12, 0)).set("selected", true);
// update displayed AM/PM
ampmselect.next().set(ampmselect.get());
// update visible input value, need to add zero padding to minutes
this.set(hours < ampm(13, 24) && minutes < 60 ? hours + ":" + zeropad(minutes) : "");
}
return timeinput;
},
onKeydown: function(which, shiftKey) {
return which === 186 && shiftKey || which < 58;
},
onChange: function(timeinput, ampmselect) {
var parts = timeparts(this.get()),
hours = parts[0],
minutes = parts[1],
value = "";
if (hours < ampm(13, 24) && minutes < 60) {
// refresh hidden input with new value
value = formatISOTime(hours, minutes, ampmselect.get());
} else if (parts.length === 2) {
// restore previous valid value
value = timeinput.get();
}
timeinput.set(value);
},
onMeridianChange: function(timeinput, ampmselect) {
// update displayed AM/PM
ampmselect.next().set(ampmselect.get());
// adjust time in hidden input
timeinput.set(function(el) {
var parts = timeparts(el.get()),
hours = parts[0],
minutes = parts[1];
if (ampmselect.get() === "AM") hours -= 12;
return formatISOTime(hours, minutes, ampmselect.get());
});
},
onFormReset: function(timeinput, ampmselect) {
timeinput.set(timeinput.data("defaultValue"));<|fim▁hole|> });
}(window.DOM, "better-timeinput"));<|fim▁end|>
|
ampmselect.next().set(ampmselect.data("defaultValue"));
}
|
<|file_name|>struct_m_s_vehicle_1_1_lane_q.js<|end_file_name|><|fim▁begin|>var struct_m_s_vehicle_1_1_lane_q =
[
[ "allowsContinuation", "d3/d89/struct_m_s_vehicle_1_1_lane_q.html#a1491a03d3e914ce9f78fe892c6f8594b", null ],
[ "bestContinuations", "d3/d89/struct_m_s_vehicle_1_1_lane_q.html#a2fc7b1df76210eff08026dbd53c13312", null ],
[ "bestLaneOffset", "d3/d89/struct_m_s_vehicle_1_1_lane_q.html#aa7f926a77c7d33c071c620ae7e9d0ac1", null ],
[ "lane", "d3/d89/struct_m_s_vehicle_1_1_lane_q.html#a3df6bd9b94a7e3e4795547feddf68bf2", null ],
[ "length", "d3/d89/struct_m_s_vehicle_1_1_lane_q.html#a637a05a2b120bacaf781181febc5b3bb", null ],
[ "nextOccupation", "d3/d89/struct_m_s_vehicle_1_1_lane_q.html#acf8243e1febeb75b139a8b10bb679107", null ],<|fim▁hole|> [ "occupation", "d3/d89/struct_m_s_vehicle_1_1_lane_q.html#a51e793a9c0bfda3e315c62f579089f82", null ]
];<|fim▁end|>
| |
<|file_name|>testcurses.py<|end_file_name|><|fim▁begin|>import androidhelper
import curses
<|fim▁hole|>try:
win.box()
w,h=win.getmaxyx()
win.addstr(2,2,"Curses Test %sx%s" % (w,h))
win.addstr(10,10,"Hit a key")
win.getch()
finally:
curses.endwin()
print("Result=",result)<|fim▁end|>
|
droid = androidhelper.Android()
win=curses.initscr()
result="No result"
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! A Rust crate to connect a HD44780 lcd display
//!
//! # Example
//! ```no_run
//! use pi_lcd::*;
//!
//! // create a new lcd
//! let lcd = HD44780::new(11,10,[6,5,4,1],20,4);
//!
//! // send a String to the lcd at row 0
//! lcd.send_string("Hello World".to_string(),0);
//! ```
extern crate cupi;
extern crate regex;
use cupi::{CuPi, PinOutput, DigitalWrite};
use std::time::Duration;
use std::cell::RefCell;
use regex::Regex;
static CGRAM_ADDRESS: u8 = 0x40;
static COMMAND: bool = false;
static DATA: bool = true;
/// The display handle
pub struct HD44780 {
rs: RefCell<PinOutput>,
e: RefCell<PinOutput>,
data: Vec<RefCell<PinOutput>>,
cols: u32,
rows: u32,
lines: Vec<u8>,
}
impl HD44780 {
/// Creates a new HD44780 instance with `disp_rs` as rs pin, `disp_e` as enabled pin, `datalines` as data4 to data7
///
    /// `disp_cols` is the number of columns
    /// `disp_rows` is the number of rows
pub fn new(disp_rs: u32,
disp_e: u32,
datalines: [u32; 4],
disp_cols: u32,
disp_rows: u32)
-> HD44780 {
let raspi = CuPi::new().unwrap();
let rs = RefCell::new(raspi.pin(disp_rs as usize).unwrap().output());
let e = RefCell::new(raspi.pin(disp_e as usize).unwrap().output());
let mut data: Vec<RefCell<PinOutput>> = Vec::new();
for x in 0..4 {
data.push(RefCell::new(raspi.pin(datalines[x] as usize).unwrap().output()));
}
let lines: Vec<u8>;
match disp_rows {
1 => lines = vec![0x80],
2 => lines = vec![0x80, 0xC0],
3 => lines = vec![0x80, 0xC0, 0x94],
4 => lines = vec![0x80, 0xC0, 0x94, 0xD4],
_ => lines = vec![0x80],
};
let result = HD44780 {
rs: rs,
e: e,
data: data,
cols: disp_cols,
rows: disp_rows,
lines: lines,
};
result
}
/// Initializes the display and clears it
pub fn init(&self) {
self.command(0x33);
self.command(0x32);
self.command(0x28);
self.command(0x0C);
self.command(0x06);
self.clear();
}
/// Clears the display
pub fn clear(&self) {
self.command(0x01);
}
/// Sends a given byte as a command
pub fn command(&self, bits: u8) {
self.send_byte(bits, COMMAND);
}
/// Parses a String and and outputs it to the given row
pub fn send_string(&self, text: String, row: u32) {
let re_char: Regex = Regex::new(r"^\\cg:([0-7])").unwrap();
let mut message: Vec<u8> = Vec::new();
let col = self.cols;
let row = row % self.rows;
// TODO: implement check for custom characters
for i in text.chars() {
message.push(i as u8);
}
message.truncate(col as usize);
self.select_row(row);
self.write(message);
}
/// Creates a new custom character from a bitmap on the given `address`
pub fn create_char(&self, address: u8, bitmap: [u8; 8]) -> Result<u8, &'static str> {
// send new custom character to cgram address
match address {
0...7 => {
self.command(CGRAM_ADDRESS | address << 3);
for row in &bitmap {
                    self.send_byte(*row, DATA); // send each pixel row of the bitmap
}
Ok(address)
},
_ => Err("address must be between 0 and 7"),
}
}
<|fim▁hole|>
fn write(&self, charlist: Vec<u8>) {
// send every single char to send_byte
for x in charlist {
self.send_byte(x, DATA);
}
}
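    // Writes one byte in 4-bit mode: RS selects data or command, then the high
    // and low nibbles are placed on the data lines, pulsing the enable pin after each.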
fn send_byte(&self, bits: u8, mode: bool) {
if mode {
self.rs.borrow_mut().high().unwrap();
} else {
self.rs.borrow_mut().low().unwrap();
}
self.data[0].borrow_mut().low().unwrap();
self.data[1].borrow_mut().low().unwrap();
self.data[2].borrow_mut().low().unwrap();
self.data[3].borrow_mut().low().unwrap();
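        // high nibble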
if bits & 0x10 == 0x10 {
self.data[0].borrow_mut().high().unwrap();
}
if bits & 0x20 == 0x20 {
self.data[1].borrow_mut().high().unwrap();
}
if bits & 0x40 == 0x40 {
self.data[2].borrow_mut().high().unwrap();
}
if bits & 0x80 == 0x80 {
self.data[3].borrow_mut().high().unwrap();
}
e_wait();
self.e.borrow_mut().high().unwrap();
e_wait();
self.e.borrow_mut().low().unwrap();
self.data[0].borrow_mut().low().unwrap();
self.data[1].borrow_mut().low().unwrap();
self.data[2].borrow_mut().low().unwrap();
self.data[3].borrow_mut().low().unwrap();
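        // low nibble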
if bits & 0x01 == 0x01 {
self.data[0].borrow_mut().high().unwrap();
}
if bits & 0x02 == 0x02 {
self.data[1].borrow_mut().high().unwrap();
}
if bits & 0x04 == 0x04 {
self.data[2].borrow_mut().high().unwrap();
}
if bits & 0x08 == 0x08 {
self.data[3].borrow_mut().high().unwrap();
}
e_wait();
self.e.borrow_mut().high().unwrap();
e_wait();
self.e.borrow_mut().low().unwrap();
}
}
/// Waits 50 ns to let the display recognize the enable pin
pub fn e_wait() {
std::thread::sleep(Duration::new(0, 50));
}<|fim▁end|>
|
fn select_row(&self, row: u32) {
// select the row where the String should be printed at
self.send_byte(self.lines[row as usize], COMMAND);
}
|
<|file_name|>yuki279.py<|end_file_name|><|fim▁begin|>from collections import Counter<|fim▁hole|><|fim▁end|>
|
c = Counter(input())
print(min(c['t'], c['r'], c['e']//2))
|
<|file_name|>spyglass_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package spyglass
import (
"context"
"encoding/json"
"fmt"
"os"
"reflect"
"sort"
"strings"
"testing"
"k8s.io/apimachinery/pkg/util/sets"
coreapi "k8s.io/api/core/v1"
"k8s.io/test-infra/prow/gcsupload"
"k8s.io/test-infra/prow/pod-utils/downwardapi"
"github.com/fsouza/fake-gcs-server/fakestorage"
"github.com/sirupsen/logrus"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
tgconf "github.com/GoogleCloudPlatform/testgrid/pb/config"
prowapi "k8s.io/test-infra/prow/apis/prowjobs/v1"
"k8s.io/test-infra/prow/config"
"k8s.io/test-infra/prow/deck/jobs"
"k8s.io/test-infra/prow/io"
"k8s.io/test-infra/prow/kube"
"k8s.io/test-infra/prow/spyglass/api"
"k8s.io/test-infra/prow/spyglass/lenses"
"k8s.io/test-infra/prow/spyglass/lenses/common"
ctrlruntimeclient "sigs.k8s.io/controller-runtime/pkg/client"
)
var (
fakeJa *jobs.JobAgent
fakeGCSServer *fakestorage.Server
)
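// fkc is a fake ProwJob client; List always returns this fixed slice of jobs.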
type fkc []prowapi.ProwJob
func (f fkc) List(ctx context.Context, pjs *prowapi.ProwJobList, _ ...ctrlruntimeclient.ListOption) error {
pjs.Items = f
return nil
}
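// fpkc is a fake pod log client; the string value names the cluster it pretends to serve logs for.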
type fpkc string
func (f fpkc) GetLogs(name, container string) ([]byte, error) {
if name == "wowowow" || name == "powowow" {
return []byte(fmt.Sprintf("%s.%s", f, container)), nil
}
return nil, fmt.Errorf("pod not found: %s", name)
}
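// fca is a fake config agent that returns a static config.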
type fca struct {
c config.Config
}
func (ca fca) Config() *config.Config {
return &ca.c
}
func TestMain(m *testing.M) {
var longLog string
for i := 0; i < 300; i++ {
longLog += "here a log\nthere a log\neverywhere a log log\n"
}
fakeGCSServer = fakestorage.NewServer([]fakestorage.Object{
{
BucketName: "test-bucket",
Name: "logs/example-ci-run/403/build-log.txt",
Content: []byte("Oh wow\nlogs\nthis is\ncrazy"),
},
{
BucketName: "test-bucket",
Name: "logs/example-ci-run/403/long-log.txt",
Content: []byte(longLog),
},
{
BucketName: "test-bucket",
Name: "logs/example-ci-run/403/junit_01.xml",
Content: []byte(`<testsuite tests="1017" failures="1017" time="0.016981535">
<testcase name="BeforeSuite" classname="Kubernetes e2e suite" time="0.006343795">
<failure type="Failure">
test/e2e/e2e.go:137 BeforeSuite on Node 1 failed test/e2e/e2e.go:137
</failure>
</testcase>
</testsuite>`),
},
{
BucketName: "test-bucket",
Name: "logs/example-ci-run/403/started.json",
Content: []byte(`{
"node": "gke-prow-default-pool-3c8994a8-qfhg",
"repo-version": "v1.12.0-alpha.0.985+e6f64d0a79243c",
"timestamp": 1528742858,
"repos": {
"k8s.io/kubernetes": "master",
"k8s.io/release": "master"
},
"version": "v1.12.0-alpha.0.985+e6f64d0a79243c",
"metadata": {
"pod": "cbc53d8e-6da7-11e8-a4ff-0a580a6c0269"
}
}`),
},
{
BucketName: "test-bucket",
Name: "logs/example-ci-run/403/finished.json",
Content: []byte(`{
"timestamp": 1528742943,
"version": "v1.12.0-alpha.0.985+e6f64d0a79243c",
"result": "SUCCESS",
"passed": true,
"job-version": "v1.12.0-alpha.0.985+e6f64d0a79243c",
"metadata": {
"repo": "k8s.io/kubernetes",
"repos": {
"k8s.io/kubernetes": "master",
"k8s.io/release": "master"
},
"infra-commit": "260081852",
"pod": "cbc53d8e-6da7-11e8-a4ff-0a580a6c0269",
"repo-commit": "e6f64d0a79243c834babda494151fc5d66582240"
},
},`),
},
{
BucketName: "test-bucket",
Name: "logs/symlink-party/123.txt",
Content: []byte(`gs://test-bucket/logs/the-actual-place/123`),
},
{
BucketName: "multi-container-one-log",
Name: "logs/job/123/test-1-build-log.txt",
Content: []byte("this log exists in gcs!"),
},
})
defer fakeGCSServer.Stop()
kc := fkc{
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Agent: prowapi.KubernetesAgent,
Job: "job",
},
Status: prowapi.ProwJobStatus{
PodName: "wowowow",
BuildID: "123",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Agent: prowapi.KubernetesAgent,
Job: "jib",
Cluster: "trusted",
},
Status: prowapi.ProwJobStatus{
PodName: "powowow",
BuildID: "123",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Agent: prowapi.KubernetesAgent,
Job: "example-ci-run",
PodSpec: &coreapi.PodSpec{
Containers: []coreapi.Container{
{
Image: "tester",
},
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "wowowow",
BuildID: "404",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Agent: prowapi.KubernetesAgent,
Job: "multiple-container-job",
PodSpec: &coreapi.PodSpec{
Containers: []coreapi.Container{
{
Name: "test-1",
},
{
Name: "test-2",
},
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "wowowow",
BuildID: "123",
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config)
fakeJa.Start()
os.Exit(m.Run())
}
type dumpLens struct{}
func (dumpLens) Config() lenses.LensConfig {
return lenses.LensConfig{
Name: "dump",
Title: "Dump View",
}
}
func (dumpLens) Header(artifacts []api.Artifact, resourceDir string, config json.RawMessage, spyglassConfig config.Spyglass) string {
return ""
}
func (dumpLens) Body(artifacts []api.Artifact, resourceDir string, data string, config json.RawMessage, spyglassConfig config.Spyglass) string {
var view []byte
for _, a := range artifacts {
data, err := a.ReadAll()
if err != nil {
logrus.WithError(err).Error("Error reading artifact")
continue
}
view = append(view, data...)
}
return string(view)
}
func (dumpLens) Callback(artifacts []api.Artifact, resourceDir string, data string, config json.RawMessage, spyglassConfig config.Spyglass) string {
return ""
}
func TestViews(t *testing.T) {
fakeGCSClient := fakeGCSServer.Client()
testCases := []struct {
name string
registeredViewers []lenses.Lens
lenses []int
expectedLensTitles []string
}{
{
name: "Spyglass basic test",
registeredViewers: []lenses.Lens{dumpLens{}},
lenses: []int{0},
expectedLensTitles: []string{"Dump View"},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
for _, l := range tc.registeredViewers {
lenses.RegisterLens(l)
}
c := fca{
c: config.Config{
ProwConfig: config.ProwConfig{
Deck: config.Deck{
Spyglass: config.Spyglass{
Lenses: []config.LensFileConfig{
{
Lens: config.LensConfig{
Name: "dump",
},
},
},
},
},
},
},
}
sg := New(context.Background(), fakeJa, c.Config, io.NewGCSOpener(fakeGCSClient), false)
_, ls := sg.Lenses(tc.lenses)
for _, l := range ls {
var found bool
for _, title := range tc.expectedLensTitles {
if title == l.Config().Title {
found = true
}
}
if !found {
t.Errorf("lens title %s not found in expected titles.", l.Config().Title)
}
}
for _, title := range tc.expectedLensTitles {
var found bool
for _, l := range ls {
if title == l.Config().Title {
found = true
}
}
if !found {
t.Errorf("expected title %s not found in produced lenses.", title)
}
}
})
}
}
func TestSplitSrc(t *testing.T) {
testCases := []struct {
name string
src string
expKeyType string
expKey string
expError bool
}{
{
name: "empty string",
src: "",
expError: true,
},
{
name: "missing key",
src: "gcs",
expError: true,
},
{
name: "prow key",
src: "prowjob/example-job-name/123456",
expKeyType: "prowjob",
expKey: "example-job-name/123456",
},
{
name: "gcs key",
src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159/",
expKeyType: "gcs",
expKey: "kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159/",
},
}
for _, tc := range testCases {
keyType, key, err := splitSrc(tc.src)
if tc.expError && err == nil {
t.Errorf("test %q expected error", tc.name)
}
if !tc.expError && err != nil {
t.Errorf("test %q encountered unexpected error: %v", tc.name, err)
}
if keyType != tc.expKeyType || key != tc.expKey {
t.Errorf("test %q: splitting src %q: Expected <%q, %q>, got <%q, %q>",
tc.name, tc.src, tc.expKeyType, tc.expKey, keyType, key)
}
}
}
func TestJobPath(t *testing.T) {
kc := fkc{
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PeriodicJob,
Job: "example-periodic-job",
DecorationConfig: &prowapi.DecorationConfig{
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "chum-bucket",
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "1111",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PresubmitJob,
Job: "example-presubmit-job",
DecorationConfig: &prowapi.DecorationConfig{
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "chum-bucket",
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "2222",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PresubmitJob,
Job: "undecorated-job",
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "1",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PresubmitJob,
Job: "missing-gcs-job",
DecorationConfig: &prowapi.DecorationConfig{},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "1",
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config)
fakeJa.Start()
testCases := []struct {
name string
src string
expJobPath string
expError bool
}{
{
name: "non-presubmit job in GCS with trailing /",
src: "gcs/kubernetes-jenkins/logs/example-job-name/123/",
expJobPath: "gs/kubernetes-jenkins/logs/example-job-name",
},
{
name: "non-presubmit job in GCS without trailing /",
src: "gcs/kubernetes-jenkins/logs/example-job-name/123",
expJobPath: "gs/kubernetes-jenkins/logs/example-job-name",
},
{
name: "presubmit job in GCS with trailing /",
src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159/",
expJobPath: "gs/kubernetes-jenkins/pr-logs/directory/example-job-name",
},
{
name: "presubmit job in GCS without trailing /",
src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159",
expJobPath: "gs/kubernetes-jenkins/pr-logs/directory/example-job-name",
},
{
name: "non-presubmit Prow job",
src: "prowjob/example-periodic-job/1111",
expJobPath: "gs/chum-bucket/logs/example-periodic-job",
},
{
name: "Prow presubmit job",
src: "prowjob/example-presubmit-job/2222",
expJobPath: "gs/chum-bucket/pr-logs/directory/example-presubmit-job",
},
{
name: "nonexistent job",
src: "prowjob/example-periodic-job/0000",
expError: true,
},
{
name: "invalid key type",
src: "oh/my/glob/drama/bomb",
expError: true,
},
{
name: "invalid GCS path",
src: "gcs/kubernetes-jenkins/bad-path",
expError: true,
},
{
name: "job missing decoration",
src: "prowjob/undecorated-job/1",
expError: true,
},
{
name: "job missing GCS config",
src: "prowjob/missing-gcs-job/1",
expError: true,
},
}
for _, tc := range testCases {
fakeGCSClient := fakeGCSServer.Client()
fakeOpener := io.NewGCSOpener(fakeGCSClient)
fca := config.Agent{}
sg := New(context.Background(), fakeJa, fca.Config, fakeOpener, false)
jobPath, err := sg.JobPath(tc.src)
if tc.expError && err == nil {
t.Errorf("test %q: JobPath(%q) expected error", tc.name, tc.src)
continue
}
if !tc.expError && err != nil {
t.Errorf("test %q: JobPath(%q) returned unexpected error %v", tc.name, tc.src, err)
continue
}
if jobPath != tc.expJobPath {
t.Errorf("test %q: JobPath(%q) expected %q, got %q", tc.name, tc.src, tc.expJobPath, jobPath)
}
}
}
func TestProwJobName(t *testing.T) {
kc := fkc{
prowapi.ProwJob{
ObjectMeta: metav1.ObjectMeta{Name: "flying-whales-1"},
Spec: prowapi.ProwJobSpec{
Type: prowapi.PeriodicJob,
Job: "example-periodic-job",
DecorationConfig: &prowapi.DecorationConfig{
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "chum-bucket",
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "1111",
},
},
prowapi.ProwJob{
ObjectMeta: metav1.ObjectMeta{Name: "flying-whales-2"},
Spec: prowapi.ProwJobSpec{
Type: prowapi.PresubmitJob,
Job: "example-presubmit-job",
DecorationConfig: &prowapi.DecorationConfig{
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "chum-bucket",
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "2222",
},
},
prowapi.ProwJob{
ObjectMeta: metav1.ObjectMeta{Name: "flying-whales-3"},
Spec: prowapi.ProwJobSpec{
Type: prowapi.PresubmitJob,
Job: "undecorated-job",
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "1",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PresubmitJob,
Job: "missing-name-job",
DecorationConfig: &prowapi.DecorationConfig{},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "1",
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config)
fakeJa.Start()
testCases := []struct {
name string
src string
expJobPath string
expError bool
}{
{
name: "non-presubmit job in GCS without trailing /",
src: "gcs/kubernetes-jenkins/logs/example-periodic-job/1111/",
expJobPath: "flying-whales-1",
},
{
name: "presubmit job in GCS with trailing /",
src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-presubmit-job/2222/",
expJobPath: "flying-whales-2",
},
{
name: "non-presubmit Prow job",
src: "prowjob/example-periodic-job/1111",
expJobPath: "flying-whales-1",
},
{
name: "Prow presubmit job",<|fim▁hole|> src: "prowjob/example-presubmit-job/2222",
expJobPath: "flying-whales-2",
},
{
name: "nonexistent job",
src: "prowjob/example-periodic-job/0000",
expJobPath: "",
},
{
name: "job missing name",
src: "prowjob/missing-name-job/1",
expJobPath: "",
},
{
name: "previously invalid key type is now valid but nonexistent",
src: "oh/my/glob/drama/bomb",
expJobPath: "",
},
{
name: "invalid GCS path",
src: "gcs/kubernetes-jenkins/bad-path",
expError: true,
},
}
for _, tc := range testCases {
fakeGCSClient := fakeGCSServer.Client()
fakeOpener := io.NewGCSOpener(fakeGCSClient)
fca := config.Agent{}
sg := New(context.Background(), fakeJa, fca.Config, fakeOpener, false)
jobPath, err := sg.ProwJobName(tc.src)
if tc.expError && err == nil {
t.Errorf("test %q: JobPath(%q) expected error", tc.name, tc.src)
continue
}
if !tc.expError && err != nil {
t.Errorf("test %q: JobPath(%q) returned unexpected error %v", tc.name, tc.src, err)
continue
}
if jobPath != tc.expJobPath {
t.Errorf("test %q: JobPath(%q) expected %q, got %q", tc.name, tc.src, tc.expJobPath, jobPath)
}
}
}
func TestRunPath(t *testing.T) {
kc := fkc{
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PeriodicJob,
Job: "example-periodic-job",
DecorationConfig: &prowapi.DecorationConfig{
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "chum-bucket",
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "1111",
URL: "http://magic/view/gcs/chum-bucket/logs/example-periodic-job/1111",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PresubmitJob,
Job: "example-presubmit-job",
DecorationConfig: &prowapi.DecorationConfig{
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "chum-bucket",
},
},
Refs: &prowapi.Refs{
Org: "some-org",
Repo: "some-repo",
Pulls: []prowapi.Pull{
{
Number: 42,
},
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "2222",
URL: "http://magic/view/gcs/chum-bucket/pr-logs/pull/some-org_some-repo/42/example-presubmit-job/2222",
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config)
fakeJa.Start()
testCases := []struct {
name string
src string
expRunPath string
expError bool
}{
{
name: "non-presubmit job in GCS with trailing /",
src: "gcs/kubernetes-jenkins/logs/example-job-name/123/",
expRunPath: "kubernetes-jenkins/logs/example-job-name/123",
},
{
name: "non-presubmit job in GCS without trailing /",
src: "gcs/kubernetes-jenkins/logs/example-job-name/123",
expRunPath: "kubernetes-jenkins/logs/example-job-name/123",
},
{
name: "presubmit job in GCS with trailing /",
src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159/",
expRunPath: "kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159",
},
{
name: "presubmit job in GCS without trailing /",
src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159",
expRunPath: "kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159",
},
{
name: "non-presubmit Prow job",
src: "prowjob/example-periodic-job/1111",
expRunPath: "chum-bucket/logs/example-periodic-job/1111",
},
{
name: "Prow presubmit job with full path",
src: "prowjob/example-presubmit-job/2222",
expRunPath: "chum-bucket/pr-logs/pull/some-org_some-repo/42/example-presubmit-job/2222",
},
{
name: "nonexistent job",
src: "prowjob/example-periodic-job/0000",
expError: true,
},
{
name: "previously invalid key type is now valid",
src: "oh/my/glob/drama/bomb",
expRunPath: "my/glob/drama/bomb",
},
{
name: "nonsense string errors",
src: "this is not useful",
expError: true,
},
}
for _, tc := range testCases {
fakeGCSClient := fakeGCSServer.Client()
fakeOpener := io.NewGCSOpener(fakeGCSClient)
fca := config.Agent{}
fca.Set(&config.Config{
ProwConfig: config.ProwConfig{
Plank: config.Plank{
JobURLPrefixConfig: map[string]string{"*": "http://magic/view/gcs/"},
},
},
})
sg := New(context.Background(), fakeJa, fca.Config, fakeOpener, false)
jobPath, err := sg.RunPath(tc.src)
if tc.expError && err == nil {
t.Errorf("test %q: RunPath(%q) expected error, got %q", tc.name, tc.src, jobPath)
continue
}
if !tc.expError && err != nil {
t.Errorf("test %q: RunPath(%q) returned unexpected error %v", tc.name, tc.src, err)
continue
}
if jobPath != tc.expRunPath {
t.Errorf("test %q: RunPath(%q) expected %q, got %q", tc.name, tc.src, tc.expRunPath, jobPath)
}
}
}
func TestRunToPR(t *testing.T) {
kc := fkc{
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PeriodicJob,
Job: "example-periodic-job",
DecorationConfig: &prowapi.DecorationConfig{
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "chum-bucket",
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "1111",
URL: "http://magic/view/gcs/chum-bucket/logs/example-periodic-job/1111",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Type: prowapi.PresubmitJob,
Job: "example-presubmit-job",
DecorationConfig: &prowapi.DecorationConfig{
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "chum-bucket",
},
},
Refs: &prowapi.Refs{
Org: "some-org",
Repo: "some-repo",
Pulls: []prowapi.Pull{
{
Number: 42,
},
},
},
},
Status: prowapi.ProwJobStatus{
PodName: "flying-whales",
BuildID: "2222",
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config)
fakeJa.Start()
testCases := []struct {
name string
src string
expOrg string
expRepo string
expNumber int
expError bool
}{
{
name: "presubmit job in GCS with trailing /",
src: "gcs/kubernetes-jenkins/pr-logs/pull/Katharine_test-infra/1234/example-job-name/314159/",
expOrg: "Katharine",
expRepo: "test-infra",
expNumber: 1234,
},
{
name: "presubmit job in GCS without trailing /",
src: "gcs/kubernetes-jenkins/pr-logs/pull/Katharine_test-infra/1234/example-job-name/314159",
expOrg: "Katharine",
expRepo: "test-infra",
expNumber: 1234,
},
{
name: "presubmit job in GCS without org name",
src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/2345/example-job-name/314159",
expOrg: "kubernetes",
expRepo: "test-infra",
expNumber: 2345,
},
{
name: "presubmit job in GCS without org or repo name",
src: "gcs/kubernetes-jenkins/pr-logs/pull/3456/example-job-name/314159",
expOrg: "kubernetes",
expRepo: "kubernetes",
expNumber: 3456,
},
{
name: "Prow presubmit job",
src: "prowjob/example-presubmit-job/2222",
expOrg: "some-org",
expRepo: "some-repo",
expNumber: 42,
},
{
name: "Prow periodic job errors",
src: "prowjob/example-periodic-job/1111",
expError: true,
},
{
name: "GCS periodic job errors",
src: "gcs/kuberneretes-jenkins/logs/example-periodic-job/1111",
expError: true,
},
{
name: "GCS job with non-numeric PR number errors",
src: "gcs/kubernetes-jenkins/pr-logs/pull/asdf/example-job-name/314159",
expError: true,
},
{
name: "GCS PR job in directory errors",
src: "gcs/kubernetes-jenkins/pr-logs/directory/example-job-name/314159",
expError: true,
},
{
name: "Bad GCS key errors",
src: "gcs/this is just nonsense",
expError: true,
},
{
name: "Longer bad GCS key errors",
src: "gcs/kubernetes-jenkins/pr-logs",
expError: true,
},
{
name: "Nonsense string errors",
src: "friendship is magic",
expError: true,
},
}
for _, tc := range testCases {
fakeGCSClient := fakeGCSServer.Client()
fca := config.Agent{}
fca.Set(&config.Config{
ProwConfig: config.ProwConfig{
Plank: config.Plank{
DefaultDecorationConfigs: config.DefaultDecorationMapToSliceTesting(
map[string]*prowapi.DecorationConfig{
"*": {
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "kubernetes-jenkins",
DefaultOrg: "kubernetes",
DefaultRepo: "kubernetes",
PathStrategy: "legacy",
},
},
}),
},
},
})
sg := New(context.Background(), fakeJa, fca.Config, io.NewGCSOpener(fakeGCSClient), false)
org, repo, num, err := sg.RunToPR(tc.src)
if tc.expError && err == nil {
t.Errorf("test %q: RunToPR(%q) expected error", tc.name, tc.src)
continue
}
if !tc.expError && err != nil {
t.Errorf("test %q: RunToPR(%q) returned unexpected error %v", tc.name, tc.src, err)
continue
}
if org != tc.expOrg || repo != tc.expRepo || num != tc.expNumber {
t.Errorf("test %q: RunToPR(%q) expected %s/%s#%d, got %s/%s#%d", tc.name, tc.src, tc.expOrg, tc.expRepo, tc.expNumber, org, repo, num)
}
}
}
func TestProwToGCS(t *testing.T) {
testCases := []struct {
name string
key string
configPrefix string
expectedPath string
expectError bool
}{
{
name: "extraction from gubernator-like URL",
key: "gubernator-job/1111",
configPrefix: "https://gubernator.example.com/build/",
expectedPath: "some-bucket/gubernator-job/1111/",
expectError: false,
},
{
name: "extraction from spyglass-like URL",
key: "spyglass-job/2222",
configPrefix: "https://prow.example.com/view/gcs/",
expectedPath: "some-bucket/spyglass-job/2222/",
expectError: false,
},
{
name: "failed extraction from wrong URL",
key: "spyglass-job/1111",
configPrefix: "https://gubernator.example.com/build/",
expectedPath: "",
expectError: true,
},
{
name: "prefix longer than URL",
key: "spyglass-job/2222",
configPrefix: strings.Repeat("!", 100),
expectError: true,
},
}
for _, tc := range testCases {
kc := fkc{
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Job: "gubernator-job",
},
Status: prowapi.ProwJobStatus{
URL: "https://gubernator.example.com/build/some-bucket/gubernator-job/1111/",
BuildID: "1111",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Job: "spyglass-job",
},
Status: prowapi.ProwJobStatus{
URL: "https://prow.example.com/view/gcs/some-bucket/spyglass-job/2222/",
BuildID: "2222",
},
},
}
fakeGCSClient := fakeGCSServer.Client()
fakeConfigAgent := fca{
c: config.Config{
ProwConfig: config.ProwConfig{
Plank: config.Plank{
JobURLPrefixConfig: map[string]string{"*": tc.configPrefix},
},
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config)
fakeJa.Start()
sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(fakeGCSClient), false)
_, p, err := sg.prowToGCS(tc.key)
if err != nil && !tc.expectError {
t.Errorf("test %q: unexpected error: %v", tc.key, err)
continue
}
if err == nil && tc.expectError {
t.Errorf("test %q: expected an error but instead got success and path '%s'", tc.key, p)
continue
}
if p != tc.expectedPath {
t.Errorf("test %q: expected '%s' but got '%s'", tc.key, tc.expectedPath, p)
}
}
}
func TestGCSPathRoundTrip(t *testing.T) {
testCases := []struct {
name string
pathStrategy string
defaultOrg string
defaultRepo string
org string
repo string
}{
{
name: "simple explicit path",
pathStrategy: "explicit",
org: "test-org",
repo: "test-repo",
},
{
name: "explicit path with underscores",
pathStrategy: "explicit",
org: "test-org",
repo: "underscore_repo",
},
{
name: "'single' path with default repo",
pathStrategy: "single",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "default-org",
repo: "default-repo",
},
{
name: "'single' path with non-default repo",
pathStrategy: "single",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "default-org",
repo: "random-repo",
},
{
name: "'single' path with non-default org but default repo",
pathStrategy: "single",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "random-org",
repo: "default-repo",
},
{
name: "'single' path with non-default org and repo",
pathStrategy: "single",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "random-org",
repo: "random-repo",
},
{
name: "legacy path with default repo",
pathStrategy: "legacy",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "default-org",
repo: "default-repo",
},
{
name: "legacy path with non-default repo",
pathStrategy: "legacy",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "default-org",
repo: "random-repo",
},
{
name: "legacy path with non-default org but default repo",
pathStrategy: "legacy",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "random-org",
repo: "default-repo",
},
{
name: "legacy path with non-default org and repo",
pathStrategy: "legacy",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "random-org",
repo: "random-repo",
},
{
name: "legacy path with non-default org and repo with underscores",
pathStrategy: "legacy",
defaultOrg: "default-org",
defaultRepo: "default-repo",
org: "random-org",
repo: "underscore_repo",
},
}
for _, tc := range testCases {
kc := fkc{}
fakeConfigAgent := fca{
c: config.Config{
ProwConfig: config.ProwConfig{
Plank: config.Plank{
DefaultDecorationConfigs: config.DefaultDecorationMapToSliceTesting(
map[string]*prowapi.DecorationConfig{
"*": {
GCSConfiguration: &prowapi.GCSConfiguration{
DefaultOrg: tc.defaultOrg,
DefaultRepo: tc.defaultRepo,
},
},
}),
},
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config)
fakeJa.Start()
fakeGCSClient := fakeGCSServer.Client()
sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(fakeGCSClient), false)
gcspath, _, _ := gcsupload.PathsForJob(
&prowapi.GCSConfiguration{Bucket: "test-bucket", PathStrategy: tc.pathStrategy},
&downwardapi.JobSpec{
Job: "test-job",
BuildID: "1234",
Type: prowapi.PresubmitJob,
Refs: &prowapi.Refs{
Org: tc.org, Repo: tc.repo,
Pulls: []prowapi.Pull{{Number: 42}},
},
}, "")
fmt.Println(gcspath)
org, repo, prnum, err := sg.RunToPR("gcs/test-bucket/" + gcspath)
if err != nil {
t.Errorf("unexpected error: %v", err)
continue
}
if org != tc.org || repo != tc.repo || prnum != 42 {
t.Errorf("expected %s/%s#42, got %s/%s#%d", tc.org, tc.repo, org, repo, prnum)
}
}
}
func TestTestGridLink(t *testing.T) {
testCases := []struct {
name string
src string
expQuery string
expError bool
}{
{
name: "non-presubmit job in GCS with trailing /",
src: "gcs/kubernetes-jenkins/logs/periodic-job/123/",
expQuery: "some-dashboard#periodic",
},
{
name: "non-presubmit job in GCS without trailing /",
src: "gcs/kubernetes-jenkins/logs/periodic-job/123",
expQuery: "some-dashboard#periodic",
},
{
name: "presubmit job in GCS",
src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/presubmit-job/314159/",
expQuery: "some-dashboard#presubmit",
},
{
name: "non-presubmit Prow job",
src: "prowjob/periodic-job/1111",
expQuery: "some-dashboard#periodic",
},
{
name: "presubmit Prow job",
src: "prowjob/presubmit-job/2222",
expQuery: "some-dashboard#presubmit",
},
{
name: "nonexistent job",
src: "prowjob/nonexistent-job/0000",
expError: true,
},
{
name: "invalid key type",
src: "oh/my/glob/drama/bomb",
expError: true,
},
{
name: "nonsense string errors",
src: "this is not useful",
expError: true,
},
}
kc := fkc{}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config)
fakeJa.Start()
tg := TestGrid{c: &tgconf.Configuration{
Dashboards: []*tgconf.Dashboard{
{
Name: "some-dashboard",
DashboardTab: []*tgconf.DashboardTab{
{
Name: "periodic",
TestGroupName: "periodic-job",
},
{
Name: "presubmit",
TestGroupName: "presubmit-job",
},
{
Name: "some-other-job",
TestGroupName: "some-other-job",
},
},
},
},
}}
for _, tc := range testCases {
fakeGCSClient := fakeGCSServer.Client()
fca := config.Agent{}
fca.Set(&config.Config{
ProwConfig: config.ProwConfig{
Deck: config.Deck{
Spyglass: config.Spyglass{
TestGridRoot: "https://testgrid.com/",
},
},
},
})
sg := New(context.Background(), fakeJa, fca.Config, io.NewGCSOpener(fakeGCSClient), false)
sg.testgrid = &tg
link, err := sg.TestGridLink(tc.src)
if tc.expError {
if err == nil {
t.Errorf("test %q: TestGridLink(%q) expected error, got %q", tc.name, tc.src, link)
}
continue
}
if err != nil {
t.Errorf("test %q: TestGridLink(%q) returned unexpected error %v", tc.name, tc.src, err)
continue
}
if link != "https://testgrid.com/"+tc.expQuery {
t.Errorf("test %q: TestGridLink(%q) expected %q, got %q", tc.name, tc.src, "https://testgrid.com/"+tc.expQuery, link)
}
}
}
func TestFetchArtifactsPodLog(t *testing.T) {
kc := fkc{
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Agent: prowapi.KubernetesAgent,
Job: "job",
},
Status: prowapi.ProwJobStatus{
PodName: "wowowow",
BuildID: "123",
URL: "https://gubernator.example.com/build/job/123",
},
},
prowapi.ProwJob{
Spec: prowapi.ProwJobSpec{
Agent: prowapi.KubernetesAgent,
Job: "multi-container-one-log",
},
Status: prowapi.ProwJobStatus{
PodName: "wowowow",
BuildID: "123",
URL: "https://gubernator.example.com/build/multi-container/123",
},
},
}
fakeConfigAgent := fca{
c: config.Config{
ProwConfig: config.ProwConfig{
Deck: config.Deck{
AllKnownStorageBuckets: sets.NewString("job", "kubernetes-jenkins", "multi-container-one-log"),
},
Plank: config.Plank{
JobURLPrefixConfig: map[string]string{"*": "https://gubernator.example.com/build/"},
},
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config)
fakeJa.Start()
fakeGCSClient := fakeGCSServer.Client()
sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(fakeGCSClient), false)
testKeys := []string{
"prowjob/job/123",
"gcs/kubernetes-jenkins/logs/job/123/",
"gcs/kubernetes-jenkins/logs/job/123",
}
for _, key := range testKeys {
result, err := sg.FetchArtifacts(context.Background(), key, "", 500e6, []string{"build-log.txt"})
if err != nil {
t.Errorf("Unexpected error grabbing pod log for %s: %v", key, err)
continue
}
if len(result) != 1 {
t.Errorf("Expected 1 artifact for %s, got %d", key, len(result))
continue
}
content, err := result[0].ReadAll()
if err != nil {
t.Errorf("Unexpected error reading pod log for %s: %v", key, err)
continue
}
if string(content) != fmt.Sprintf("clusterA.%s", kube.TestContainerName) {
t.Errorf("Bad pod log content for %s: %q (expected 'clusterA')", key, content)
}
}
multiContainerOneLogKey := "gcs/multi-container-one-log/logs/job/123"
testKeys = append(testKeys, multiContainerOneLogKey)
for _, key := range testKeys {
containers := []string{"test-1", "test-2"}
result, err := sg.FetchArtifacts(context.Background(), key, "", 500e6, []string{fmt.Sprintf("%s-%s", containers[0], singleLogName), fmt.Sprintf("%s-%s", containers[1], singleLogName)})
if err != nil {
t.Errorf("Unexpected error grabbing pod log for %s: %v", key, err)
continue
}
for i, art := range result {
content, err := art.ReadAll()
if err != nil {
t.Errorf("Unexpected error reading pod log for %s: %v", key, err)
continue
}
expected := fmt.Sprintf("clusterA.%s", containers[i])
if key == multiContainerOneLogKey && containers[i] == "test-1" {
expected = "this log exists in gcs!"
}
if string(content) != expected {
t.Errorf("Bad pod log content for %s: %q (expected '%s')", key, content, expected)
}
}
}
}
func TestKeyToJob(t *testing.T) {
testCases := []struct {
name string
path string
jobName string
buildID string
expectErr bool
}{
{
name: "GCS periodic path with trailing slash",
path: "gcs/kubernetes-jenkins/logs/periodic-kubernetes-bazel-test-1-14/40/",
jobName: "periodic-kubernetes-bazel-test-1-14",
buildID: "40",
},
{
name: "GCS periodic path without trailing slash",
path: "gcs/kubernetes-jenkins/logs/periodic-kubernetes-bazel-test-1-14/40",
jobName: "periodic-kubernetes-bazel-test-1-14",
buildID: "40",
},
{
name: "GCS PR path with trailing slash",
path: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/11573/pull-test-infra-bazel/25366/",
jobName: "pull-test-infra-bazel",
buildID: "25366",
},
{
name: "GCS PR path without trailing slash",
path: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/11573/pull-test-infra-bazel/25366",
jobName: "pull-test-infra-bazel",
buildID: "25366",
},
{
name: "Prowjob path with trailing slash",
path: "prowjob/pull-test-infra-bazel/25366/",
jobName: "pull-test-infra-bazel",
buildID: "25366",
},
{
name: "Prowjob path without trailing slash",
path: "prowjob/pull-test-infra-bazel/25366",
jobName: "pull-test-infra-bazel",
buildID: "25366",
},
{
name: "Path with only one component",
path: "nope",
expectErr: true,
},
}
for _, tc := range testCases {
jobName, buildID, err := common.KeyToJob(tc.path)
if err != nil {
if !tc.expectErr {
t.Errorf("%s: unexpected error %v", tc.name, err)
}
continue
}
if tc.expectErr {
t.Errorf("%s: expected an error, but got result %s #%s", tc.name, jobName, buildID)
continue
}
if jobName != tc.jobName {
t.Errorf("%s: expected job name %q, but got %q", tc.name, tc.jobName, jobName)
continue
}
if buildID != tc.buildID {
t.Errorf("%s: expected build ID %q, but got %q", tc.name, tc.buildID, buildID)
}
}
}
func TestResolveSymlink(t *testing.T) {
testCases := []struct {
name string
path string
result string
expectErr bool
}{
{
name: "symlink without trailing slash is resolved",
path: "gcs/test-bucket/logs/symlink-party/123",
result: "gs/test-bucket/logs/the-actual-place/123",
},
{
name: "symlink with trailing slash is resolved",
path: "gcs/test-bucket/logs/symlink-party/123/",
result: "gs/test-bucket/logs/the-actual-place/123",
},
{
name: "non-symlink without trailing slash is unchanged",
path: "gcs/test-bucket/better-logs/42",
result: "gs/test-bucket/better-logs/42",
},
{
name: "non-symlink with trailing slash drops the slash",
path: "gcs/test-bucket/better-logs/42/",
result: "gs/test-bucket/better-logs/42",
},
{
name: "prowjob without trailing slash is unchanged",
path: "prowjob/better-logs/42",
result: "prowjob/better-logs/42",
},
{
name: "prowjob with trailing slash drops the slash",
path: "prowjob/better-logs/42/",
result: "prowjob/better-logs/42",
},
{
name: "unknown key type is an error",
path: "wtf/what-is-this/send-help",
expectErr: true,
},
{
name: "insufficient path components are an error",
path: "gcs/hi",
expectErr: true,
},
}
for _, tc := range testCases {
fakeConfigAgent := fca{}
fakeJa = jobs.NewJobAgent(context.Background(), fkc{}, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config)
fakeJa.Start()
fakeGCSClient := fakeGCSServer.Client()
sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(fakeGCSClient), false)
result, err := sg.ResolveSymlink(tc.path)
if err != nil {
if !tc.expectErr {
t.Errorf("test %q: unexpected error: %v", tc.name, err)
}
continue
}
if tc.expectErr {
t.Errorf("test %q: expected an error, but got result %q", tc.name, result)
continue
}
if result != tc.result {
t.Errorf("test %q: expected %q, but got %q", tc.name, tc.result, result)
continue
}
}
}
func TestExtraLinks(t *testing.T) {
testCases := []struct {
name string
content string
links []ExtraLink
expectErr bool
}{
{
name: "does nothing without error given no started.json",
links: nil,
},
{
name: "errors given a malformed started.json",
content: "this isn't json",
expectErr: true,
},
{
name: "does nothing given metadata with no links",
content: `{"metadata": {"somethingThatIsntLinks": 23}}`,
links: nil,
},
{
name: "returns well-formed links",
content: `{"metadata": {"links": {"ResultStore": {"url": "http://resultstore", "description": "The thing that isn't spyglass"}}}}`,
links: []ExtraLink{{Name: "ResultStore", URL: "http://resultstore", Description: "The thing that isn't spyglass"}},
},
{
name: "returns links without a description",
content: `{"metadata": {"links": {"ResultStore": {"url": "http://resultstore"}}}}`,
links: []ExtraLink{{Name: "ResultStore", URL: "http://resultstore"}},
},
{
name: "skips links without a URL",
content: `{"metadata": {"links": {"No Link": {"description": "bad link"}, "ResultStore": {"url": "http://resultstore"}}}}`,
links: []ExtraLink{{Name: "ResultStore", URL: "http://resultstore"}},
},
{
name: "skips links without a name",
content: `{"metadata": {"links": {"": {"url": "http://resultstore"}}}}`,
links: []ExtraLink{},
},
{
name: "returns no links when links is empty",
content: `{"metadata": {"links": {}}}`,
links: []ExtraLink{},
},
{
name: "returns multiple links",
content: `{"metadata": {"links": {"A": {"url": "http://a", "description": "A!"}, "B": {"url": "http://b"}}}}`,
links: []ExtraLink{{Name: "A", URL: "http://a", Description: "A!"}, {Name: "B", URL: "http://b"}},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
var objects []fakestorage.Object
if tc.content != "" {
objects = []fakestorage.Object{
{
BucketName: "test-bucket",
Name: "logs/some-job/42/started.json",
Content: []byte(tc.content),
},
}
}
gcsServer := fakestorage.NewServer(objects)
defer gcsServer.Stop()
gcsClient := gcsServer.Client()
fakeConfigAgent := fca{
c: config.Config{
ProwConfig: config.ProwConfig{
Deck: config.Deck{
AllKnownStorageBuckets: sets.NewString("test-bucket"),
},
},
},
}
fakeJa = jobs.NewJobAgent(context.Background(), fkc{}, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config)
fakeJa.Start()
sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(gcsClient), false)
result, err := sg.ExtraLinks(context.Background(), "gcs/test-bucket/logs/some-job/42")
if err != nil {
if !tc.expectErr {
t.Fatalf("unexpected error: %v", err)
}
return
}
sort.Slice(result, func(i, j int) bool { return result[i].Name < result[j].Name })
sort.Slice(tc.links, func(i, j int) bool { return tc.links[i].Name < tc.links[j].Name })
if !reflect.DeepEqual(result, tc.links) {
t.Fatalf("Expected links %#v, got %#v", tc.links, result)
}
})
}
}<|fim▁end|>
| |
<|file_name|>generate_metadata_pkl.py<|end_file_name|><|fim▁begin|>import argparse
import glob
import re
import cPickle as pickle
from dicom.sequence import Sequence
from log import print_to_file
from paths import LOGS_PATH, TRAIN_DATA_PATH, TEST_DATA_PATH
def read_slice(path):
return pickle.load(open(path))['data']
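# Coerce a DICOM value to int or float when possible; otherwise return it unchanged.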
def convert_to_number(value):
value = str(value)
try:
if "." in value:
return float(value)
else:
return int(value)
except:
pass
return value
def clean_metadata(metadatadict):
# Do cleaning
keys = sorted(list(metadatadict.keys()))
for key in keys:
value = metadatadict[key]
if key == 'PatientAge':
metadatadict[key] = int(value[:-1])
if key == 'PatientSex':
metadatadict[key] = 1 if value == 'F' else -1
else:
if isinstance(value, Sequence):
#convert to list
value = [i for i in value]
if isinstance(value, (list,)):
metadatadict[key] = [convert_to_number(i) for i in value]
else:
metadatadict[key] = convert_to_number(value)
return metadatadict
def read_metadata(path):
d = pickle.load(open(path))['metadata'][0]
metadata = clean_metadata(d)
return metadata
def get_patient_data(patient_data_path):
patient_data = []
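    # sort slice pickles numerically by the index embedded in the filename (e.g. sax_12.pkl -> 12)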
spaths = sorted(glob.glob(patient_data_path + r'/*.pkl'),
key=lambda x: int(re.search(r'/*_(\d+)\.pkl$', x).group(1)))
pid = re.search(r'/(\d+)/study$', patient_data_path).group(1)
for s in spaths:
slice_id = re.search(r'/(((4ch)|(2ch)|(sax))_\d+\.pkl)$', s).group(1)
metadata = read_metadata(s)
patient_data.append({'metadata': metadata,
'slice_id': slice_id})
print slice_id
return patient_data, pid
def get_metadata(data_path):
patient_paths = sorted(glob.glob(data_path + '*/study'))
metadata_dict = {}
for p in patient_paths:
patient_data, pid = get_patient_data(p)
print "patient", pid
metadata_dict[pid] = dict()
for pd in patient_data:
metadata_dict[pid][pd['slice_id']] = pd['metadata']
<|fim▁hole|> filename = data_path.split('/')[-2] + '_metadata.pkl'
with open(filename, 'w') as f:
pickle.dump(metadata_dict, f)
print 'saved to ', filename
return metadata_dict
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
required = parser.add_argument_group('required arguments')
#required.add_argument('-c', '--config',
# help='configuration to run',
# required=True)
args = parser.parse_args()
data_paths = [TRAIN_DATA_PATH, TEST_DATA_PATH]
log_path = LOGS_PATH + "generate_metadata.log"
with print_to_file(log_path):
for d in data_paths:
get_metadata(d)
print "log saved to '%s'" % log_path<|fim▁end|>
| |
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import "react-hot-loader/patch";
let seed: number | null = null;
const queryParamMatch = window.location.search.slice(1).match(/seed=(\d+)/);
if (queryParamMatch != null) {
seed = parseInt(queryParamMatch[1], 10);
}
function isNumeric(n: any): n is number {
return !isNaN(parseFloat(n)) && isFinite(n);
}
if (!isNumeric(seed)) {
seed = Date.now();
}
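// Deterministic PRNG: Math.random() is overridden so runs are reproducible for a given ?seed= value.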
function random() {
const x = Math.sin(seed++ + 0.59322) * 10000;
return x - Math.floor(x);
}
Math.random = random;
import * as _ from "lodash";
import * as Perf from "react-addons-perf";
import * as ReactDOM from "react-dom";
import * as React from "react";
import { AppContainer } from "react-hot-loader";
import { Provider } from "react-redux";
import * as Redux from "redux";
import { batchedSubscribe } from "redux-batched-subscribe";
import thunk from "redux-thunk";
import { Main as InitialMain } from "components/main";
import reducer from "reducer";
import { IState } from "state";
import { buildInitialState } from "initialState";
import DEBUG_FLAGS from "debugFlags";
import "./index.less";
(window as any).Perf = Perf;
// add redux devtools reporting if debug query param exists
const compose = DEBUG_FLAGS.debug ? (window as any).__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ : Redux.compose;
const storeEnhancer: Redux.GenericStoreEnhancer = compose(
Redux.applyMiddleware(
thunk
),
// only notify subscriptions (aka react-redux) to update in a debounce loop to prevent intermediate renders from
// redux-thunk
batchedSubscribe(_.debounce((notify: any) => notify()))
);
const store = Redux.createStore<IState>(reducer as Redux.Reducer<IState>, buildInitialState(), storeEnhancer);
const root = document.createElement("div");
root.id = "root";
document.body.appendChild(root);
function renderMain(MainComponent: any) {
ReactDOM.render(
<AppContainer>
<Provider store={store}>
<MainComponent />
</Provider>
</AppContainer>,
root
);
}
renderMain(InitialMain);<|fim▁hole|>
declare var module: any;
declare var require: any;
if (module.hot) {
module.hot.accept("components/main", () => {
const NextMain = require("components/main").Main;
renderMain(NextMain);
});
}<|fim▁end|>
| |
<|file_name|>test_workflow_history.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.models import Permission
from django.test import TestCase
from django.urls import reverse
from wagtail.core.models import Page
from wagtail.tests.utils import WagtailTestUtils
class TestWorkflowHistoryDetail(TestCase, WagtailTestUtils):<|fim▁hole|> self.user = self.create_test_user()
self.login(self.user)
self.christmas_event = Page.objects.get(url_path='/home/events/christmas/')
self.christmas_event.save_revision()
workflow = self.christmas_event.get_workflow()
self.workflow_state = workflow.start(self.christmas_event, self.user)
def test_get_index(self):
response = self.client.get(
reverse('wagtailadmin_pages:workflow_history', args=[self.christmas_event.id])
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, reverse('wagtailadmin_pages:edit', args=[self.christmas_event.id]))
self.assertContains(response, reverse('wagtailadmin_pages:workflow_history_detail', args=[self.christmas_event.id, self.workflow_state.id]))
def test_get_index_with_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
response = self.client.get(
reverse('wagtailadmin_pages:workflow_history', args=[self.christmas_event.id])
)
self.assertEqual(response.status_code, 302)
def test_get_detail(self):
response = self.client.get(
reverse('wagtailadmin_pages:workflow_history_detail', args=[self.christmas_event.id, self.workflow_state.id])
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, reverse('wagtailadmin_pages:edit', args=[self.christmas_event.id]))
self.assertContains(response, reverse('wagtailadmin_pages:workflow_history', args=[self.christmas_event.id]))
def test_get_detail_with_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
response = self.client.get(
reverse('wagtailadmin_pages:workflow_history_detail', args=[self.christmas_event.id, self.workflow_state.id])
)
self.assertEqual(response.status_code, 302)<|fim▁end|>
|
fixtures = ['test.json']
def setUp(self):
|
<|file_name|>PlainTextView.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.lang.ant.config.execution;
import com.intellij.execution.filters.Filter;
import com.intellij.execution.filters.OpenFileHyperlinkInfo;
import com.intellij.execution.filters.TextConsoleBuilder;
import com.intellij.execution.filters.TextConsoleBuilderFactory;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.File;
import java.io.OutputStream;
public final class PlainTextView implements AntOutputView {
private final ConsoleView myConsole;
private final Project myProject;
private String myCommandLine;
private final LightProcessHandler myProcessHandler = new LightProcessHandler();
public PlainTextView(Project project) {
myProject = project;
TextConsoleBuilder builder = TextConsoleBuilderFactory.getInstance().createBuilder(project);
builder.addFilter(new AntMessageFilter());
builder.addFilter(new JUnitFilter());<|fim▁hole|>
public void dispose() {
Disposer.dispose(myConsole);
}
@Override
public String getId() {
return "_text_view_";
}
@Override
public JComponent getComponent() {
return myConsole.getComponent();
}
@Override
@Nullable
public Object addMessage(AntMessage message) {
print(message.getText() + "\n", ProcessOutputTypes.STDOUT);
return null;
}
private void print(String text, Key type) {
myProcessHandler.notifyTextAvailable(text, type);
}
public void addMessages(AntMessage[] messages) {
for (AntMessage message : messages) {
addMessage(message);
}
}
@Override
public void addJavacMessage(AntMessage message, String url) {
if (message.getLine() > 0) {
String msg = TreeView.printMessage(message, url);
print(msg, ProcessOutputTypes.STDOUT);
}
print(message.getText(), ProcessOutputTypes.STDOUT);
}
@Override
public void addException(AntMessage exception, boolean showFullTrace) {
String text = exception.getText();
if (!showFullTrace) {
int index = text.indexOf("\r\n");
if (index != -1) {
text = text.substring(0, index) + "\n";
}
}
print(text, ProcessOutputTypes.STDOUT);
}
public void clearAllMessages() {
myConsole.clear();
}
@Override
public void startBuild(AntMessage message) {
print(myCommandLine + "\n", ProcessOutputTypes.SYSTEM);
addMessage(message);
}
@Override
public void buildFailed(AntMessage message) {
print(myCommandLine + "\n", ProcessOutputTypes.SYSTEM);
addMessage(message);
}
@Override
public void startTarget(AntMessage message) {
addMessage(message);
}
@Override
public void startTask(AntMessage message) {
addMessage(message);
}
@Override
public void finishBuild(String messageText) {
print("\n" + messageText + "\n", ProcessOutputTypes.SYSTEM);
}
@Override
public void finishTarget() {
}
@Override
public void finishTask() {
}
@Override
@Nullable
public Object getData(@NotNull String dataId) {
return null;
}
public void setBuildCommandLine(String commandLine) {
myCommandLine = commandLine;
}
private final class JUnitFilter implements Filter {
@Override
@Nullable
public Result applyFilter(String line, int entireLength) {
HyperlinkUtil.PlaceInfo placeInfo = HyperlinkUtil.parseJUnitMessage(myProject, line);
if (placeInfo == null) {
return null;
}
int textStartOffset = entireLength - line.length();
int highlightStartOffset = textStartOffset + placeInfo.getLinkStartIndex();
int highlightEndOffset = textStartOffset + placeInfo.getLinkEndIndex() + 1;
OpenFileHyperlinkInfo info = new OpenFileHyperlinkInfo(myProject, placeInfo.getFile(), placeInfo.getLine(), placeInfo.getColumn());
return new Result(highlightStartOffset, highlightEndOffset, info);
}
}
private final class AntMessageFilter implements Filter {
@Override
public Result applyFilter(String line, int entireLength) {
int afterLineNumberIndex = line.indexOf(": "); // end of file_name_and_line_number sequence
if (afterLineNumberIndex == -1) {
return null;
}
String fileAndLineNumber = line.substring(0, afterLineNumberIndex);
int index = fileAndLineNumber.lastIndexOf(':');
if (index == -1) {
return null;
}
final String fileName = fileAndLineNumber.substring(0, index);
String lineNumberStr = fileAndLineNumber.substring(index + 1).trim();
int lineNumber;
try {
lineNumber = Integer.parseInt(lineNumberStr);
}
catch (NumberFormatException e) {
return null;
}
final VirtualFile file = LocalFileSystem.getInstance().findFileByPath(fileName.replace(File.separatorChar, '/'));
if (file == null) {
return null;
}
int textStartOffset = entireLength - line.length();
int highlightEndOffset = textStartOffset + afterLineNumberIndex;
OpenFileHyperlinkInfo info = new OpenFileHyperlinkInfo(myProject, file, lineNumber - 1);
return new Result(textStartOffset, highlightEndOffset, info);
}
}
private static class LightProcessHandler extends ProcessHandler {
@Override
protected void destroyProcessImpl() {
throw new UnsupportedOperationException();
}
@Override
protected void detachProcessImpl() {
throw new UnsupportedOperationException();
}
@Override
public boolean detachIsDefault() {
return false;
}
@Override
@Nullable
public OutputStream getProcessInput() {
return null;
}
}
}<|fim▁end|>
|
myConsole = builder.getConsole();
myConsole.attachToProcess(myProcessHandler);
}
|
<|file_name|>bg.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
{
'!langcode!': 'bg',
'!langname!': 'Български',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',<|fim▁hole|>'%s': '%s',
'%s %%{row} deleted': '%s записите бяха изтрити',
'%s %%{row} updated': '%s записите бяха обновени',
'%s selected': '%s selected',
'%s students registered': '%s students registered',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(requires internet access)': '(requires internet access)',
'(requires internet access, experimental)': '(requires internet access, experimental)',
'(something like "it-it")': '(something like "it-it")',
'(version %s)': '(version %s)',
'?': '?',
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
'A new version of web2py is available': 'A new version of web2py is available',
'A new version of web2py is available: %s': 'A new version of web2py is available: %s',
'Abort': 'Abort',
'About': 'about',
'About application': 'About application',
'Accept Terms': 'Accept Terms',
'Add breakpoint': 'Add breakpoint',
'additional code for your application': 'additional code for your application',
'Additional code for your application': 'Additional code for your application',
'Admin design page': 'Admin design page',
'admin disabled because no admin password': 'admin disabled because no admin password',
'admin disabled because not supported on google app engine': 'admin disabled because not supported on google apps engine',
'admin disabled because too many invalid login attempts': 'admin disabled because too many invalid login attempts',
'admin disabled because unable to access password file': 'admin disabled because unable to access password file',
'Admin is disabled because insecure channel': 'Admin is disabled because insecure channel',
'Admin is disabled because unsecure channel': 'Admin is disabled because unsecure channel',
'Admin language': 'Admin language',
'Admin versioning page': 'Admin versioning page',
'administrative interface': 'administrative interface',
'Administrator Password:': 'Administrator Password:',
'and rename it (required):': 'and rename it (required):',
'and rename it:': 'and rename it:',
'App does not exist or you are not authorized': 'App does not exist or you are not authorized',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin is disabled because insecure channel',
'Application': 'Application',
'application "%s" uninstalled': 'application "%s" uninstalled',
'Application cannot be generated in demo mode': 'Application cannot be generated in demo mode',
'application compiled': 'application compiled',
'Application exists already': 'Application exists already',
'application is compiled and cannot be designed': 'application is compiled and cannot be designed',
'Application name:': 'Application name:',
'Application updated via git pull': 'Application updated via git pull',
'are not used': 'are not used',
'are not used yet': 'are not used yet',
'Are you sure you want to delete file "%s"?': 'Are you sure you want to delete file "%s"?',
'Are you sure you want to delete plugin "%s"?': 'Are you sure you want to delete plugin "%s"?',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Are you sure you want to uninstall application "%s"': 'Are you sure you want to uninstall application "%s"',
'Are you sure you want to uninstall application "%s"?': 'Are you sure you want to uninstall application "%s"?',
'Are you sure you want to upgrade web2py now?': 'Are you sure you want to upgrade web2py now?',
'Are you sure?': 'Are you sure?',
'arguments': 'arguments',
'at char %s': 'at char %s',
'at line %s': 'at line %s',
'ATTENTION:': 'ATTENTION:',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.',
'ATTENTION: you cannot edit the running application!': 'ATTENTION: you cannot edit the running application!',
'Autocomplete Python Code': 'Autocomplete Python Code',
'Available databases and tables': 'Available databases and tables',
'Available Databases and Tables': 'Available Databases and Tables',
'back': 'back',
'Back to the plugins list': 'Back to the plugins list',
'Back to wizard': 'Back to wizard',
'Basics': 'Basics',
'Begin': 'Begin',
'breakpoint': 'breakpoint',
'Breakpoints': 'Breakpoints',
'breakpoints': 'breakpoints',
'Bulk Register': 'Bulk Register',
'Bulk Student Registration': 'Bulk Student Registration',
'Cache': 'Cache',
'cache': 'cache',
'Cache Cleared': 'Cache Cleared',
'Cache Keys': 'Cache Keys',
'cache, errors and sessions cleaned': 'cache, errors and sessions cleaned',
'can be a git repo': 'can be a git repo',
'Cancel': 'Cancel',
'Cannot be empty': 'Cannot be empty',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Cannot compile: there are errors in your app. Debug it, correct errors and try again.',
'Cannot compile: there are errors in your app:': 'Cannot compile: there are errors in your app:',
'cannot create file': 'cannot create file',
'cannot upload file "%(filename)s"': 'cannot upload file "%(filename)s"',
'Change Admin Password': 'Change Admin Password',
'Change admin password': 'change admin password',
'change editor settings': 'change editor settings',
'Changelog': 'Changelog',
'check all': 'check all',
'Check for upgrades': 'check for upgrades',
'Check to delete': 'Check to delete',
'Checking for upgrades...': 'Checking for upgrades...',
'Clean': 'clean',
'Clear': 'Clear',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'click here for online examples': 'щракни тук за онлайн примери',
'click here for the administrative interface': 'щракни тук за административния интерфейс',
'Click row to expand traceback': 'Click row to expand traceback',
'Click row to view a ticket': 'Click row to view a ticket',
'click to check for upgrades': 'click to check for upgrades',
'code': 'code',
'Code listing': 'Code listing',
'collapse/expand all': 'collapse/expand all',
'Command': 'Command',
'Comment:': 'Comment:',
'Commit': 'Commit',
'Commit form': 'Commit form',
'Committed files': 'Committed files',
'Compile': 'compile',
'Compile (all or nothing)': 'Compile (all or nothing)',
'Compile (skip failed views)': 'Compile (skip failed views)',
'compiled application removed': 'compiled application removed',
'Condition': 'Condition',
'continue': 'continue',
'Controllers': 'Controllers',
'controllers': 'controllers',
'Count': 'Count',
'Create': 'create',
'create file with filename:': 'create file with filename:',
'create new application:': 'create new application:',
'Create new simple application': 'Create new simple application',
'Create/Upload': 'Create/Upload',
'created by': 'created by',
'Created by:': 'Created by:',
'Created On': 'Created On',
'Created on:': 'Created on:',
'crontab': 'crontab',
'Current request': 'Current request',
'Current response': 'Current response',
'Current session': 'Current session',
'currently running': 'currently running',
'currently saved or': 'currently saved or',
'data uploaded': 'данните бяха качени',
'Database': 'Database',
'database': 'database',
'Database %s select': 'Database %s select',
'database %s select': 'database %s select',
'Database administration': 'Database administration',
'database administration': 'database administration',
'Database Administration (appadmin)': 'Database Administration (appadmin)',
'Date and Time': 'Date and Time',
'db': 'дб',
'Debug': 'Debug',
'defines tables': 'defines tables',
'Delete': 'Delete',
'delete': 'delete',
'delete all checked': 'delete all checked',
'delete plugin': 'delete plugin',
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
'Delete:': 'Delete:',
'deleted after first hit': 'deleted after first hit',
'Demo': 'Demo',
'Deploy': 'deploy',
'Deploy on Google App Engine': 'Deploy on Google App Engine',
'Deploy to OpenShift': 'Deploy to OpenShift',
'Deploy to pythonanywhere': 'Deploy to pythonanywhere',
'Deploy to PythonAnywhere': 'Deploy to PythonAnywhere',
'Deployment form': 'Deployment form',
'Deployment Interface': 'Deployment Interface',
'Description:': 'Description:',
'design': 'дизайн',
'DESIGN': 'DESIGN',
'Design for': 'Design for',
'Detailed traceback description': 'Detailed traceback description',
'details': 'details',
'direction: ltr': 'direction: ltr',
'directory not found': 'directory not found',
'Disable': 'Disable',
'Disabled': 'Disabled',
'disabled in demo mode': 'disabled in demo mode',
'disabled in GAE mode': 'disabled in GAE mode',
'disabled in multi user mode': 'disabled in multi user mode',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Display line numbers': 'Display line numbers',
'DO NOT use the "Pack compiled" feature.': 'DO NOT use the "Pack compiled" feature.',
'docs': 'docs',
'Docs': 'Docs',
'done!': 'готово!',
'Downgrade': 'Downgrade',
'Download .w2p': 'Download .w2p',
'Download as .exe': 'Download as .exe',
'download layouts': 'download layouts',
'Download layouts from repository': 'Download layouts from repository',
'download plugins': 'download plugins',
'Download plugins from repository': 'Download plugins from repository',
'EDIT': 'EDIT',
'Edit': 'edit',
'edit all': 'edit all',
'Edit application': 'Edit application',
'edit controller': 'edit controller',
'edit controller:': 'edit controller:',
'Edit current record': 'Edit current record',
'edit views:': 'edit views:',
'Editing %s': 'Editing %s',
'Editing file': 'Editing file',
'Editing file "%s"': 'Editing file "%s"',
'Editing Language file': 'Editing Language file',
'Editing Plural Forms File': 'Editing Plural Forms File',
'Editor': 'Editor',
'Email Address': 'Email Address',
'Enable': 'Enable',
'Enable Close-Tag': 'Enable Close-Tag',
'Enable Code Folding': 'Enable Code Folding',
'Enterprise Web Framework': 'Enterprise Web Framework',
'Error': 'Error',
'Error logs for "%(app)s"': 'Error logs for "%(app)s"',
'Error snapshot': 'Error snapshot',
'Error ticket': 'Error ticket',
'Errors': 'errors',
'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s',
'Exception %s': 'Exception %s',
'Exception instance attributes': 'Exception instance attributes',
'Exit Fullscreen': 'Exit Fullscreen',
'Expand Abbreviation (html files only)': 'Expand Abbreviation (html files only)',
'export as csv file': 'export as csv file',
'Exports:': 'Exports:',
'exposes': 'exposes',
'exposes:': 'exposes:',
'extends': 'extends',
'failed to compile file because:': 'failed to compile file because:',
'failed to reload module': 'failed to reload module',
'failed to reload module because:': 'failed to reload module because:',
'File': 'File',
'file "%(filename)s" created': 'file "%(filename)s" created',
'file "%(filename)s" deleted': 'file "%(filename)s" deleted',
'file "%(filename)s" uploaded': 'file "%(filename)s" uploaded',
'file "%(filename)s" was not deleted': 'file "%(filename)s" was not deleted',
'file "%s" of %s restored': 'file "%s" of %s restored',
'file changed on disk': 'file changed on disk',
'file does not exist': 'file does not exist',
'file not found': 'file not found',
'file saved on %(time)s': 'file saved on %(time)s',
'file saved on %s': 'file saved on %s',
'filename': 'filename',
'Filename': 'Filename',
'Files added': 'Files added',
'filter': 'filter',
'Find Next': 'Find Next',
'Find Previous': 'Find Previous',
'Form has errors': 'Form has errors',
'Frames': 'Frames',
'Functions with no doctests will result in [passed] tests.': 'Functions with no doctests will result in [passed] tests.',
'GAE Email': 'GAE Email',
'GAE Output': 'GAE Output',
'GAE Password': 'GAE Password',
'Generate': 'Generate',
'Get from URL:': 'Get from URL:',
'Git Pull': 'Git Pull',
'Git Push': 'Git Push',
'Globals##debug': 'Globals##debug',
'go!': 'go!',
'Google App Engine Deployment Interface': 'Google App Engine Deployment Interface',
'Google Application Id': 'Google Application Id',
'Goto': 'Goto',
'graph model': 'graph model',
'Graph Model': 'Graph Model',
'Hello World': 'Здравей, свят',
'Help': 'help',
'here': 'here',
'Hide/Show Translated strings': 'Hide/Show Translated strings',
'Highlight current line': 'Highlight current line',
'Hits': 'Hits',
'Home': 'Home',
'honored only if the expression evaluates to true': 'honored only if the expression evaluates to true',
'htmledit': 'htmledit',
'If start the downgrade, be patient, it may take a while to rollback': 'If start the downgrade, be patient, it may take a while to rollback',
'If start the upgrade, be patient, it may take a while to download': 'If start the upgrade, be patient, it may take a while to download',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.': 'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.',
'import': 'import',
'Import/Export': 'Import/Export',
'In development, use the default Rocket webserver that is currently supported by this debugger.': 'In development, use the default Rocket webserver that is currently supported by this debugger.',
'includes': 'includes',
'Indent with tabs': 'Indent with tabs',
'insert new': 'insert new',
'insert new %s': 'insert new %s',
'inspect attributes': 'inspect attributes',
'Install': 'install',
'Installation of %(plugin)s for %(app)s': 'Installation of %(plugin)s for %(app)s',
'Installed applications': 'Installed applications',
'Interaction at %s line %s': 'Interaction at %s line %s',
'Interactive console': 'Interactive console',
'internal error': 'internal error',
'internal error: %s': 'internal error: %s',
'Internal State': 'Internal State',
'Invalid action': 'Invalid action',
'Invalid application name': 'Invalid application name',
'invalid circular reference': 'invalid circular reference',
'Invalid git repository specified.': 'Invalid git repository specified.',
'invalid password': 'invalid password',
'invalid password.': 'invalid password.',
'Invalid Query': 'Невалидна заявка',
'invalid request': 'невалидна заявка',
'Invalid request': 'Invalid request',
'invalid table names (auth_* tables already defined)': 'invalid table names (auth_* tables already defined)',
'invalid ticket': 'invalid ticket',
'Key': 'Key',
'Keyboard shortcuts': 'Keyboard shortcuts',
'kill process': 'kill process',
'language file "%(filename)s" created/updated': 'language file "%(filename)s" created/updated',
'Language files (static strings) updated': 'Language files (static strings) updated',
'languages': 'languages',
'Languages': 'Languages',
'languages updated': 'languages updated',
'Last Revision': 'Last Revision',
'Last saved on:': 'Last saved on:',
'License for': 'License for',
'License:': 'License:',
'Line Nr': 'Line Nr',
'Line number': 'Line number',
'lists by exception': 'lists by exception',
'lists by ticket': 'lists by ticket',
'Loading...': 'Loading...',
'loading...': 'loading...',
'Local Apps': 'Local Apps',
'locals': 'locals',
'Locals##debug': 'Locals##debug',
'Login': 'Login',
'login': 'login',
'Login successful': 'Login successful',
'Login to the Administrative Interface': 'Login to the Administrative Interface',
'Login/Register': 'Login/Register',
'Logout': 'logout',
'lost password': 'lost password',
'Main Menu': 'Main Menu',
'Manage': 'Manage',
'Manage %(action)s': 'Manage %(action)s',
'Manage Access Control': 'Manage Access Control',
'Manage Admin Users/Students': 'Manage Admin Users/Students',
'Manage Cache': 'Manage Cache',
'Manage Students': 'Manage Students',
'Memberships': 'Memberships',
'merge': 'merge',
'Models': 'Models',
'models': 'models',
'Modified On': 'Modified On',
'Modules': 'Modules',
'modules': 'modules',
'Multi User Mode': 'Multi User Mode',
'new application "%s" created': 'new application "%s" created',
'new application "%s" imported': 'new application "%s" imported',
'New Application Wizard': 'New Application Wizard',
'New application wizard': 'New application wizard',
'new plugin installed': 'new plugin installed',
'New plugin installed: %s': 'New plugin installed: %s',
'New Record': 'New Record',
'new record inserted': 'новият запис беше добавен',
'New simple application': 'New simple application',
'next': 'next',
'next %s rows': 'next %s rows',
'next 100 rows': 'next 100 rows',
'NO': 'NO',
'no changes': 'no changes',
'No databases in this application': 'No databases in this application',
'No Interaction yet': 'No Interaction yet',
'no match': 'no match',
'no package selected': 'no package selected',
'no permission to uninstall "%s"': 'no permission to uninstall "%s"',
'Node:': 'Node:',
'Not Authorized': 'Not Authorized',
'Not supported': 'Not supported',
'Note: If you receive an error with github status code of 128, ensure the system and account you are deploying from has a cooresponding ssh key configured in the openshift account.': 'Note: If you receive an error with github status code of 128, ensure the system and account you are deploying from has a cooresponding ssh key configured in the openshift account.',
"On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.": "On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.",
'Open new app in new window': 'Open new app in new window',
'OpenShift Deployment Interface': 'OpenShift Deployment Interface',
'OpenShift Output': 'OpenShift Output',
'or alternatively': 'or alternatively',
'Or Get from URL:': 'Or Get from URL:',
'or import from csv file': 'or import from csv file',
'or provide app url:': 'or provide app url:',
'or provide application url:': 'or provide application url:',
'Original/Translation': 'Original/Translation',
'Overview': 'Overview',
'Overwrite installed app': 'overwrite installed app',
'Pack all': 'pack all',
'Pack compiled': 'pack compiled',
'Pack custom': 'Pack custom',
'pack plugin': 'pack plugin',
'PAM authenticated user, cannot change password here': 'PAM authenticated user, cannot change password here',
'password changed': 'password changed',
'Past revisions': 'Past revisions',
'Path to appcfg.py': 'Path to appcfg.py',
'Path to local openshift repo root.': 'Path to local openshift repo root.',
'Peeking at file': 'Peeking at file',
'Permission': 'Permission',
'Permissions': 'Permissions',
'Please': 'Please',
'Please wait, giving pythonanywhere a moment...': 'Please wait, giving pythonanywhere a moment...',
'plugin "%(plugin)s" deleted': 'plugin "%(plugin)s" deleted',
'Plugin "%s" in application': 'Plugin "%s" in application',
'plugin not specified': 'plugin not specified',
'Plugin page': 'Plugin page',
'plugins': 'plugins',
'Plugins': 'Plugins',
'Plural Form #%s': 'Plural Form #%s',
'Plural-Forms:': 'Plural-Forms:',
'Powered by': 'Powered by',
'Preferences saved correctly': 'Preferences saved correctly',
'Preferences saved on session only': 'Preferences saved on session only',
'previous %s rows': 'previous %s rows',
'previous 100 rows': 'previous 100 rows',
'Private files': 'Private files',
'private files': 'private files',
'Project Progress': 'Project Progress',
'Pull': 'Pull',
'Pull failed, certain files could not be checked out. Check logs for details.': 'Pull failed, certain files could not be checked out. Check logs for details.',
'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.': 'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.',
'Push': 'Push',
'Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.': 'Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.',
'pygraphviz library not found': 'pygraphviz library not found',
'PythonAnywhere Apps': 'PythonAnywhere Apps',
'PythonAnywhere Password': 'PythonAnywhere Password',
'Query:': 'Query:',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Rapid Search': 'Rapid Search',
'Record': 'Record',
'record': 'record',
'record does not exist': 'записът не съществува',
'record id': 'record id',
'Record id': 'Record id',
'refresh': 'refresh',
'register': 'register',
'Reload routes': 'Reload routes',
'Remove compiled': 'remove compiled',
'Removed Breakpoint on %s at line %s': 'Removed Breakpoint on %s at line %s',
'Replace': 'Replace',
'Replace All': 'Replace All',
'Repository (%s)': 'Repository (%s)',
'request': 'request',
'requires distutils, but not installed': 'requires distutils, but not installed',
'requires python-git, but not installed': 'requires python-git, but not installed',
'Resolve Conflict file': 'Resolve Conflict file',
'response': 'response',
'restart': 'restart',
'restore': 'restore',
'return': 'return',
'Revert': 'Revert',
'revert': 'revert',
'reverted to revision %s': 'reverted to revision %s',
'Revision %s': 'Revision %s',
'Revision:': 'Revision:',
'Role': 'Role',
'Roles': 'Roles',
'Rows in table': 'Rows in table',
'Rows in Table': 'Rows in Table',
'Rows selected': 'Rows selected',
'rules are not defined': 'rules are not defined',
'Run tests': 'Run tests',
'Run tests in this file': 'Run tests in this file',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
'Running on %s': 'Running on %s',
'Save': 'Save',
'save': 'save',
'Save file:': 'Save file:',
'Save file: %s': 'Save file: %s',
'Save model as...': 'Save model as...',
'Save via Ajax': 'Save via Ajax',
'Saved file hash:': 'Saved file hash:',
'Screenshot %s': 'Screenshot %s',
'Search': 'Search',
'Select Files to Package': 'Select Files to Package',
'selected': 'selected',
'session': 'session',
'session expired': 'session expired',
'Session saved correctly': 'Session saved correctly',
'Session saved on session only': 'Session saved on session only',
'Set Breakpoint on %s at line %s: %s': 'Set Breakpoint on %s at line %s: %s',
'shell': 'shell',
'Showing %s to %s of %s %s found': 'Showing %s to %s of %s %s found',
'Singular Form': 'Singular Form',
'Site': 'site',
'Size of cache:': 'Size of cache:',
'skip to generate': 'skip to generate',
'some files could not be removed': 'some files could not be removed',
'Something went wrong please wait a few minutes before retrying': 'Something went wrong please wait a few minutes before retrying',
'Sorry, could not find mercurial installed': 'Sorry, could not find mercurial installed',
'source : db': 'source : db',
'source : filesystem': 'source : filesystem',
'Start a new app': 'Start a new app',
'Start searching': 'Start searching',
'Start wizard': 'start wizard',
'state': 'състояние',
'Static': 'Static',
'static': 'static',
'Static files': 'Static files',
'Statistics': 'Statistics',
'Step': 'Step',
'step': 'step',
'stop': 'stop',
'submit': 'submit',
'Submit': 'Submit',
'successful': 'successful',
'Sure you want to delete this object?': 'Сигурен ли си, че искаш да изтриеш този обект?',
'switch to : db': 'switch to : db',
'switch to : filesystem': 'switch to : filesystem',
'Tab width (# characters)': 'Tab width (# characters)',
'table': 'table',
'Table': 'Table',
'Temporary': 'Temporary',
'test': 'test',
'Testing application': 'Testing application',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
'The app exists, was created by wizard, continue to overwrite!': 'The app exists, was created by wizard, continue to overwrite!',
'The app exists, was NOT created by wizard, continue to overwrite!': 'The app exists, was NOT created by wizard, continue to overwrite!',
'the application logic, each URL path is mapped in one exposed function in the controller': 'the application logic, each URL path is mapped in one exposed function in the controller',
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
'the data representation, define database tables and sets': 'the data representation, define database tables and sets',
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
'the presentations layer, views are also known as templates': 'the presentations layer, views are also known as templates',
'Theme': 'Theme',
'There are no controllers': 'There are no controllers',
'There are no models': 'There are no models',
'There are no modules': 'There are no modules',
'There are no plugins': 'There are no plugins',
'There are no private files': 'There are no private files',
'There are no static files': 'There are no static files',
'There are no translators': 'There are no translators',
'There are no translators, only default language is supported': 'There are no translators, only default language is supported',
'There are no views': 'There are no views',
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
'these files are served without processing, your images go here': 'these files are served without processing, your images go here',
"This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.": "This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.",
'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk',
'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk',
'This is the %(filename)s template': 'This is the %(filename)s template',
"This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.": "This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.",
'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.': 'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.',
'this page to see if a breakpoint was hit and debug interaction is required.': 'this page to see if a breakpoint was hit and debug interaction is required.',
'This will pull changes from the remote repo for application "%s"?': 'This will pull changes from the remote repo for application "%s"?',
'This will push changes to the remote repo for application "%s".': 'This will push changes to the remote repo for application "%s".',
'Ticket': 'Ticket',
'Ticket ID': 'Ticket ID',
'Ticket Missing': 'Ticket Missing',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'TM': 'TM',
'to previous version.': 'to previous version.',
'To create a plugin, name a file/folder plugin_[name]': 'To create a plugin, name a file/folder plugin_[name]',
'To emulate a breakpoint programatically, write:': 'To emulate a breakpoint programatically, write:',
'to use the debugger!': 'to use the debugger!',
'toggle breakpoint': 'toggle breakpoint',
'Toggle comment': 'Toggle comment',
'Toggle Fullscreen': 'Toggle Fullscreen',
'Traceback': 'Traceback',
'translation strings for the application': 'translation strings for the application',
'Translation strings for the application': 'Translation strings for the application',
'try': 'try',
'try something like': 'try something like',
'Try the mobile interface': 'Try the mobile interface',
'try view': 'try view',
'Type PDB debugger command in here and hit Return (Enter) to execute it.': 'Type PDB debugger command in here and hit Return (Enter) to execute it.',
'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.',
'Unable to check for upgrades': 'Unable to check for upgrades',
'unable to create application "%s"': 'unable to create application "%s"',
'unable to delete file "%(filename)s"': 'unable to delete file "%(filename)s"',
'unable to delete file plugin "%(plugin)s"': 'unable to delete file plugin "%(plugin)s"',
'Unable to determine the line number!': 'Unable to determine the line number!',
'Unable to download': 'Unable to download',
'Unable to download app because:': 'Unable to download app because:',
'Unable to download because': 'Unable to download because',
'unable to download layout': 'unable to download layout',
'unable to download plugin: %s': 'unable to download plugin: %s',
'Unable to download the list of plugins': 'Unable to download the list of plugins',
'unable to install plugin "%s"': 'unable to install plugin "%s"',
'unable to parse csv file': 'не е възможна обработката на csv файла',
'unable to uninstall "%s"': 'unable to uninstall "%s"',
'unable to upgrade because "%s"': 'unable to upgrade because "%s"',
'uncheck all': 'uncheck all',
'Uninstall': 'uninstall',
'Unsupported webserver working mode: %s': 'Unsupported webserver working mode: %s',
'update': 'update',
'update all languages': 'update all languages',
'Update:': 'Update:',
'Upgrade': 'Upgrade',
'upgrade now to %s': 'upgrade now to %s',
'upgrade web2py now': 'upgrade web2py now',
'upload': 'upload',
'Upload': 'Upload',
'Upload & install packed application': 'Upload & install packed application',
'Upload a package:': 'Upload a package:',
'Upload and install packed application': 'Upload and install packed application',
'upload application:': 'upload application:',
'Upload existing application': 'Upload existing application',
'upload file:': 'upload file:',
'upload plugin file:': 'upload plugin file:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
'Use an url:': 'Use an url:',
'User': 'User',
'Username': 'Username',
'Users': 'Users',
'Using the shell may lock the database to other users of this app.': 'Using the shell may lock the database to other users of this app.',
'variables': 'variables',
'Version': 'Version',
'Versioning': 'Versioning',
'versioning': 'versioning',
'view': 'view',
'Views': 'Views',
'views': 'views',
'Warning!': 'Warning!',
'WARNING:': 'WARNING:',
'WARNING: The following views could not be compiled:': 'WARNING: The following views could not be compiled:',
'Web Framework': 'Web Framework',
'web2py Admin Password': 'web2py Admin Password',
'web2py apps to deploy': 'web2py apps to deploy',
'web2py Debugger': 'web2py Debugger',
'web2py downgrade': 'web2py downgrade',
'web2py is up to date': 'web2py is up to date',
'web2py online debugger': 'web2py online debugger',
'web2py Recent Tweets': 'web2py Recent Tweets',
'web2py upgrade': 'web2py upgrade',
'web2py upgraded; please restart it': 'web2py upgraded; please restart it',
'Welcome to web2py': 'Добре дошъл в web2py',
'Working...': 'Working...',
'WSGI reference name': 'WSGI reference name',
'YES': 'YES',
'Yes': 'Yes',
'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button',
'You can inspect variables using the console below': 'You can inspect variables using the console below',
'You have one more login attempt before you are locked out': 'You have one more login attempt before you are locked out',
'You need to set up and reach a': 'You need to set up and reach a',
'You only need these if you have already registered': 'You only need these if you have already registered',
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Your application will be blocked until you click an action button (next, step, continue, etc.)',
}<|fim▁end|>
|
'"User Exception" debug mode. ': '"User Exception" debug mode. ',
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: yuchou<|fim▁hole|><|fim▁end|>
|
@time: 2017/8/7 10:28
"""
|
<|file_name|>hdr_intrusive_msqueue_dhp.cpp<|end_file_name|><|fim▁begin|>/*
This file is a part of libcds - Concurrent Data Structures library
Version: 2.0.0
(C) Copyright Maxim Khizhinsky ([email protected]) 2006-2014
Distributed under the BSD license (see accompanying file license.txt)
Source code repo: http://github.com/khizmax/libcds/
Download: http://sourceforge.net/projects/libcds/files/
*/
#include "hdr_intrusive_msqueue.h"
#include <cds/intrusive/msqueue.h>
#include <cds/gc/dhp.h>
namespace queue {
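// TEST(X) expands to the definition of IntrusiveQueueHeaderTest::test_X(), which simply runs the generic
// test<X>() for the queue type X declared below.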
#define TEST(X) void IntrusiveQueueHeaderTest::test_##X() { test<X>(); }
namespace {
typedef IntrusiveQueueHeaderTest::base_hook_item< ci::msqueue::node<cds::gc::DHP > > base_item_type;
typedef IntrusiveQueueHeaderTest::member_hook_item< ci::msqueue::node<cds::gc::DHP > > member_item_type;
// DHP base hook
typedef ci::MSQueue< cds::gc::DHP, base_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> >
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
>::type
> MSQueue_DHP_base;
// DHP member hook
typedef ci::MSQueue< cds::gc::DHP, member_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::member_hook<
offsetof( member_item_type, hMember ),
ci::opt::gc<cds::gc::DHP>
>
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
>::type
> MSQueue_DHP_member;
/// DHP base hook + item counter
typedef ci::MSQueue< cds::gc::DHP, base_item_type,
typename ci::msqueue::make_traits<
ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, ci::opt::hook<
ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> >
>
, co::item_counter< cds::atomicity::item_counter >
, co::memory_model< co::v::relaxed_ordering >
>::type
> MSQueue_DHP_base_ic;
// DHP member hook + item counter
typedef ci::MSQueue< cds::gc::DHP, member_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::member_hook<
offsetof( member_item_type, hMember ),
ci::opt::gc<cds::gc::DHP>
>
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, co::item_counter< cds::atomicity::item_counter >
>::type
> MSQueue_DHP_member_ic;
// DHP base hook + stat
typedef ci::MSQueue< cds::gc::DHP, base_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> >
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, co::stat< ci::msqueue::stat<> >
>::type
> MSQueue_DHP_base_stat;
// DHP member hook + stat
typedef ci::MSQueue< cds::gc::DHP, member_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::member_hook<
offsetof( member_item_type, hMember ),
ci::opt::gc<cds::gc::DHP>
>
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, co::stat< ci::msqueue::stat<> >
>::type
> MSQueue_DHP_member_stat;
// DHP base hook + alignment
typedef ci::MSQueue< cds::gc::DHP, base_item_type,
typename ci::msqueue::make_traits<
ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, ci::opt::hook<
ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> >
>
, co::alignment< 32 >
>::type
> MSQueue_DHP_base_align;
// DHP member hook + alignment
typedef ci::MSQueue< cds::gc::DHP, member_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::member_hook<
offsetof( member_item_type, hMember ),
ci::opt::gc<cds::gc::DHP>
>
>
, co::alignment< 32 >
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
>::type
> MSQueue_DHP_member_align;
// DHP base hook + no alignment
struct traits_MSQueue_DHP_base_noalign : public ci::msqueue::traits {
typedef ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> > hook;
typedef IntrusiveQueueHeaderTest::faked_disposer disposer;
enum { alignment = co::no_special_alignment };
};
typedef ci::MSQueue< cds::gc::DHP, base_item_type, traits_MSQueue_DHP_base_noalign > MSQueue_DHP_base_noalign;
// DHP member hook + no alignment
struct traits_MSQueue_DHP_member_noalign : public ci::msqueue::traits {
typedef ci::msqueue::member_hook <
offsetof( member_item_type, hMember ),
ci::opt::gc < cds::gc::DHP >
> hook;
typedef IntrusiveQueueHeaderTest::faked_disposer disposer;
enum { alignment = co::no_special_alignment };
};
typedef ci::MSQueue< cds::gc::DHP, member_item_type, traits_MSQueue_DHP_member_noalign > MSQueue_DHP_member_noalign;
// DHP base hook + cache alignment
struct traits_MSQueue_DHP_base_cachealign : public traits_MSQueue_DHP_base_noalign
{
enum { alignment = co::cache_line_alignment };
};
typedef ci::MSQueue< cds::gc::DHP, base_item_type, traits_MSQueue_DHP_base_cachealign > MSQueue_DHP_base_cachealign;
// DHP member hook + cache alignment
struct traits_MSQueue_DHP_member_cachealign : public traits_MSQueue_DHP_member_noalign
{
enum { alignment = co::cache_line_alignment };
};<|fim▁hole|> } // namespace
TEST(MSQueue_DHP_base)
TEST(MSQueue_DHP_member)
TEST(MSQueue_DHP_base_ic)
TEST(MSQueue_DHP_member_ic)
TEST(MSQueue_DHP_base_stat)
TEST(MSQueue_DHP_member_stat)
TEST(MSQueue_DHP_base_align)
TEST(MSQueue_DHP_member_align)
TEST(MSQueue_DHP_base_noalign)
TEST(MSQueue_DHP_member_noalign)
TEST(MSQueue_DHP_base_cachealign)
TEST(MSQueue_DHP_member_cachealign)
} // namespace queue<|fim▁end|>
|
typedef ci::MSQueue< cds::gc::DHP, member_item_type, traits_MSQueue_DHP_member_cachealign > MSQueue_DHP_member_cachealign;
|
<|file_name|>build.index.prod.ts<|end_file_name|><|fim▁begin|>import * as gulp from 'gulp';
import * as gulpLoadPlugins from 'gulp-load-plugins';
import { join, sep, normalize } from 'path';
import * as slash from 'slash';
import Config from '../../config';
import { TemplateLocalsBuilder } from '../../utils';
const plugins = <any>gulpLoadPlugins();
/**
* Executes the build process, injecting the JavaScript and CSS dependencies into the `index.html` for the production
* environment.
*/
export = () => {
return gulp.src(join(Config.APP_SRC, 'index.html'))
.pipe(injectJs())
.pipe(injectCss())
.pipe(plugins.template(new TemplateLocalsBuilder().withoutStringifiedEnvConfig().build()))
.pipe(gulp.dest(Config.APP_DEST));
};
/**
* Injects the given files and transforms their paths.
* @param {Array<string>} files - The files to be injected.
*/
function inject(...files: Array<string>) {<|fim▁hole|> });
}
/**
* Injects the bundled JavaScript shims and application bundles for the production environment.
*/
function injectJs() {
return inject(join(Config.JS_DEST, Config.JS_PROD_SHIMS_BUNDLE), join(Config.JS_DEST, Config.JS_PROD_APP_BUNDLE));
}
/**
* Injects the bundled CSS files for the production environment.
*/
function injectCss() {
return inject(join(Config.CSS_DEST, `${Config.CSS_BUNDLE_NAME}.css`));
}
/**
* Transforms the path of a dependency to its location within the `dist` directory according to the application's
* environment.
*/
function transformPath() {
return function(filepath: string) {
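    // Keep only the path segments after APP_DEST (or drop the first three segments as a fallback), prefix the
    // result with APP_BASE, append a timestamp query string for cache busting, then hand the rewritten path to
    // the default inject transform.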
let path: Array<string> = normalize(filepath).split(sep);
let slice_after = path.indexOf(Config.APP_DEST);
if (slice_after > -1) {
slice_after++;
} else {
slice_after = 3;
}
arguments[0] = Config.APP_BASE + path.slice(slice_after, path.length).join(sep) + `?${Date.now()}`;
return slash(plugins.inject.transform.apply(plugins.inject.transform, arguments));
};
}<|fim▁end|>
|
return plugins.inject(gulp.src(files, { read: false }), {
files,
transform: transformPath()
|
<|file_name|>htmlscriptelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::ascii::AsciiExt;
use dom::attr::Attr;
use dom::attr::AttrHelpers;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::Bindings::HTMLScriptElementBinding;
use dom::bindings::codegen::Bindings::HTMLScriptElementBinding::HTMLScriptElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::InheritTypes::{HTMLScriptElementDerived, HTMLScriptElementCast};
use dom::bindings::codegen::InheritTypes::{ElementCast, HTMLElementCast, NodeCast};
use dom::bindings::js::{JSRef, Temporary, OptionalRootable};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::{HTMLScriptElementTypeId, Element, AttributeHandlers};
use dom::element::{ElementCreator, ParserCreated};
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, NodeHelpers, ElementNodeTypeId, window_from_node, CloneChildrenFlag};
use dom::virtualmethods::VirtualMethods;
use dom::window::WindowHelpers;
use encoding::all::UTF_8;
use encoding::types::{Encoding, DecodeReplace};
use servo_net::resource_task::load_whole_resource;
use servo_util::str::{DOMString, HTML_SPACE_CHARACTERS, StaticStringVec};
use std::cell::Cell;
use url::UrlParser;
#[dom_struct]
pub struct HTMLScriptElement {
htmlelement: HTMLElement,
/// https://html.spec.whatwg.org/multipage/scripting.html#already-started
already_started: Cell<bool>,
/// https://html.spec.whatwg.org/multipage/scripting.html#parser-inserted
parser_inserted: Cell<bool>,
/// https://html.spec.whatwg.org/multipage/scripting.html#non-blocking
///
/// (currently unused)
non_blocking: Cell<bool>,
/// https://html.spec.whatwg.org/multipage/scripting.html#ready-to-be-parser-executed
///
/// (currently unused)
ready_to_be_parser_executed: Cell<bool>,
}
impl HTMLScriptElementDerived for EventTarget {
fn is_htmlscriptelement(&self) -> bool {
*self.type_id() == NodeTargetTypeId(ElementNodeTypeId(HTMLScriptElementTypeId))
}
}
impl HTMLScriptElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>,
creator: ElementCreator) -> HTMLScriptElement {
HTMLScriptElement {
htmlelement: HTMLElement::new_inherited(HTMLScriptElementTypeId, localName, prefix, document),
already_started: Cell::new(false),
parser_inserted: Cell::new(creator == ParserCreated),
non_blocking: Cell::new(creator != ParserCreated),
ready_to_be_parser_executed: Cell::new(false),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>,
creator: ElementCreator) -> Temporary<HTMLScriptElement> {
let element = HTMLScriptElement::new_inherited(localName, prefix, document, creator);
Node::reflect_node(box element, document, HTMLScriptElementBinding::Wrap)
}
}
pub trait HTMLScriptElementHelpers {
/// Prepare a script (<http://www.whatwg.org/html/#prepare-a-script>)
fn prepare(self);
/// Prepare a script, steps 6 and 7.
fn is_javascript(self) -> bool;
/// Set the "already started" flag (<https://whatwg.org/html/#already-started>)
fn mark_already_started(self);
}
/// Supported script types as defined by
/// <http://whatwg.org/html/#support-the-scripting-language>.
static SCRIPT_JS_MIMES: StaticStringVec = &[
"application/ecmascript",
"application/javascript",
"application/x-ecmascript",
"application/x-javascript",
"text/ecmascript",
"text/javascript",
"text/javascript1.0",
"text/javascript1.1",
"text/javascript1.2",
"text/javascript1.3",
"text/javascript1.4",
"text/javascript1.5",
"text/jscript",
"text/livescript",
"text/x-ecmascript",
"text/x-javascript",
];
impl<'a> HTMLScriptElementHelpers for JSRef<'a, HTMLScriptElement> {
fn prepare(self) {
// https://html.spec.whatwg.org/multipage/scripting.html#prepare-a-script
// Step 1.
if self.already_started.get() {
return;
}
// Step 2.
let was_parser_inserted = self.parser_inserted.get();
self.parser_inserted.set(false);
// Step 3.
let element: JSRef<Element> = ElementCast::from_ref(self);
if was_parser_inserted && element.has_attribute(&atom!("async")) {
self.non_blocking.set(true);
}
// Step 4.
let text = self.Text();
if text.len() == 0 && !element.has_attribute(&atom!("src")) {
return;
}
// Step 5.
let node: JSRef<Node> = NodeCast::from_ref(self);
if !node.is_in_doc() {
return;
}
// Step 6, 7.
if !self.is_javascript() {
return;
}
// Step 8.
if was_parser_inserted {
self.parser_inserted.set(true);
self.non_blocking.set(false);
}
// Step 9.
self.already_started.set(true);
// Step 10.
// TODO: If the element is flagged as "parser-inserted", but the element's node document is
// not the Document of the parser that created the element, then abort these steps.
// Step 11.
// TODO: If scripting is disabled for the script element, then the user agent must abort
// these steps at this point. The script is not executed.
// Step 12.
// TODO: If the script element has an `event` attribute and a `for` attribute, then run
// these substeps...
// Step 13.
// TODO: If the script element has a `charset` attribute, then let the script block's
// character encoding for this script element be the result of getting an encoding from the
// value of the `charset` attribute.
// Step 14 and 15.
// TODO: Add support for the `defer` and `async` attributes. (For now, we fetch all
// scripts synchronously and execute them immediately.)
let window = window_from_node(self).root();
let page = window.page();
let base_url = page.get_url();
let (source, url) = match element.get_attribute(ns!(""), &atom!("src")).root() {
Some(src) => {
if src.deref().Value().is_empty() {
// TODO: queue a task to fire a simple event named `error` at the element
return;
}
match UrlParser::new().base_url(&base_url).parse(src.deref().Value().as_slice()) {
Ok(url) => {
// TODO: Do a potentially CORS-enabled fetch with the mode being the current
// state of the element's `crossorigin` content attribute, the origin being
// the origin of the script element's node document, and the default origin
// behaviour set to taint.
match load_whole_resource(&page.resource_task, url) {
Ok((metadata, bytes)) => {
// TODO: use the charset from step 13.
let source = UTF_8.decode(bytes.as_slice(), DecodeReplace).unwrap();
(source, metadata.final_url)
}
Err(_) => {
error!("error loading script {}", src.deref().Value());
return;
}
}
}
Err(_) => {
// TODO: queue a task to fire a simple event named `error` at the element
error!("error parsing URL for script {}", src.deref().Value());
return;
}
}
}
None => (text, base_url)
};
window.evaluate_script_with_result(source.as_slice(), url.serialize().as_slice());
}
fn is_javascript(self) -> bool {
let element: JSRef<Element> = ElementCast::from_ref(self);
match element.get_attribute(ns!(""), &atom!("type")).root().map(|s| s.Value()) {
Some(ref s) if s.is_empty() => {
// type attr exists, but empty means js
debug!("script type empty, inferring js");
true
},
Some(ref s) => {
debug!("script type={:s}", *s);
SCRIPT_JS_MIMES.contains(&s.to_ascii_lower().as_slice().trim_chars(HTML_SPACE_CHARACTERS))
},
None => {
debug!("no script type");
match element.get_attribute(ns!(""), &atom!("language"))
.root()
.map(|s| s.Value()) {
Some(ref s) if s.is_empty() => {
debug!("script language empty, inferring js");
true
},
Some(ref s) => {
debug!("script language={:s}", *s);
SCRIPT_JS_MIMES.contains(&format!("text/{}", s).to_ascii_lower().as_slice())
},
None => {
debug!("no script type or language, inferring js");
true
}
}
}
}
}
fn mark_already_started(self) {
self.already_started.set(true);
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLScriptElement> {
fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {
let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn after_set_attr(&self, attr: JSRef<Attr>) {
match self.super_type() {
Some(ref s) => s.after_set_attr(attr),<|fim▁hole|> }
let node: JSRef<Node> = NodeCast::from_ref(*self);
if attr.local_name() == &atom!("src") && !self.parser_inserted.get() && node.is_in_doc() {
self.prepare();
}
}
fn child_inserted(&self, child: JSRef<Node>) {
match self.super_type() {
Some(ref s) => s.child_inserted(child),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
if !self.parser_inserted.get() && node.is_in_doc() {
self.prepare();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
match self.super_type() {
Some(ref s) => s.bind_to_tree(tree_in_doc),
_ => ()
}
if tree_in_doc && !self.parser_inserted.get() {
self.prepare();
}
}
fn cloning_steps(&self, copy: JSRef<Node>, maybe_doc: Option<JSRef<Document>>,
clone_children: CloneChildrenFlag) {
match self.super_type() {
Some(ref s) => s.cloning_steps(copy, maybe_doc, clone_children),
_ => (),
}
// https://whatwg.org/html/#already-started
if self.already_started.get() {
let copy_elem: JSRef<HTMLScriptElement> = HTMLScriptElementCast::to_ref(copy).unwrap();
copy_elem.mark_already_started();
}
}
}
impl<'a> HTMLScriptElementMethods for JSRef<'a, HTMLScriptElement> {
fn Src(self) -> DOMString {
let element: JSRef<Element> = ElementCast::from_ref(self);
element.get_url_attribute(&atom!("src"))
}
// http://www.whatwg.org/html/#dom-script-text
fn Text(self) -> DOMString {
let node: JSRef<Node> = NodeCast::from_ref(self);
Node::collect_text_contents(node.children())
}
// http://www.whatwg.org/html/#dom-script-text
fn SetText(self, value: DOMString) {
let node: JSRef<Node> = NodeCast::from_ref(self);
node.SetTextContent(Some(value))
}
}
impl Reflectable for HTMLScriptElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}<|fim▁end|>
|
_ => (),
|
<|file_name|>BenchmarkTest00544.java<|end_file_name|><|fim▁begin|>/**
* OWASP Benchmark Project v1.2
*
* This file is part of the Open Web Application Security Project (OWASP)
* Benchmark Project. For details, please see
* <a href="https://owasp.org/www-project-benchmark/">https://owasp.org/www-project-benchmark/</a>.
*
* The OWASP Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The OWASP Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* @author Nick Sanidas
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet(value="/xss-01/BenchmarkTest00544")
public class BenchmarkTest00544 extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request, response);
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {<|fim▁hole|> String param = "";
boolean flag = true;
java.util.Enumeration<String> names = request.getParameterNames();
while (names.hasMoreElements() && flag) {
String name = (String) names.nextElement();
String[] values = request.getParameterValues(name);
if (values != null) {
for(int i=0;i<values.length && flag; i++){
String value = values[i];
if (value.equals("BenchmarkTest00544")) {
param = name;
flag = false;
}
}
}
}
String bar = "alsosafe";
if (param != null) {
java.util.List<String> valuesList = new java.util.ArrayList<String>( );
valuesList.add("safe");
valuesList.add( param );
valuesList.add( "moresafe" );
valuesList.remove(0); // remove the 1st safe value
bar = valuesList.get(1); // get the last 'safe' value
}
response.setHeader("X-XSS-Protection", "0");
response.getWriter().print(bar.toCharArray());
}
}<|fim▁end|>
|
response.setContentType("text/html;charset=UTF-8");
|
<|file_name|>revlog.py<|end_file_name|><|fim▁begin|># revlog.py - storage back-end for mercurial
#
# Copyright 2005-2007 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Storage back-end for Mercurial.
This provides efficient delta storage with O(1) retrieve and append
and O(changes) merge between branches.
"""
# import stuff from node for others to import from revlog
from node import bin, hex, nullid, nullrev
from i18n import _
import ancestor, mdiff, parsers, error, util, dagutil
import struct, zlib, errno
_pack = struct.pack
_unpack = struct.unpack
_compress = zlib.compress
_decompress = zlib.decompress
_sha = util.sha1
# revlog header flags
REVLOGV0 = 0
REVLOGNG = 1
REVLOGNGINLINEDATA = (1 << 16)
REVLOGGENERALDELTA = (1 << 17)
REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGGENERALDELTA
# revlog index flags
REVIDX_KNOWN_FLAGS = 0
# max size of revlog with inline data
_maxinline = 131072
_chunksize = 1048576
RevlogError = error.RevlogError
LookupError = error.LookupError
def getoffset(q):
return int(q >> 16)
def gettype(q):
return int(q & 0xFFFF)
def offset_type(offset, type):
return long(long(offset) << 16 | type)
nullhash = _sha(nullid)
def hash(text, p1, p2):
"""generate a hash from the given text and its parent hashes
This hash combines both the current file contents and its history
in a manner that makes it easy to distinguish nodes with the same
content in the revision graph.
"""
# As of now, if one of the parent node is null, p2 is null
if p2 == nullid:
# deep copy of a hash is faster than creating one
s = nullhash.copy()
s.update(p1)
else:
# none of the parent nodes are nullid
l = [p1, p2]
l.sort()
s = _sha(l[0])
s.update(l[1])
s.update(text)
return s.digest()
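# Editor's sketch (not part of the original module): a worked example of the
# fast path above.  For a root revision both parents are nullid, so the digest
# is sha1(nullid + nullid + text), seeded from the precomputed nullhash copy.
def _hash_root_example(text):
    s = nullhash.copy()   # sha1 state already fed with nullid (the smaller parent)
    s.update(nullid)      # the other parent
    s.update(text)
    return s.digest() == hash(text, nullid, nullid)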
def compress(text):
""" generate a possibly-compressed representation of text """
if not text:
return ("", text)
l = len(text)
bin = None
if l < 44:
pass
elif l > 1000000:
# zlib makes an internal copy, thus doubling memory usage for
# large files, so lets do this in pieces
z = zlib.compressobj()
p = []
pos = 0
while pos < l:
pos2 = pos + 2**20
p.append(z.compress(text[pos:pos2]))
pos = pos2
p.append(z.flush())
if sum(map(len, p)) < l:
bin = "".join(p)
else:
bin = _compress(text)
if bin is None or len(bin) > l:
if text[0] == '\0':
return ("", text)
return ('u', text)
return ("", bin)
def decompress(bin):
""" decompress the given input """
if not bin:
return bin
t = bin[0]
if t == '\0':
return bin
if t == 'x':
return _decompress(bin)
if t == 'u':
return bin[1:]
raise RevlogError(_("unknown compression type %r") % t)
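# Editor's sketch (not part of the original module): compress()/decompress()
# round-trip.  Incompressible or very short input comes back tagged 'u' and is
# stored literally, zlib output starts with 'x' and is inflated transparently,
# and a leading '\0' marks text that is kept as-is.
def _compression_roundtrip_example(text):
    marker, data = compress(text)
    return decompress(marker + data) == text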
indexformatv0 = ">4l20s20s20s"
v0shaoffset = 56
class revlogoldio(object):
def __init__(self):
self.size = struct.calcsize(indexformatv0)
def parseindex(self, data, inline):
s = self.size
index = []
nodemap = {nullid: nullrev}
n = off = 0
l = len(data)
while off + s <= l:
cur = data[off:off + s]
off += s
e = _unpack(indexformatv0, cur)
# transform to revlogv1 format
e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
index.append(e2)
nodemap[e[6]] = n
n += 1
# add the magic null revision at -1
index.append((0, 0, 0, -1, -1, -1, -1, nullid))
return index, nodemap, None
def packentry(self, entry, node, version, rev):
if gettype(entry[0]):
raise RevlogError(_("index entry flags need RevlogNG"))
e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
node(entry[5]), node(entry[6]), entry[7])
return _pack(indexformatv0, *e2)
# index ng:
# 6 bytes: offset
# 2 bytes: flags
# 4 bytes: compressed length
# 4 bytes: uncompressed length
# 4 bytes: base rev
# 4 bytes: link rev
# 4 bytes: parent 1 rev
# 4 bytes: parent 2 rev
# 32 bytes: nodeid
indexformatng = ">Qiiiiii20s12x"
ngshaoffset = 32
versionformat = ">I"
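# Editor's note (not part of the original module): the RevlogNG layout described
# above packs offset and flags into the leading Q, six 4-byte ints follow, and
# the 20-byte nodeid is padded to 32 bytes, giving 64 bytes per entry with the
# sha starting at byte 32 (ngshaoffset).
assert struct.calcsize(indexformatng) == 64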
class revlogio(object):
def __init__(self):
self.size = struct.calcsize(indexformatng)
def parseindex(self, data, inline):
# call the C implementation to parse the index data
index, cache = parsers.parse_index2(data, inline)
return index, getattr(index, 'nodemap', None), cache
def packentry(self, entry, node, version, rev):
p = _pack(indexformatng, *entry)
if rev == 0:
p = _pack(versionformat, version) + p[4:]
return p
class revlog(object):
"""
the underlying revision storage object
A revlog consists of two parts, an index and the revision data.
The index is a file with a fixed record size containing
information on each revision, including its nodeid (hash), the
nodeids of its parents, the position and offset of its data within
the data file, and the revision it's based on. Finally, each entry
contains a linkrev entry that can serve as a pointer to external
data.
The revision data itself is a linear collection of data chunks.
Each chunk represents a revision and is usually represented as a
delta against the previous chunk. To bound lookup time, runs of
deltas are limited to about 2 times the length of the original
version data. This makes retrieval of a version proportional to
its size, or O(1) relative to the number of revisions.
Both pieces of the revlog are written to in an append-only
fashion, which means we never need to rewrite a file to insert or
remove data, and can use some simple techniques to avoid the need
for locking while reading.
"""
def __init__(self, opener, indexfile):
"""
create a revlog object
opener is a function that abstracts the file opening operation
and can be used to implement COW semantics or the like.
"""
self.indexfile = indexfile
self.datafile = indexfile[:-2] + ".d"
self.opener = opener
self._cache = None
self._basecache = (0, 0)
self._chunkcache = (0, '')
self.index = []
self._pcache = {}
self._nodecache = {nullid: nullrev}
self._nodepos = None
v = REVLOG_DEFAULT_VERSION
opts = getattr(opener, 'options', None)
if opts is not None:
if 'revlogv1' in opts:
if 'generaldelta' in opts:
v |= REVLOGGENERALDELTA
else:
v = 0
i = ''
self._initempty = True
try:
f = self.opener(self.indexfile)
i = f.read()
f.close()
if len(i) > 0:
v = struct.unpack(versionformat, i[:4])[0]
self._initempty = False
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
self.version = v
self._inline = v & REVLOGNGINLINEDATA
self._generaldelta = v & REVLOGGENERALDELTA
flags = v & ~0xFFFF
fmt = v & 0xFFFF
if fmt == REVLOGV0 and flags:
raise RevlogError(_("index %s unknown flags %#04x for format v0")
% (self.indexfile, flags >> 16))
elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
raise RevlogError(_("index %s unknown flags %#04x for revlogng")
% (self.indexfile, flags >> 16))
elif fmt > REVLOGNG:
raise RevlogError(_("index %s unknown format %d")
% (self.indexfile, fmt))
self._io = revlogio()
if self.version == REVLOGV0:
self._io = revlogoldio()
try:
d = self._io.parseindex(i, self._inline)
except (ValueError, IndexError):
raise RevlogError(_("index %s is corrupted") % (self.indexfile))
self.index, nodemap, self._chunkcache = d
if nodemap is not None:
self.nodemap = self._nodecache = nodemap
if not self._chunkcache:
self._chunkclear()
def tip(self):
return self.node(len(self.index) - 2)
def __len__(self):
return len(self.index) - 1
def __iter__(self):
for i in xrange(len(self)):
yield i
@util.propertycache
def nodemap(self):
self.rev(self.node(0))
return self._nodecache
def hasnode(self, node):
try:
self.rev(node)
return True
except KeyError:
return False
def clearcaches(self):
try:
self._nodecache.clearcaches()
except AttributeError:
self._nodecache = {nullid: nullrev}
self._nodepos = None
def rev(self, node):
try:
return self._nodecache[node]
except RevlogError:
# parsers.c radix tree lookup failed
raise LookupError(node, self.indexfile, _('no node'))
except KeyError:
# pure python cache lookup failed
n = self._nodecache
i = self.index
p = self._nodepos
if p is None:
p = len(i) - 2
for r in xrange(p, -1, -1):
v = i[r][7]
n[v] = r
if v == node:
self._nodepos = r - 1
return r
raise LookupError(node, self.indexfile, _('no node'))
def node(self, rev):
return self.index[rev][7]
def linkrev(self, rev):
return self.index[rev][4]
def parents(self, node):
i = self.index
d = i[self.rev(node)]
return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
def parentrevs(self, rev):
return self.index[rev][5:7]
def start(self, rev):
return int(self.index[rev][0] >> 16)
def end(self, rev):
return self.start(rev) + self.length(rev)
def length(self, rev):
return self.index[rev][1]
def chainbase(self, rev):
index = self.index
base = index[rev][3]
while base != rev:
rev = base
base = index[rev][3]
return base
def flags(self, rev):
return self.index[rev][0] & 0xFFFF
def rawsize(self, rev):
"""return the length of the uncompressed text for a given revision"""
l = self.index[rev][2]
if l >= 0:
return l
t = self.revision(self.node(rev))
return len(t)
size = rawsize
def reachable(self, node, stop=None):
"""return the set of all nodes ancestral to a given node, including
the node itself, stopping when stop is matched"""
reachable = set((node,))
visit = [node]
if stop:
stopn = self.rev(stop)
else:
stopn = 0
while visit:
n = visit.pop(0)
if n == stop:
continue
if n == nullid:
continue
for p in self.parents(n):
if self.rev(p) < stopn:
continue
if p not in reachable:
reachable.add(p)
visit.append(p)
return reachable
def ancestors(self, *revs):
"""Generate the ancestors of 'revs' in reverse topological order.
Yield a sequence of revision numbers starting with the parents
of each revision in revs, i.e., each revision is *not* considered
an ancestor of itself. Results are in breadth-first order:
parents of each rev in revs, then parents of those, etc. Result
does not include the null revision."""
visit = list(revs)
seen = set([nullrev])
while visit:
for parent in self.parentrevs(visit.pop(0)):
if parent not in seen:
visit.append(parent)
seen.add(parent)
yield parent
def descendants(self, *revs):
"""Generate the descendants of 'revs' in revision order.
Yield a sequence of revision numbers starting with a child of
some rev in revs, i.e., each revision is *not* considered a
descendant of itself. Results are ordered by revision number (a
topological sort)."""
first = min(revs)
if first == nullrev:
for i in self:
yield i
return
seen = set(revs)
for i in xrange(first + 1, len(self)):
for x in self.parentrevs(i):
if x != nullrev and x in seen:
seen.add(i)
yield i
break
def findcommonmissing(self, common=None, heads=None):
"""Return a tuple of the ancestors of common and the ancestors of heads
that are not ancestors of common. In revset terminology, we return the
tuple:
::common, (::heads) - (::common)
The list is sorted by revision number, meaning it is
topologically sorted.
'heads' and 'common' are both lists of node IDs. If heads is
not supplied, uses all of the revlog's heads. If common is not
supplied, uses nullid."""
if common is None:
common = [nullid]
if heads is None:
heads = self.heads()
common = [self.rev(n) for n in common]
heads = [self.rev(n) for n in heads]
# we want the ancestors, but inclusive
has = set(self.ancestors(*common))
has.add(nullrev)
has.update(common)
# take all ancestors from heads that aren't in has
missing = set()
visit = [r for r in heads if r not in has]
while visit:
r = visit.pop(0)
if r in missing:
continue
else:
missing.add(r)
for p in self.parentrevs(r):
if p not in has:
visit.append(p)
missing = list(missing)
missing.sort()
return has, [self.node(r) for r in missing]
def findmissing(self, common=None, heads=None):
"""Return the ancestors of heads that are not ancestors of common.
More specifically, return a list of nodes N such that every N
satisfies the following constraints:
1. N is an ancestor of some node in 'heads'
2. N is not an ancestor of any node in 'common'
The list is sorted by revision number, meaning it is
topologically sorted.
'heads' and 'common' are both lists of node IDs. If heads is
not supplied, uses all of the revlog's heads. If common is not
supplied, uses nullid."""
_common, missing = self.findcommonmissing(common, heads)
return missing
def nodesbetween(self, roots=None, heads=None):
"""Return a topological path from 'roots' to 'heads'.
Return a tuple (nodes, outroots, outheads) where 'nodes' is a
topologically sorted list of all nodes N that satisfy both of
these constraints:
1. N is a descendant of some node in 'roots'
2. N is an ancestor of some node in 'heads'
Every node is considered to be both a descendant and an ancestor
of itself, so every reachable node in 'roots' and 'heads' will be
included in 'nodes'.
'outroots' is the list of reachable nodes in 'roots', i.e., the
subset of 'roots' that is returned in 'nodes'. Likewise,
'outheads' is the subset of 'heads' that is also in 'nodes'.
'roots' and 'heads' are both lists of node IDs. If 'roots' is
unspecified, uses nullid as the only root. If 'heads' is
unspecified, uses list of all of the revlog's heads."""
nonodes = ([], [], [])
if roots is not None:
roots = list(roots)
if not roots:
return nonodes
lowestrev = min([self.rev(n) for n in roots])
else:
roots = [nullid] # Everybody's a descendant of nullid
lowestrev = nullrev
if (lowestrev == nullrev) and (heads is None):
# We want _all_ the nodes!
return ([self.node(r) for r in self], [nullid], list(self.heads()))
if heads is None:
# All nodes are ancestors, so the latest ancestor is the last
# node.
highestrev = len(self) - 1
# Set ancestors to None to signal that every node is an ancestor.
ancestors = None
# Set heads to an empty dictionary for later discovery of heads
heads = {}
else:
heads = list(heads)
if not heads:
return nonodes
ancestors = set()
# Turn heads into a dictionary so we can remove 'fake' heads.
# Also, later we will be using it to filter out the heads we can't
# find from roots.
heads = dict.fromkeys(heads, False)
# Start at the top and keep marking parents until we're done.
nodestotag = set(heads)
# Remember where the top was so we can use it as a limit later.
highestrev = max([self.rev(n) for n in nodestotag])
while nodestotag:
# grab a node to tag
n = nodestotag.pop()
# Never tag nullid
if n == nullid:
continue
# A node's revision number represents its place in a
# topologically sorted list of nodes.
r = self.rev(n)
if r >= lowestrev:
if n not in ancestors:
# If we are possibly a descendant of one of the roots
# and we haven't already been marked as an ancestor
ancestors.add(n) # Mark as ancestor
# Add non-nullid parents to list of nodes to tag.
nodestotag.update([p for p in self.parents(n) if
p != nullid])
elif n in heads: # We've seen it before, is it a fake head?
# So it is, real heads should not be the ancestors of
# any other heads.
heads.pop(n)
if not ancestors:
return nonodes
# Now that we have our set of ancestors, we want to remove any
# roots that are not ancestors.
# If one of the roots was nullid, everything is included anyway.
if lowestrev > nullrev:
# But, since we weren't, let's recompute the lowest rev to not
# include roots that aren't ancestors.
# Filter out roots that aren't ancestors of heads
roots = [n for n in roots if n in ancestors]
# Recompute the lowest revision
if roots:
lowestrev = min([self.rev(n) for n in roots])
else:
# No more roots? Return empty list
return nonodes
else:
# We are descending from nullid, and don't need to care about
# any other roots.
lowestrev = nullrev
roots = [nullid]
# Transform our roots list into a set.
descendants = set(roots)
# Also, keep the original roots so we can filter out roots that aren't
# 'real' roots (i.e. are descended from other roots).
roots = descendants.copy()
# Our topologically sorted list of output nodes.
orderedout = []
# Don't start at nullid since we don't want nullid in our output list,
        # and if nullid shows up in descendants, empty parents will look like
# they're descendants.
for r in xrange(max(lowestrev, 0), highestrev + 1):
n = self.node(r)
isdescendant = False
if lowestrev == nullrev: # Everybody is a descendant of nullid
isdescendant = True
elif n in descendants:
# n is already a descendant
isdescendant = True
# This check only needs to be done here because all the roots
                # will start being marked as descendants before the loop.
if n in roots:
# If n was a root, check if it's a 'real' root.
p = tuple(self.parents(n))
# If any of its parents are descendants, it's not a root.
if (p[0] in descendants) or (p[1] in descendants):
roots.remove(n)
else:
p = tuple(self.parents(n))
                # A node is a descendant if either of its parents is a
                # descendant. (We seeded the descendants set with the roots
                # up there, remember?)
if (p[0] in descendants) or (p[1] in descendants):
descendants.add(n)
isdescendant = True
if isdescendant and ((ancestors is None) or (n in ancestors)):
# Only include nodes that are both descendants and ancestors.
orderedout.append(n)
if (ancestors is not None) and (n in heads):
# We're trying to figure out which heads are reachable
# from roots.
# Mark this head as having been reached
heads[n] = True
elif ancestors is None:
# Otherwise, we're trying to discover the heads.
# Assume this is a head because if it isn't, the next step
# will eventually remove it.
heads[n] = True
# But, obviously its parents aren't.
for p in self.parents(n):
heads.pop(p, None)
heads = [n for n, flag in heads.iteritems() if flag]
roots = list(roots)
assert orderedout
assert roots
assert heads
return (orderedout, roots, heads)
def headrevs(self):
count = len(self)
if not count:
return [nullrev]
ishead = [1] * (count + 1)
index = self.index
for r in xrange(count):
e = index[r]
ishead[e[5]] = ishead[e[6]] = 0
return [r for r in xrange(count) if ishead[r]]
def heads(self, start=None, stop=None):
"""return the list of all nodes that have no children
if start is specified, only heads that are descendants of
start will be returned
if stop is specified, it will consider all the revs from stop
as if they had no children
"""
if start is None and stop is None:
if not len(self):
return [nullid]
return [self.node(r) for r in self.headrevs()]
if start is None:
start = nullid
if stop is None:
stop = []
stoprevs = set([self.rev(n) for n in stop])
startrev = self.rev(start)
reachable = set((startrev,))
heads = set((startrev,))
parentrevs = self.parentrevs
for r in xrange(startrev + 1, len(self)):
for p in parentrevs(r):
if p in reachable:
if r not in stoprevs:
reachable.add(r)
heads.add(r)
if p in heads and p not in stoprevs:
heads.remove(p)
return [self.node(r) for r in heads]
def children(self, node):
"""find the children of a given node"""
c = []
p = self.rev(node)
for r in range(p + 1, len(self)):
prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
if prevs:
for pr in prevs:
if pr == p:
c.append(self.node(r))
elif p == nullrev:
c.append(self.node(r))
return c
def descendant(self, start, end):
if start == nullrev:
return True
for i in self.descendants(start):
if i == end:
return True
elif i > end:
break
return False
def ancestor(self, a, b):
"""calculate the least common ancestor of nodes a and b"""
# fast path, check if it is a descendant
a, b = self.rev(a), self.rev(b)
start, end = sorted((a, b))
if self.descendant(start, end):
return self.node(start)
def parents(rev):
return [p for p in self.parentrevs(rev) if p != nullrev]
c = ancestor.ancestor(a, b, parents)
if c is None:
return nullid
return self.node(c)
def _match(self, id):
if isinstance(id, (long, int)):
# rev
return self.node(id)
if len(id) == 20:
# possibly a binary node
# odds of a binary node being all hex in ASCII are 1 in 10**25
try:
node = id
self.rev(node) # quick search the index
return node
except LookupError:
pass # may be partial hex id
try:
# str(rev)
rev = int(id)
if str(rev) != id:
raise ValueError
if rev < 0:
rev = len(self) + rev
if rev < 0 or rev >= len(self):
raise ValueError
return self.node(rev)
except (ValueError, OverflowError):
pass
if len(id) == 40:
try:
# a full hex nodeid?
node = bin(id)
self.rev(node)
return node
except (TypeError, LookupError):
pass
def _partialmatch(self, id):
if id in self._pcache:
return self._pcache[id]
if len(id) < 40:
try:
# hex(node)[:...]
l = len(id) // 2 # grab an even number of digits
prefix = bin(id[:l * 2])
nl = [e[7] for e in self.index if e[7].startswith(prefix)]
nl = [n for n in nl if hex(n).startswith(id)]
if len(nl) > 0:
if len(nl) == 1:
self._pcache[id] = nl[0]
return nl[0]
raise LookupError(id, self.indexfile,
_('ambiguous identifier'))
return None
except TypeError:
pass
def lookup(self, id):
"""locate a node based on:
- revision number or str(revision number)
- nodeid or subset of hex nodeid
"""
n = self._match(id)
if n is not None:
return n
n = self._partialmatch(id)
if n:
return n
raise LookupError(id, self.indexfile, _('no match found'))
def cmp(self, node, text):
"""compare text with a given file revision
returns True if text is different than what is stored.
"""
p1, p2 = self.parents(node)
return hash(text, p1, p2) != node
def _addchunk(self, offset, data):
o, d = self._chunkcache
# try to add to existing cache
if o + len(d) == offset and len(d) + len(data) < _chunksize:
self._chunkcache = o, d + data
else:
self._chunkcache = offset, data
def _loadchunk(self, offset, length):
if self._inline:
df = self.opener(self.indexfile)<|fim▁hole|>
readahead = max(65536, length)
df.seek(offset)
d = df.read(readahead)
df.close()
self._addchunk(offset, d)
if readahead > length:
return util.buffer(d, 0, length)
return d
def _getchunk(self, offset, length):
o, d = self._chunkcache
l = len(d)
# is it in the cache?
cachestart = offset - o
cacheend = cachestart + length
if cachestart >= 0 and cacheend <= l:
if cachestart == 0 and cacheend == l:
return d # avoid a copy
return util.buffer(d, cachestart, cacheend - cachestart)
return self._loadchunk(offset, length)
def _chunkraw(self, startrev, endrev):
start = self.start(startrev)
length = self.end(endrev) - start
if self._inline:
start += (startrev + 1) * self._io.size
return self._getchunk(start, length)
def _chunk(self, rev):
return decompress(self._chunkraw(rev, rev))
def _chunkbase(self, rev):
return self._chunk(rev)
def _chunkclear(self):
self._chunkcache = (0, '')
def deltaparent(self, rev):
"""return deltaparent of the given revision"""
base = self.index[rev][3]
if base == rev:
return nullrev
elif self._generaldelta:
return base
else:
return rev - 1
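    # Editor's note (illustrative, not part of the original module): with
    # generaldelta the stored base revision *is* the delta parent, so a rev 7
    # whose index base is 3 deltas against rev 3; a classic revlog always
    # deltas against the immediately preceding revision, here rev 6.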
def revdiff(self, rev1, rev2):
"""return or calculate a delta between two revisions"""
if rev1 != nullrev and self.deltaparent(rev2) == rev1:
return str(self._chunk(rev2))
return mdiff.textdiff(self.revision(rev1),
self.revision(rev2))
def revision(self, nodeorrev):
"""return an uncompressed revision of a given node or revision
number.
"""
if isinstance(nodeorrev, int):
rev = nodeorrev
node = self.node(rev)
else:
node = nodeorrev
rev = None
cachedrev = None
if node == nullid:
return ""
if self._cache:
if self._cache[0] == node:
return self._cache[2]
cachedrev = self._cache[1]
# look up what we need to read
text = None
if rev is None:
rev = self.rev(node)
# check rev flags
if self.flags(rev) & ~REVIDX_KNOWN_FLAGS:
raise RevlogError(_('incompatible revision flag %x') %
(self.flags(rev) & ~REVIDX_KNOWN_FLAGS))
# build delta chain
chain = []
index = self.index # for performance
generaldelta = self._generaldelta
iterrev = rev
e = index[iterrev]
while iterrev != e[3] and iterrev != cachedrev:
chain.append(iterrev)
if generaldelta:
iterrev = e[3]
else:
iterrev -= 1
e = index[iterrev]
chain.reverse()
base = iterrev
if iterrev == cachedrev:
# cache hit
text = self._cache[2]
# drop cache to save memory
self._cache = None
self._chunkraw(base, rev)
if text is None:
text = str(self._chunkbase(base))
bins = [self._chunk(r) for r in chain]
text = mdiff.patches(text, bins)
text = self._checkhash(text, node, rev)
self._cache = (node, rev, text)
return text
def _checkhash(self, text, node, rev):
p1, p2 = self.parents(node)
if node != hash(text, p1, p2):
raise RevlogError(_("integrity check failed on %s:%d")
% (self.indexfile, rev))
return text
def checkinlinesize(self, tr, fp=None):
if not self._inline or (self.start(-2) + self.length(-2)) < _maxinline:
return
trinfo = tr.find(self.indexfile)
if trinfo is None:
raise RevlogError(_("%s not found in the transaction")
% self.indexfile)
trindex = trinfo[2]
dataoff = self.start(trindex)
tr.add(self.datafile, dataoff)
if fp:
fp.flush()
fp.close()
df = self.opener(self.datafile, 'w')
try:
for r in self:
df.write(self._chunkraw(r, r))
finally:
df.close()
fp = self.opener(self.indexfile, 'w', atomictemp=True)
self.version &= ~(REVLOGNGINLINEDATA)
self._inline = False
for i in self:
e = self._io.packentry(self.index[i], self.node, self.version, i)
fp.write(e)
# if we don't call close, the temp file will never replace the
# real index
fp.close()
tr.replace(self.indexfile, trindex * self._io.size)
self._chunkclear()
def addrevision(self, text, transaction, link, p1, p2, cachedelta=None):
"""add a revision to the log
text - the revision data to add
transaction - the transaction object used for rollback
link - the linkrev data to add
p1, p2 - the parent nodeids of the revision
cachedelta - an optional precomputed delta
"""
node = hash(text, p1, p2)
if node in self.nodemap:
return node
dfh = None
if not self._inline:
dfh = self.opener(self.datafile, "a")
ifh = self.opener(self.indexfile, "a+")
try:
return self._addrevision(node, text, transaction, link, p1, p2,
cachedelta, ifh, dfh)
finally:
if dfh:
dfh.close()
ifh.close()
def _addrevision(self, node, text, transaction, link, p1, p2,
cachedelta, ifh, dfh):
"""internal function to add revisions to the log
see addrevision for argument descriptions.
invariants:
- text is optional (can be None); if not set, cachedelta must be set.
          if both are set, they must correspond to each other.
"""
btext = [text]
def buildtext():
if btext[0] is not None:
return btext[0]
# flush any pending writes here so we can read it in revision
if dfh:
dfh.flush()
ifh.flush()
basetext = self.revision(self.node(cachedelta[0]))
btext[0] = mdiff.patch(basetext, cachedelta[1])
chk = hash(btext[0], p1, p2)
if chk != node:
raise RevlogError(_("consistency error in delta"))
return btext[0]
def builddelta(rev):
# can we use the cached delta?
if cachedelta and cachedelta[0] == rev:
delta = cachedelta[1]
else:
t = buildtext()
ptext = self.revision(self.node(rev))
delta = mdiff.textdiff(ptext, t)
data = compress(delta)
l = len(data[1]) + len(data[0])
if basecache[0] == rev:
chainbase = basecache[1]
else:
chainbase = self.chainbase(rev)
dist = l + offset - self.start(chainbase)
if self._generaldelta:
base = rev
else:
base = chainbase
return dist, l, data, base, chainbase
curr = len(self)
prev = curr - 1
base = chainbase = curr
offset = self.end(prev)
flags = 0
d = None
basecache = self._basecache
p1r, p2r = self.rev(p1), self.rev(p2)
# should we try to build a delta?
if prev != nullrev:
if self._generaldelta:
if p1r >= basecache[1]:
d = builddelta(p1r)
elif p2r >= basecache[1]:
d = builddelta(p2r)
else:
d = builddelta(prev)
else:
d = builddelta(prev)
dist, l, data, base, chainbase = d
# full versions are inserted when the needed deltas
# become comparable to the uncompressed text
if text is None:
textlen = mdiff.patchedsize(self.rawsize(cachedelta[0]),
cachedelta[1])
else:
textlen = len(text)
if d is None or dist > textlen * 2:
text = buildtext()
data = compress(text)
l = len(data[1]) + len(data[0])
base = chainbase = curr
e = (offset_type(offset, flags), l, textlen,
base, link, p1r, p2r, node)
self.index.insert(-1, e)
self.nodemap[node] = curr
entry = self._io.packentry(e, self.node, self.version, curr)
if not self._inline:
transaction.add(self.datafile, offset)
transaction.add(self.indexfile, curr * len(entry))
if data[0]:
dfh.write(data[0])
dfh.write(data[1])
dfh.flush()
ifh.write(entry)
else:
offset += curr * self._io.size
transaction.add(self.indexfile, offset, curr)
ifh.write(entry)
ifh.write(data[0])
ifh.write(data[1])
self.checkinlinesize(transaction, ifh)
if type(text) == str: # only accept immutable objects
self._cache = (node, curr, text)
self._basecache = (curr, chainbase)
return node
def group(self, nodelist, bundler, reorder=None):
"""Calculate a delta group, yielding a sequence of changegroup chunks
(strings).
Given a list of changeset revs, return a set of deltas and
metadata corresponding to nodes. The first delta is
first parent(nodelist[0]) -> nodelist[0], the receiver is
guaranteed to have this parent as it has all history before
these changesets. In the case firstparent is nullrev the
changegroup starts with a full revision.
"""
# if we don't have any revisions touched by these changesets, bail
if len(nodelist) == 0:
yield bundler.close()
return
# for generaldelta revlogs, we linearize the revs; this will both be
# much quicker and generate a much smaller bundle
if (self._generaldelta and reorder is not False) or reorder:
dag = dagutil.revlogdag(self)
revs = set(self.rev(n) for n in nodelist)
revs = dag.linearize(revs)
else:
revs = sorted([self.rev(n) for n in nodelist])
# add the parent of the first rev
p = self.parentrevs(revs[0])[0]
revs.insert(0, p)
# build deltas
for r in xrange(len(revs) - 1):
prev, curr = revs[r], revs[r + 1]
for c in bundler.revchunk(self, curr, prev):
yield c
yield bundler.close()
def addgroup(self, bundle, linkmapper, transaction):
"""
add a delta group
given a set of deltas, add them to the revision log. the
first delta is against its parent, which should be in our
log, the rest are against the previous delta.
"""
# track the base of the current delta log
content = []
node = None
r = len(self)
end = 0
if r:
end = self.end(r - 1)
ifh = self.opener(self.indexfile, "a+")
isize = r * self._io.size
if self._inline:
transaction.add(self.indexfile, end + isize, r)
dfh = None
else:
transaction.add(self.indexfile, isize, r)
transaction.add(self.datafile, end)
dfh = self.opener(self.datafile, "a")
try:
# loop through our set of deltas
chain = None
while True:
chunkdata = bundle.deltachunk(chain)
if not chunkdata:
break
node = chunkdata['node']
p1 = chunkdata['p1']
p2 = chunkdata['p2']
cs = chunkdata['cs']
deltabase = chunkdata['deltabase']
delta = chunkdata['delta']
content.append(node)
link = linkmapper(cs)
if node in self.nodemap:
# this can happen if two branches make the same change
chain = node
continue
for p in (p1, p2):
if not p in self.nodemap:
raise LookupError(p, self.indexfile,
_('unknown parent'))
if deltabase not in self.nodemap:
raise LookupError(deltabase, self.indexfile,
_('unknown delta base'))
baserev = self.rev(deltabase)
chain = self._addrevision(node, None, transaction, link,
p1, p2, (baserev, delta), ifh, dfh)
if not dfh and not self._inline:
# addrevision switched from inline to conventional
# reopen the index
ifh.close()
dfh = self.opener(self.datafile, "a")
ifh = self.opener(self.indexfile, "a")
finally:
if dfh:
dfh.close()
ifh.close()
return content
def strip(self, minlink, transaction):
"""truncate the revlog on the first revision with a linkrev >= minlink
This function is called when we're stripping revision minlink and
its descendants from the repository.
We have to remove all revisions with linkrev >= minlink, because
the equivalent changelog revisions will be renumbered after the
strip.
So we truncate the revlog on the first of these revisions, and
trust that the caller has saved the revisions that shouldn't be
removed and that it'll re-add them after this truncation.
"""
if len(self) == 0:
return
for rev in self:
if self.index[rev][4] >= minlink:
break
else:
return
# first truncate the files on disk
end = self.start(rev)
if not self._inline:
transaction.add(self.datafile, end)
end = rev * self._io.size
else:
end += rev * self._io.size
transaction.add(self.indexfile, end)
# then reset internal state in memory to forget those revisions
self._cache = None
self._chunkclear()
for x in xrange(rev, len(self)):
del self.nodemap[self.node(x)]
del self.index[rev:-1]
def checksize(self):
expected = 0
if len(self):
expected = max(0, self.end(len(self) - 1))
try:
f = self.opener(self.datafile)
f.seek(0, 2)
actual = f.tell()
f.close()
dd = actual - expected
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
dd = 0
try:
f = self.opener(self.indexfile)
f.seek(0, 2)
actual = f.tell()
f.close()
s = self._io.size
i = max(0, actual // s)
di = actual - (i * s)
if self._inline:
databytes = 0
for r in self:
databytes += max(0, self.length(r))
dd = 0
di = actual - len(self) * s - databytes
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
di = 0
return (dd, di)
def files(self):
res = [self.indexfile]
if not self._inline:
res.append(self.datafile)
return res<|fim▁end|>
|
else:
df = self.opener(self.datafile)
|
<|file_name|>oai_harvest.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Invenio Demosite.<|fim▁hole|># Invenio Demosite is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio Demosite is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from datetime import datetime
from fixture import DataSet
class OaiREPOSITORYData(DataSet):
class OaiREPOSITORY_2:
f1 = u'reportnumber'
f2 = u'division'
f3 = u''
setRecList = None
setDefinition = u'c=;p1=CERN;f1=reportnumber;m1=a;p2=(EP|PPE);f2=division;m2=r;p3=;f3=;m3=;'
last_updated = datetime.now()
id = 2
setSpec = u'cern:experiment'
setDescription = u''
p3 = u''
p1 = u'CERN'
setName = u'CERN experimental papers'
setCollection = u''
p2 = u'(EP|PPE)'
m1 = u'a'
m3 = u''
m2 = u'r'
class OaiREPOSITORY_3:
f1 = u'reportnumber'
f2 = u'division'
f3 = u''
setRecList = None
setDefinition = u'c=;p1=CERN;f1=reportnumber;m1=a;p2=TH;f2=division;m2=e;p3=;f3=;m3=;'
last_updated = datetime.now()
id = 3
setSpec = u'cern:theory'
setDescription = u''
p3 = u''
p1 = u'CERN'
setName = u'CERN theoretical papers'
setCollection = u''
p2 = u'TH'
m1 = u'a'
m3 = u''
m2 = u'e'
__all__ = ('OaiREPOSITORYData', )<|fim▁end|>
|
# Copyright (C) 2013 CERN.
#
|
<|file_name|>hydro.conf.js<|end_file_name|><|fim▁begin|>/**
* Hydro configuration
*
* @param {Hydro} hydro
*/
module.exports = function(hydro) {
hydro.set({
suite: 'equals',
timeout: 500,
plugins: [
require('hydro-chai'),
require('hydro-bdd')
],
chai: {<|fim▁hole|> stack: true
}
})
}<|fim▁end|>
|
chai: require('chai'),
styles: ['should'],
|
<|file_name|>layout.rs<|end_file_name|><|fim▁begin|>use super::{FractionalHex,Hex};
use std::f32::consts::PI;<|fim▁hole|> f: Vec<f32>,
b: Vec<f32>,
start_angle: f32
}
impl Orientation {
pub fn new(f: Vec<f32>,b: Vec<f32>,start_angle: f32) -> Orientation {
Orientation {
f: f,
b: b,
start_angle: start_angle
}
}
pub fn pointy() -> Orientation {
Orientation::new(
vec![(3.0 as f32).sqrt(), (3.0 as f32).sqrt() / 2.0, 0.0, 3.0 / 2.0],
vec![(3.0 as f32).sqrt() / 3.0, -1.0 / 3.0, 0.0, 2.0 / 3.0],
0.5
)
}
pub fn flat() -> Orientation {
Orientation::new(vec![3.0 / 2.0, 0.0, (3.0 as f32).sqrt() / 2.0, (3.0 as f32).sqrt()],
vec![2.0 / 3.0, 0.0, -1.0 / 3.0, (3.0 as f32).sqrt() / 3.0],
0.0)
}
}
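// Editor's note (not part of the original file): `f` holds the row-major 2x2
// forward matrix taking hex coordinates to pixel offsets, `b` holds its
// inverse for the reverse conversion, and `start_angle` is 0.5 for pointy-top
// grids so the first corner sits half a step around the hex.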
pub struct Point {
pub x: f32,
pub y: f32
}
impl Point {
pub fn new(x: f32,y: f32) -> Point {
Point {
x: x,
y: y
}
}
}
pub struct Layout {
orientation: Orientation,
size: Point,
origin: Point
}
impl Layout {
pub fn new(orientation: Orientation,size: Point,origin: Point) -> Layout {
Layout {
orientation: orientation,
size: size,
origin: origin
}
}
pub fn hex_to_pixel(&self,h: Hex) -> Point {
let orient = &self.orientation;
let x = (orient.f[0] * h.x as f32 + orient.f[1] * h.y as f32 ) * &self.size.x;
let y = (orient.f[2] * h.x as f32 + orient.f[3] * h.y as f32 ) * &self.size.y;
Point::new(x + &self.origin.x,y + &self.origin.y)
}
pub fn screen_to_hex(&self,p: Point) -> FractionalHex {
let orient = &self.orientation;
        let pt = Point::new((p.x - &self.origin.x) / &self.size.x,(p.y - &self.origin.y) / &self.size.y);
let x: f32 = orient.b[0] * pt.x as f32 + orient.b[1] * pt.y as f32 ;
        let y: f32 = orient.b[2] * pt.x as f32 + orient.b[3] * pt.y as f32 ;
FractionalHex::new(x,y,-x - y)
}
pub fn hex_corner_offset(&self,corner: i32) -> Point{
let angle = 2.0 * PI * (&self.orientation.start_angle + corner as f32) / 6.0;
Point::new(&self.size.x * angle.cos(), &self.size.y * angle.sin())
}
pub fn polygon_corners(&self,h: Hex) -> Vec<Point> {
let mut corners: Vec<Point> = Vec::new();
let center = &self.hex_to_pixel(h);
        for i in 0..6 {
let offset = &self.hex_corner_offset(i);
corners.push(Point::new(center.x + offset.x,center.y + offset.y))
}
corners
}
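    // Editor's sketch (not part of the original file; a `Hex::new(x, y, z)`
    // constructor in the sibling module is assumed):
    //
    //     let layout = Layout::new(Orientation::pointy(),
    //                              Point::new(10.0, 10.0),
    //                              Point::new(0.0, 0.0));
    //     let px = layout.hex_to_pixel(Hex::new(1, 0, -1));
    //     // pointy-top, size 10: px.x ~ 17.32 (10 * sqrt(3)), px.y == 0.0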
}<|fim▁end|>
|
pub struct Orientation {
|
<|file_name|>toolshell.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.<|fim▁hole|>//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
use gtk::{mod, ffi};
use gtk::cast::GTK_TOOLSHELL;
use gtk::{IconSize, Orientation, ReliefStyle, ToolbarStyle};
pub trait ToolShellTrait: gtk::WidgetTrait {
fn get_icon_size(&self) -> IconSize {
unsafe {
ffi::gtk_tool_shell_get_icon_size(GTK_TOOLSHELL(self.get_widget()))
}
}
fn get_orientation(&self) -> Orientation {
unsafe {
ffi::gtk_tool_shell_get_orientation(GTK_TOOLSHELL(self.get_widget()))
}
}
fn get_relief_style(&self) -> ReliefStyle {
unsafe {
ffi::gtk_tool_shell_get_relief_style(GTK_TOOLSHELL(self.get_widget()))
}
}
fn get_style(&self) -> ToolbarStyle {
unsafe {
ffi::gtk_tool_shell_get_style(GTK_TOOLSHELL(self.get_widget()))
}
}
fn get_text_alignment(&self) -> f32 {
unsafe {
ffi::gtk_tool_shell_get_text_alignment(GTK_TOOLSHELL(self.get_widget()))
}
}
fn get_text_orientation(&self) -> Orientation {
unsafe {
ffi::gtk_tool_shell_get_text_orientation(GTK_TOOLSHELL(self.get_widget()))
}
}
fn rebuild_menu(&mut self) -> () {
unsafe {
ffi::gtk_tool_shell_rebuild_menu(GTK_TOOLSHELL(self.get_widget()))
}
}
fn get_text_size_group(&self) -> Option<gtk::SizeGroup> {
let tmp_pointer = unsafe { ffi::gtk_tool_shell_get_text_size_group(GTK_TOOLSHELL(self.get_widget()) as *const ffi::C_GtkToolShell) };
if tmp_pointer.is_null() {
None
} else {
Some(gtk::SizeGroup::wrap_pointer(tmp_pointer))
}
}
}<|fim▁end|>
| |
<|file_name|>directive.js<|end_file_name|><|fim▁begin|>(function() {
'use strict';
function movieDetail(movieDetailService) {
return {
restrict: 'EA',
replace: true,
templateUrl: './src/app/movieDetail/template.html',
scope: {},
controllerAs: 'vm',<|fim▁hole|> var vm = this;
movieDetailService.getMovie().then(function(response){
vm.movie = response.data;
vm.movie.vote = (vm.movie.vote_average*10);
vm.movie.genres_name = [];
vm.movie.production_companies_name = [];
vm.movie.production_countries_name = [];
for (var i = 0; i <= vm.movie.genres.length-1; i++) {
vm.movie.genres_name.push(vm.movie.genres[i].name);
vm.movie.genres_name.sort();
}
for (var i = 0; i <= vm.movie.production_companies.length-1; i++) {
vm.movie.production_companies_name.push(vm.movie.production_companies[i].name);
vm.movie.production_companies_name.sort();
}
for (var i = 0; i <= vm.movie.production_countries.length-1; i++) {
vm.movie.production_countries_name.push(vm.movie.production_countries[i].name);
vm.movie.production_countries_name.sort();
}
});
},
link: function(scope, elm, attrs) {
}
};
}
angular.module('movieDetailDirective', ['services.movieDetail'])
.directive('movieDetail', movieDetail);
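    // Editor's note (illustrative, not part of the original file): with
    // restrict 'EA' the directive can be used in a template either as
    // <movie-detail></movie-detail> or as a movie-detail attribute; it
    // fetches its own data through movieDetailService.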
})();<|fim▁end|>
|
bindToController: true,
/*jshint unused:false*/
controller: function($log, $stateParams) {
|
<|file_name|>0021_auto_20170107_0813.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-07 08:13
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations
import djstripe.fields<|fim▁hole|>
dependencies = [
('djstripe', '0020_auto_20161229_0041'),
]
operations = [
migrations.AlterField(
model_name='subscription',
name='application_fee_percent',
field=djstripe.fields.StripePercentField(decimal_places=2, help_text="A positive decimal that represents the fee percentage of the subscription invoice amount that will be transferred to the application owner's Stripe account each billing period.", max_digits=5, null=True, validators=[django.core.validators.MinValueValidator(1.0), django.core.validators.MaxValueValidator(100.0)]),
),
]<|fim▁end|>
|
class Migration(migrations.Migration):
|
<|file_name|>e.rs<|end_file_name|><|fim▁begin|>// rustfmt-format_strings: true<|fim▁hole|>const foo: String =
"Suspendisse vel augue at felis tincidunt \
sollicitudin. Fusce arcu.
Duis et odio et leo
sollicitudin consequat. Aliquam \
lobortis. Phasellus condimentum.";<|fim▁end|>
|
// rustfmt-max_width: 50
// explicit line breaks should be kept in order to preserve the layout
|
<|file_name|>training.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras training and evaluation routines.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import numpy as np
from tensorflow.python.keras._impl.keras import backend as K
from tensorflow.python.keras._impl.keras import callbacks as cbks
from tensorflow.python.keras._impl.keras import losses
from tensorflow.python.keras._impl.keras import metrics as metrics_module
from tensorflow.python.keras._impl.keras import optimizers
from tensorflow.python.keras._impl.keras.engine.topology import Network
from tensorflow.python.keras._impl.keras.utils.data_utils import GeneratorEnqueuer
from tensorflow.python.keras._impl.keras.utils.data_utils import OrderedEnqueuer
from tensorflow.python.keras._impl.keras.utils.data_utils import Sequence
from tensorflow.python.keras._impl.keras.utils.generic_utils import Progbar
from tensorflow.python.platform import tf_logging as logging
def _standardize_input_data(data,
names,
shapes=None,
check_batch_axis=True,
exception_prefix=''):
"""Normalizes inputs and targets provided by users.
Users may pass data as a list of arrays, dictionary of arrays,
or as a single array. We normalize this to an ordered list of
arrays (same order as `names`), while checking that the provided
arrays have shapes that match the network's expectations.
Arguments:
data: User-provided input data (polymorphic).
names: List of expected array names.
shapes: Optional list of expected array shapes.
check_batch_axis: Boolean; whether to check that
the batch axis of the arrays matches the expected
value found in `shapes`.
exception_prefix: String prefix used for exception formatting.
Returns:
List of standardized input arrays (one array per model input).
Raises:
ValueError: in case of improperly formatted user-provided data.
"""
if not names:
if data is not None and hasattr(data, '__len__') and len(data):
raise ValueError('Error when checking model ' + exception_prefix + ': '
'expected no data, but got:', data)
return []
if data is None:
return [None for _ in range(len(names))]
if isinstance(data, dict):
for key, value in data.items():
if value.__class__.__name__ == 'DataFrame':
data[key] = value.values
arrays = []
for name in names:
if name not in data:
raise ValueError('No data provided for "' + name +
'". Need data for each key in: ' + str(names))
arrays.append(data[name])
elif isinstance(data, list):
for key, value in enumerate(data):
if value.__class__.__name__ == 'DataFrame':
data[key] = value.values
if len(data) != len(names):
if data and hasattr(data[0], 'shape'):
raise ValueError(
'Error when checking model ' + exception_prefix +
': the list of Numpy arrays '
'that you are passing to your model '
'is not the size the model expected. '
'Expected to see ' + str(len(names)) + ' array(s), but instead got '
'the following list of ' + str(len(data)) + ' arrays: ' +
str(data)[:200] + '...')
else:
if len(names) == 1:
data = [np.asarray(data)]
else:
raise ValueError('Error when checking model ' + exception_prefix +
': you are passing a list as '
'input to your model, '
'but the model expects '
'a list of ' + str(len(names)) +
' Numpy arrays instead. '
'The list you passed was: ' + str(data)[:200])
arrays = data
elif data.__class__.__name__ == 'DataFrame':
# test if data is a DataFrame, without pandas installed
arrays = data.values
else:
if not hasattr(data, 'shape'):
raise TypeError('Error when checking model ' + exception_prefix +
': data should be a Numpy array, '
'or list/dict of Numpy arrays. '
'Found: ' + str(data)[:200] + '...')
if len(names) > 1:
# Case: model expects multiple inputs but only received
# a single Numpy array.
raise ValueError('The model expects ' + str(len(names)) + ' ' +
exception_prefix +
' arrays, but only received one array. '
'Found: array with shape ' + str(data.shape))
arrays = [data]
# Make arrays at least 2D.
for i in range(len(names)):
array = arrays[i]
if len(array.shape) == 1:
array = np.expand_dims(array, 1)
arrays[i] = array
# Check shapes compatibility.
if shapes:
for i in range(len(names)):
if shapes[i] is None:
continue
array = arrays[i]
if len(array.shape) != len(shapes[i]):
raise ValueError(
'Error when checking ' + exception_prefix + ': expected ' + names[i]
+ ' to have ' + str(len(shapes[i])) +
' dimensions, but got array with shape ' + str(array.shape))
for j, (dim, ref_dim) in enumerate(zip(array.shape, shapes[i])):
if not j and not check_batch_axis:
# skip the first axis
continue
if ref_dim:
if ref_dim != dim:
raise ValueError('Error when checking ' + exception_prefix +
': expected ' + names[i] + ' to have shape ' +
str(shapes[i]) + ' but got array with shape ' +
str(array.shape))
return arrays
def _standardize_sample_or_class_weights(x_weight, output_names, weight_type):
"""Maps `sample_weight` or `class_weight` to model outputs.
Arguments:
x_weight: User-provided `sample_weight` or `class_weight` argument.
output_names: List of output names (strings) in the model.
weight_type: A string used purely for exception printing.
Returns:
      A list of `sample_weight` or `class_weight` where there is exactly
      one element per model output.
Raises:
ValueError: In case of invalid user-provided argument.
"""
if x_weight is None or len(x_weight) == 0: # pylint: disable=g-explicit-length-test
return [None for _ in output_names]
if len(output_names) == 1:
if isinstance(x_weight, list) and len(x_weight) == 1:
return x_weight
if isinstance(x_weight, dict) and output_names[0] in x_weight:
return [x_weight[output_names[0]]]
else:
return [x_weight]
if isinstance(x_weight, list):
if len(x_weight) != len(output_names):
raise ValueError('Provided `' + weight_type + '` was a list of ' +
str(len(x_weight)) + ' elements, but the model has ' +
str(len(output_names)) + ' outputs. '
'You should provide one `' + weight_type + '`'
'array per model output.')
return x_weight
if isinstance(x_weight, dict):
x_weights = []
for name in output_names:
x_weights.append(x_weight.get(name))
return x_weights
else:
raise TypeError('The model has multiple outputs, so `' + weight_type + '` '
                    'should be either a list or a dict. '
'Provided `' + weight_type + '` type not understood: ' +
str(x_weight))
def _standardize_class_weights(class_weight, output_names):
return _standardize_sample_or_class_weights(class_weight, output_names,
'class_weight')
def _standardize_sample_weights(sample_weight, output_names):
return _standardize_sample_or_class_weights(sample_weight, output_names,
'sample_weight')
def _check_array_lengths(inputs, targets, weights=None):
"""Does user input validation for numpy arrays.
Arguments:
inputs: list of Numpy arrays of inputs.
targets: list of Numpy arrays of targets.
weights: list of Numpy arrays of sample weights.
Raises:
ValueError: in case of incorrectly formatted data.
"""
def set_of_lengths(x):
# return a set with the variation between
# different shapes, with None => 0
if x is None:
return {0}
else:
return set([0 if y is None else y.shape[0] for y in x])
set_x = set_of_lengths(inputs)
set_y = set_of_lengths(targets)
set_w = set_of_lengths(weights)
if len(set_x) > 1:
raise ValueError('All input arrays (x) should have '
'the same number of samples. Got array shapes: ' + str(
[x.shape for x in inputs]))
if len(set_y) > 1:
raise ValueError('All target arrays (y) should have '
'the same number of samples. Got array shapes: ' + str(
[y.shape for y in targets]))
if set_x and set_y and list(set_x)[0] != list(set_y)[0]:
raise ValueError('Input arrays should have '
'the same number of samples as target arrays. '
'Found ' + str(list(set_x)[0]) + ' input samples '
'and ' + str(list(set_y)[0]) + ' target samples.')
if len(set_w) > 1:
raise ValueError('All sample_weight arrays should have '
'the same number of samples. Got array shapes: ' + str(
[w.shape for w in weights]))
if set_y and set_w and list(set_y)[0] != list(set_w)[0]:
raise ValueError('Sample_weight arrays should have '
'the same number of samples as target arrays. Got ' +
str(list(set_y)[0]) + ' input samples and ' +
str(list(set_w)[0]) + ' target samples.')
def _check_loss_and_target_compatibility(targets, loss_fns, output_shapes):
"""Does validation on the compatibility of targets and loss functions.
This helps prevent users from using loss functions incorrectly.
Arguments:
targets: list of Numpy arrays of targets.
loss_fns: list of loss functions.
output_shapes: list of shapes of model outputs.
Raises:
ValueError: if a loss function or target array
is incompatible with an output.
"""
key_losses = {
losses.mean_squared_error, losses.binary_crossentropy,
losses.categorical_crossentropy
}
for y, loss, shape in zip(targets, loss_fns, output_shapes):
if loss is None:
continue
if loss is losses.categorical_crossentropy:
if y.shape[-1] == 1:
raise ValueError('You are passing a target array of shape ' + str(
y.shape) + ' while using as loss `categorical_crossentropy`. '
'`categorical_crossentropy` expects '
'targets to be binary matrices (1s and 0s) '
'of shape (samples, classes). '
'If your targets are integer classes, '
'you can convert them to the expected format via:\n'
'```\n'
'from keras.utils import to_categorical\n'
'y_binary = to_categorical(y_int)\n'
'```\n'
'\n'
'Alternatively, you can use the loss function '
'`sparse_categorical_crossentropy` instead, '
'which does expect integer targets.')
if loss in key_losses:
for target_dim, out_dim in zip(y.shape[1:], shape[1:]):
if out_dim is not None and target_dim != out_dim:
raise ValueError('A target array with shape ' + str(y.shape) +
' was passed for an output of shape ' + str(shape) +
' while using as loss `' + loss.__name__ + '`. '
'This loss expects '
'targets to have the same shape '
'as the output.')
def _collect_metrics(metrics, output_names):
"""Maps metric functions to model outputs.
Arguments:
metrics: a list or dict of metric functions.
output_names: a list of the names (strings) of model outputs.
Returns:
A list (one entry per model output) of lists of metric functions.
For instance, if the model has 2 outputs, and for the first output
we want to compute "binary_accuracy" and "binary_crossentropy",
and just "binary_accuracy" for the second output,
the list would look like:
`[[binary_accuracy, binary_crossentropy], [binary_accuracy]]`
Raises:
TypeError: if an incorrect type is passed for the `metrics` argument.
"""
if not metrics:
return [[] for _ in output_names]
if isinstance(metrics, list):
# we then apply all metrics to all outputs.
return [copy.copy(metrics) for _ in output_names]
elif isinstance(metrics, dict):
nested_metrics = []
for name in output_names:
output_metrics = metrics.get(name, [])
if not isinstance(output_metrics, list):
output_metrics = [output_metrics]
nested_metrics.append(output_metrics)
return nested_metrics
else:
raise TypeError('Type of `metrics` argument not understood. '
'Expected a list or dictionary, found: ' + str(metrics))
def _batch_shuffle(index_array, batch_size):
"""Shuffles an array in a batch-wise fashion.
Useful for shuffling HDF5 arrays
(where one cannot access arbitrary indices).
Arguments:
index_array: array of indices to be shuffled.
batch_size: integer.
Returns:
The `index_array` array, shuffled in a batch-wise fashion.
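  Example:
  ```python
    # A small illustration; the index values are arbitrary.
    idx = np.arange(7)
    shuffled = _batch_shuffle(idx, batch_size=3)
    # The complete batches [0, 1, 2] and [3, 4, 5] are permuted as whole
    # blocks; the leftover [6] is appended at the end.
  ```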
"""
batch_count = int(len(index_array) / batch_size)
# to reshape we need to be cleanly divisible by batch size
# we stash extra items and reappend them after shuffling
last_batch = index_array[batch_count * batch_size:]
index_array = index_array[:batch_count * batch_size]
index_array = index_array.reshape((batch_count, batch_size))
np.random.shuffle(index_array)
index_array = index_array.flatten()
return np.append(index_array, last_batch)
def _make_batches(size, batch_size):
"""Returns a list of batch indices (tuples of indices).
Arguments:
size: Integer, total size of the data to slice into batches.
batch_size: Integer, batch size.
Returns:
    A list of tuples of array indices.
  """
  num_batches = int(np.ceil(size / float(batch_size)))
  return [(i * batch_size, min(size, (i + 1) * batch_size))
          for i in range(num_batches)]
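# For instance (illustrative): _make_batches(10, 3) yields
# [(0, 3), (3, 6), (6, 9), (9, 10)].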
def _slice_arrays(arrays, start=None, stop=None):
"""Slice an array or list of arrays.
This takes an array-like, or a list of
array-likes, and outputs:
- arrays[start:stop] if `arrays` is an array-like
- [x[start:stop] for x in arrays] if `arrays` is a list
Can also work on list/array of indices: `_slice_arrays(x, indices)`
Arguments:
arrays: Single array or list of arrays.
start: can be an integer index (start index)
or a list/array of indices
stop: integer (stop index); should be None if
`start` was a list.
Returns:
A slice of the array(s).
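  Example:
  ```python
    # Illustrative usage with a dummy array.
    a = np.arange(10)
    _slice_arrays([a, a], 0, 5)   # -> [a[0:5], a[0:5]]
    _slice_arrays(a, [1, 3, 5])   # -> a[[1, 3, 5]] (index-based slicing)
  ```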
"""
if arrays is None:
return [None]
elif isinstance(arrays, list):
if hasattr(start, '__len__'):
# hdf5 datasets only support list objects as indices
if hasattr(start, 'shape'):
start = start.tolist()
return [None if x is None else x[start] for x in arrays]
else:
return [None if x is None else x[start:stop] for x in arrays]
else:
if hasattr(start, '__len__'):
if hasattr(start, 'shape'):
start = start.tolist()
return arrays[start]
elif hasattr(start, '__getitem__'):
return arrays[start:stop]
else:
return [None]
def _weighted_masked_objective(fn):
"""Adds support for masking and sample-weighting to an objective function.
It transforms an objective function `fn(y_true, y_pred)`
into a sample-weighted, cost-masked objective function
`fn(y_true, y_pred, weights, mask)`.
Arguments:
fn: The objective function to wrap,
with signature `fn(y_true, y_pred)`.
Returns:
A function with signature `fn(y_true, y_pred, weights, mask)`.
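  Example:
  ```python
    # A minimal sketch; `y_true`, `y_pred` and `sample_weights` stand for
    # backend tensors with matching leading dimensions.
    weighted_mse = _weighted_masked_objective(losses.mean_squared_error)
    loss = weighted_mse(y_true, y_pred, weights=sample_weights, mask=None)
    # `loss` is a scalar tensor.
  ```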
"""
if fn is None:
return None
def weighted(y_true, y_pred, weights, mask=None):
"""Wrapper function.
Arguments:
y_true: `y_true` argument of `fn`.
y_pred: `y_pred` argument of `fn`.
weights: Weights tensor.
mask: Mask tensor.
Returns:
Scalar tensor.
"""
# score_array has ndim >= 2
score_array = fn(y_true, y_pred)
if mask is not None:
# Cast the mask to floatX to avoid float64 upcasting in theano
mask = K.cast(mask, K.floatx())
# mask should have the same shape as score_array
score_array *= mask
# the loss per batch should be proportional
# to the number of unmasked samples.
score_array /= K.mean(mask)
# apply sample weighting
if weights is not None:
# reduce score_array to same ndim as weight array
ndim = K.ndim(score_array)
weight_ndim = K.ndim(weights)
score_array = K.mean(score_array, axis=list(range(weight_ndim, ndim)))
score_array *= weights
score_array /= K.mean(K.cast(K.not_equal(weights, 0), K.floatx()))
return K.mean(score_array)
return weighted
def _standardize_weights(y,
sample_weight=None,
class_weight=None,
sample_weight_mode=None):
"""Performs sample weight validation and standardization.
Everything gets normalized to a single sample-wise (or timestep-wise)
weight array.
Arguments:
y: Numpy array of model targets to be weighted.
sample_weight: User-provided `sample_weight` argument.
class_weight: User-provided `class_weight` argument.
sample_weight_mode: One of `None` or `"temporal"`.
`"temporal"` indicated that we expect 2D weight data
that will be applied to the last 2 dimensions of
the targets (i.e. we are weighting timesteps, not samples).
Returns:
A numpy array of target weights, one entry per sample to weight.
Raises:
ValueError: In case of invalid user-provided arguments.
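  Example:
  ```python
    # Illustrative class weighting for binary targets.
    y = np.array([[0], [1], [1], [0]])
    w = _standardize_weights(y, class_weight={0: 1., 1: 2.})
    # w == array([1., 2., 2., 1.])
  ```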
"""
if sample_weight_mode is not None:
if sample_weight_mode != 'temporal':
      raise ValueError('"sample_weight_mode" '
                       'should be None or "temporal". '
                       'Found: ' + str(sample_weight_mode))
if len(y.shape) < 3:
raise ValueError('Found a sample_weight array for '
'an input with shape ' + str(y.shape) + '. '
'Timestep-wise sample weighting (use of '
'sample_weight_mode="temporal") is restricted to '
'outputs that are at least 3D, i.e. that have '
'a time dimension.')
if sample_weight is not None and len(sample_weight.shape) != 2:
raise ValueError('Found a sample_weight array with shape ' +
str(sample_weight.shape) + '. '
'In order to use timestep-wise sample weighting, '
'you should pass a 2D sample_weight array.')
else:
if sample_weight is not None and len(sample_weight.shape) != 1:
raise ValueError('Found a sample_weight array with shape ' +
str(sample_weight.shape) + '. '
'In order to use timestep-wise sample weights, '
'you should specify '
'sample_weight_mode="temporal" '
'in compile(). If you just mean to use '
'sample-wise weights, make sure your '
'sample_weight array is 1D.')
if sample_weight is not None:
if len(sample_weight.shape) > len(y.shape):
raise ValueError('Found a sample_weight with shape' +
str(sample_weight.shape) + '.'
'Expected sample_weight with rank '
'less than or equal to ' + str(len(y.shape)))
if y.shape[:sample_weight.ndim] != sample_weight.shape:
raise ValueError('Found a sample_weight array with shape ' +
str(sample_weight.shape) + ' for an input with shape ' +
str(y.shape) + '. '
'sample_weight cannot be broadcast.')
return sample_weight
elif isinstance(class_weight, dict):
if len(y.shape) > 2:
raise ValueError('`class_weight` not supported for '
'3+ dimensional targets.')
if y.shape[1] > 1:
y_classes = y.argmax(axis=1)
elif y.shape[1] == 1:
y_classes = np.reshape(y, y.shape[0])
else:
y_classes = y
weights = np.asarray(
[class_weight[cls] for cls in y_classes if cls in class_weight])
if len(weights) != len(y_classes):
# subtract the sets to pick all missing classes
existing_classes = set(y_classes)
existing_class_weight = set(class_weight.keys())
raise ValueError('`class_weight` must contain all classes in the data.'
' The classes %s exist in the data but not in '
'`class_weight`.' %
(existing_classes - existing_class_weight))
return weights
else:
if sample_weight_mode is None:
return np.ones((y.shape[0],), dtype=K.floatx())
else:
return np.ones((y.shape[0], y.shape[1]), dtype=K.floatx())
class Model(Network):
"""The `Model` class adds training & evaluation routines to a `Network`.
"""
def compile(self,
optimizer,
loss,
metrics=None,
loss_weights=None,
sample_weight_mode=None,
weighted_metrics=None,
target_tensors=None,
**kwargs):
"""Configures the model for training.
Arguments:
optimizer: String (name of optimizer) or optimizer instance.
See [optimizers](/optimizers).
loss: String (name of objective function) or objective function.
See [losses](/losses).
If the model has multiple outputs, you can use a different loss
on each output by passing a dictionary or a list of losses.
The loss value that will be minimized by the model
will then be the sum of all individual losses.
metrics: List of metrics to be evaluated by the model
during training and testing.
Typically you will use `metrics=['accuracy']`.
To specify different metrics for different outputs of a
multi-output model, you could also pass a dictionary,
such as `metrics={'output_a': 'accuracy'}`.
loss_weights: Optional list or dictionary specifying scalar
coefficients (Python floats) to weight the loss contributions
of different model outputs.
The loss value that will be minimized by the model
will then be the *weighted sum* of all individual losses,
weighted by the `loss_weights` coefficients.
        If a list, it is expected to have a 1:1 mapping
        to the model's outputs. If a dict, it is expected to map
        output names (strings) to scalar coefficients.
sample_weight_mode: If you need to do timestep-wise
sample weighting (2D weights), set this to `"temporal"`.
`None` defaults to sample-wise weights (1D).
If the model has multiple outputs, you can use a different
`sample_weight_mode` on each output by passing a
dictionary or a list of modes.
weighted_metrics: List of metrics to be evaluated and weighted
by sample_weight or class_weight during training and testing.
target_tensors: By default, Keras will create placeholders for the
model's target, which will be fed with the target data during
training. If instead you would like to use your own
target tensors (in turn, Keras will not expect external
Numpy data for these targets at training time), you
can specify them via the `target_tensors` argument. It can be
a single tensor (for a single-output model), a list of tensors,
or a dict mapping output names to target tensors.
**kwargs: These arguments are passed to `tf.Session.run`.
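    Example:
    ```python
      # A minimal sketch; the optimizer, loss and metric choices are
      # illustrative.
      model.compile(optimizer='rmsprop',
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
      # For multi-output models, losses can be mapped by output name
      # (the names below are placeholders):
      model.compile(optimizer='rmsprop',
                    loss={'main': 'mse', 'aux': 'binary_crossentropy'},
                    loss_weights={'main': 1., 'aux': 0.2})
    ```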
Raises:
ValueError: In case of invalid arguments for
`optimizer`, `loss`, `metrics` or `sample_weight_mode`.
"""
loss = loss or {}
self.optimizer = optimizers.get(optimizer)
self.sample_weight_mode = sample_weight_mode
self.loss = loss
self.loss_weights = loss_weights
self.sample_weight_mode = sample_weight_mode
# Prepare loss functions.
if isinstance(loss, dict):
for name in loss:
if name not in self.output_names:
raise ValueError('Unknown entry in loss '
'dictionary: "' + name + '". '
'Only expected the following keys: ' +
str(self.output_names))
loss_functions = []
for name in self.output_names:
if name not in loss:
logging.warning(
'Output "' + name + '" missing from loss dictionary. '
'We assume this was done on purpose, '
'and we will not be expecting '
'any data to be passed to "' + name + '" during training.')
loss_functions.append(losses.get(loss.get(name)))
elif isinstance(loss, list):
if len(loss) != len(self.outputs):
raise ValueError('When passing a list as loss, '
                         'it should have one entry per model output. '
'The model has ' + str(len(self.outputs)) +
' outputs, but you passed loss=' + str(loss))
loss_functions = [losses.get(l) for l in loss]
else:
loss_function = losses.get(loss)
loss_functions = [loss_function for _ in range(len(self.outputs))]
self.loss_functions = loss_functions
weighted_losses = [_weighted_masked_objective(fn) for fn in loss_functions]
skip_target_indices = []
skip_target_weighing_indices = []
self._feed_outputs = []
self._feed_output_names = []
self._feed_output_shapes = []
self._feed_loss_fns = []
for i in range(len(weighted_losses)):
if weighted_losses[i] is None:
skip_target_indices.append(i)
skip_target_weighing_indices.append(i)
# Prepare output masks.
masks = self.compute_mask(self.inputs, mask=None)
if masks is None:
masks = [None for _ in self.outputs]
if not isinstance(masks, list):
masks = [masks]
# Prepare loss weights.
if loss_weights is None:
loss_weights_list = [1. for _ in range(len(self.outputs))]
elif isinstance(loss_weights, dict):
for name in loss_weights:
if name not in self.output_names:
raise ValueError('Unknown entry in loss_weights '
'dictionary: "' + name + '". '
'Only expected the following keys: ' +
str(self.output_names))
loss_weights_list = []
for name in self.output_names:
loss_weights_list.append(loss_weights.get(name, 1.))
elif isinstance(loss_weights, list):
if len(loss_weights) != len(self.outputs):
raise ValueError('When passing a list as loss_weights, '
                         'it should have one entry per model output. '
'The model has ' + str(len(self.outputs)) +
' outputs, but you passed loss_weights=' +
str(loss_weights))
loss_weights_list = loss_weights
else:
      raise TypeError('Could not interpret loss_weights argument: ' +
                      str(loss_weights) + ' - expected a list or a dict.')
# Prepare targets of model.
self.targets = []
self._feed_targets = []
if target_tensors is not None:
if isinstance(target_tensors, list):
if len(target_tensors) != len(self.outputs):
raise ValueError('When passing a list as `target_tensors`, '
                           'it should have one entry per model output. '
'The model has ' + str(len(self.outputs)) +
' outputs, but you passed target_tensors=' +
str(target_tensors))
elif isinstance(target_tensors, dict):
for name in target_tensors:
if name not in self.output_names:
raise ValueError('Unknown entry in `target_tensors` '
'dictionary: "' + name + '". '
'Only expected the following keys: ' +
str(self.output_names))
target_tensors_ = []
for name in self.output_names:
target_tensors_.append(target_tensors.get(name, None))
target_tensors = target_tensors_
else:
raise TypeError('Expected `target_tensors` to be '
'a list or dict, but got:', target_tensors)
for i in range(len(self.outputs)):
if i in skip_target_indices:
self.targets.append(None)
else:
shape = self.internal_output_shapes[i]
name = self.output_names[i]
if target_tensors is not None:
target = target_tensors[i]
else:
target = None
if target is None or K.is_placeholder(target):
if target is None:
target = K.placeholder(
ndim=len(shape),
name=name + '_target',
sparse=K.is_sparse(self.outputs[i]),
dtype=K.dtype(self.outputs[i]))
self._feed_targets.append(target)
self._feed_outputs.append(self.outputs[i])
self._feed_output_names.append(name)
self._feed_output_shapes.append(shape)
self._feed_loss_fns.append(self.loss_functions[i])
else:
skip_target_weighing_indices.append(i)
self.targets.append(target)
# Prepare sample weights.
sample_weights = []
sample_weight_modes = []
if isinstance(sample_weight_mode, dict):
for name in sample_weight_mode:
if name not in self.output_names:
raise ValueError('Unknown entry in '
'sample_weight_mode dictionary: "' + name + '". '
'Only expected the following keys: ' +
str(self.output_names))
for i, name in enumerate(self.output_names):
if i in skip_target_weighing_indices:
weight = None
sample_weight_modes.append(None)
else:
if name not in sample_weight_mode:
raise ValueError('Output "' + name +
'" missing from sample_weight_modes '
'dictionary')
if sample_weight_mode.get(name) == 'temporal':
weight = K.placeholder(ndim=2, name=name + '_sample_weights')
sample_weight_modes.append('temporal')
else:
weight = K.placeholder(ndim=1, name=name + '_sample_weights')
sample_weight_modes.append(None)
sample_weights.append(weight)
elif isinstance(sample_weight_mode, list):
if len(sample_weight_mode) != len(self.outputs):
raise ValueError('When passing a list as sample_weight_mode, '
                         'it should have one entry per model output. '
'The model has ' + str(len(self.outputs)) +
' outputs, but you passed '
'sample_weight_mode=' + str(sample_weight_mode))
for i in range(len(self.output_names)):
if i in skip_target_weighing_indices:
weight = None
sample_weight_modes.append(None)
else:
mode = sample_weight_mode[i]
name = self.output_names[i]
if mode == 'temporal':
weight = K.placeholder(ndim=2, name=name + '_sample_weights')
sample_weight_modes.append('temporal')
else:
weight = K.placeholder(ndim=1, name=name + '_sample_weights')
sample_weight_modes.append(None)
sample_weights.append(weight)
else:
for i, name in enumerate(self.output_names):
if i in skip_target_weighing_indices:
sample_weight_modes.append(None)
sample_weights.append(None)
else:
if sample_weight_mode == 'temporal':
sample_weights.append(
K.placeholder(ndim=2, name=name + '_sample_weights'))
sample_weight_modes.append('temporal')
else:
sample_weights.append(
K.placeholder(ndim=1, name=name + '_sample_weights'))
sample_weight_modes.append(None)
self.sample_weight_modes = sample_weight_modes
self._feed_sample_weight_modes = []
for i in range(len(self.outputs)):
if i not in skip_target_weighing_indices:
self._feed_sample_weight_modes.append(self.sample_weight_modes[i])
# Prepare metrics.
self.metrics = metrics
self.weighted_metrics = weighted_metrics
self.metrics_names = ['loss']
self.metrics_tensors = []
# Compute total loss.
total_loss = None
with K.name_scope('loss'):
for i in range(len(self.outputs)):
if i in skip_target_indices:
continue
y_true = self.targets[i]
y_pred = self.outputs[i]
weighted_loss = weighted_losses[i]
sample_weight = sample_weights[i]
mask = masks[i]
loss_weight = loss_weights_list[i]
with K.name_scope(self.output_names[i] + '_loss'):
output_loss = weighted_loss(y_true, y_pred, sample_weight, mask)
if len(self.outputs) > 1:
self.metrics_tensors.append(output_loss)
self.metrics_names.append(self.output_names[i] + '_loss')
if total_loss is None:
total_loss = loss_weight * output_loss
else:
total_loss += loss_weight * output_loss
if total_loss is None:
if not self.losses:
raise ValueError('The model cannot be compiled '
'because it has no loss to optimize.')
else:
total_loss = 0.
# Add regularization penalties
# and other layer-specific losses.
for loss_tensor in self.losses:
total_loss += loss_tensor
# List of same size as output_names.
# contains tuples (metrics for output, names of metrics).
nested_metrics = _collect_metrics(metrics, self.output_names)
nested_weighted_metrics = _collect_metrics(weighted_metrics,
self.output_names)
def append_metric(layer_index, metric_name, metric_tensor):
"""Helper function used in loop below."""
if len(self.output_names) > 1:
metric_name = self.output_names[layer_index] + '_' + metric_name
self.metrics_names.append(metric_name)
self.metrics_tensors.append(metric_tensor)
with K.name_scope('metrics'):
for i in range(len(self.outputs)):
if i in skip_target_indices:
continue
y_true = self.targets[i]
y_pred = self.outputs[i]
weights = sample_weights[i]
output_metrics = nested_metrics[i]
output_weighted_metrics = nested_weighted_metrics[i]
def handle_metrics(metrics, weights=None):
metric_name_prefix = 'weighted_' if weights is not None else ''
for metric in metrics:
if metric == 'accuracy' or metric == 'acc':
# custom handling of accuracy
# (because of class mode duality)
output_shape = self.internal_output_shapes[i]
if (output_shape[-1] == 1 or
self.loss_functions[i] == losses.binary_crossentropy):
# case: binary accuracy
acc_fn = metrics_module.binary_accuracy
elif self.loss_functions[
i] == losses.sparse_categorical_crossentropy:
# case: categorical accuracy with sparse targets
acc_fn = metrics_module.sparse_categorical_accuracy
else:
acc_fn = metrics_module.categorical_accuracy
weighted_metric_fn = _weighted_masked_objective(acc_fn)
metric_name = metric_name_prefix + 'acc'
else:
metric_fn = metrics_module.get(metric)
weighted_metric_fn = _weighted_masked_objective(metric_fn)
metric_name = metric_name_prefix + metric_fn.__name__
with K.name_scope(metric_name):
metric_result = weighted_metric_fn(
y_true, y_pred, weights=weights, mask=masks[i])
append_metric(i, metric_name, metric_result)
handle_metrics(output_metrics)
handle_metrics(output_weighted_metrics, weights=weights)
# Prepare gradient updates and state updates.
self.total_loss = total_loss
self.sample_weights = sample_weights
self._feed_sample_weights = []
for i in range(len(self.sample_weights)):
if i not in skip_target_weighing_indices:
self._feed_sample_weights.append(sample_weights[i])
# Functions for train, test and predict will
# be compiled lazily when required.
# This saves time when the user is not using all functions.
self._function_kwargs = kwargs
self.train_function = None
self.test_function = None
self.predict_function = None
# Collected trainable weights, sorted in topological order.
trainable_weights = self.trainable_weights
self._collected_trainable_weights = trainable_weights
def _check_trainable_weights_consistency(self):
"""Check trainable weights count consistency.
    This will raise a warning if `trainable_weights` and
    `_collected_trainable_weights` are inconsistent (i.e. do not have the
    same number of parameters).
Inconsistency will typically arise when one modifies `model.trainable`
without calling `model.compile` again.
"""
if not hasattr(self, '_collected_trainable_weights'):
return
if len(self.trainable_weights) != len(self._collected_trainable_weights):
logging.warning(
'Discrepancy between trainable weights and collected trainable'
' weights, did you set `model.trainable` without calling'
          ' `model.compile` afterwards?')
def _make_train_function(self):
if not hasattr(self, 'train_function'):
raise RuntimeError('You must compile your model before using it.')
self._check_trainable_weights_consistency()
if self.train_function is None:
inputs = (self._feed_inputs +
self._feed_targets +
self._feed_sample_weights)
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
inputs += [K.learning_phase()]
with K.name_scope('training'):
with K.name_scope(self.optimizer.__class__.__name__):
training_updates = self.optimizer.get_updates(
params=self._collected_trainable_weights, loss=self.total_loss)
updates = self.updates + training_updates
# Gets loss and metrics. Updates weights at each call.
self.train_function = K.function(
inputs, [self.total_loss] + self.metrics_tensors,
updates=updates,
name='train_function',
**self._function_kwargs)
def _make_test_function(self):
if not hasattr(self, 'test_function'):
raise RuntimeError('You must compile your model before using it.')
if self.test_function is None:
inputs = (self._feed_inputs +
self._feed_targets +
self._feed_sample_weights)
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
inputs += [K.learning_phase()]
# Return loss and metrics, no gradient updates.
# Does update the network states.
self.test_function = K.function(
inputs, [self.total_loss] + self.metrics_tensors,
updates=self.state_updates,
name='test_function',
**self._function_kwargs)
def _make_predict_function(self):
if not hasattr(self, 'predict_function'):
self.predict_function = None
if self.predict_function is None:
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
inputs = self._feed_inputs + [K.learning_phase()]
else:
inputs = self._feed_inputs
# Gets network outputs. Does not update weights.
# Does update the network states.
kwargs = getattr(self, '_function_kwargs', {})
self.predict_function = K.function(
inputs,
self.outputs,
updates=self.state_updates,
name='predict_function',
**kwargs)
def _check_num_samples(self,
ins,
batch_size=None,
steps=None,
steps_name='steps'):
"""Determine the number of samples provided for training and evaluation.
The number of samples is not defined when running with `steps`,
in which case the number of samples is set to `None`.
Arguments:
ins: List of tensors to be fed to the Keras function.
batch_size: Integer batch size or `None` if not defined.
steps: Total number of steps (batches of samples)
before declaring `_predict_loop` finished.
Ignored with the default value of `None`.
steps_name: The public API's parameter name for `steps`.
Raises:
ValueError: when `steps` is `None` and the attribute `ins.shape`
does not exist. Also raises ValueError when `steps` is not `None`
and `batch_size` is not `None` because they are mutually
exclusive.
Returns:
When steps is `None`, returns the number of samples to be
processed based on the size of the first dimension of the
first input numpy array. When steps is not `None` and
`batch_size` is `None`, returns `None`.
"""
if steps is not None:
num_samples = None
if batch_size is not None:
raise ValueError('If ' + steps_name +
' is set, the `batch_size` must be None.')
elif ins and hasattr(ins[0], 'shape'):
num_samples = ins[0].shape[0]
else:
raise ValueError('Either the input data should have '
'a defined shape, or ' + steps_name +
' should be specified.')
return num_samples
def _fit_loop(self,
f,
ins,
out_labels=None,
batch_size=None,
epochs=100,
verbose=1,
callbacks=None,
val_f=None,
val_ins=None,
shuffle=True,
callback_metrics=None,
initial_epoch=0,
steps_per_epoch=None,
validation_steps=None):
"""Abstract fit function for `f(ins)`.
Assume that f returns a list, labeled by out_labels.
Arguments:
f: Keras function returning a list of tensors
ins: List of tensors to be fed to `f`
out_labels: List of strings, display names of
the outputs of `f`
batch_size: Integer batch size or None if unknown.
epochs: Number of times to iterate over the data
verbose: Verbosity mode, 0, 1 or 2
callbacks: List of callbacks to be called during training
val_f: Keras function to call for validation
val_ins: List of tensors to be fed to `val_f`
shuffle: Whether to shuffle the data at the beginning of each epoch
callback_metrics: List of strings, the display names of the metrics
passed to the callbacks. They should be the
concatenation of list the display names of the outputs of
`f` and the list of display names of the outputs of `f_val`.
initial_epoch: Epoch at which to start training
(useful for resuming a previous training run)
steps_per_epoch: Total number of steps (batches of samples)
before declaring one epoch finished and starting the
next epoch. Ignored with the default value of `None`.
validation_steps: Number of steps to run validation for (only if doing
validation from data tensors). Ignored with default value of `None`.
Returns:
`History` object.
Raises:
ValueError: In case of invalid argument values.
"""
do_validation = False
if val_f and val_ins:
do_validation = True
if (verbose and ins and
hasattr(ins[0], 'shape') and hasattr(val_ins[0], 'shape')):
print('Train on %d samples, validate on %d samples' %
(ins[0].shape[0], val_ins[0].shape[0]))
if validation_steps:
if steps_per_epoch is None:
raise ValueError('Can only use `validation_steps` when doing step-wise '
'training, i.e. `steps_per_epoch` must be set.')
do_validation = True
num_train_samples = self._check_num_samples(
ins, batch_size, steps_per_epoch, 'steps_per_epoch')
if num_train_samples is not None:
index_array = np.arange(num_train_samples)
self.history = cbks.History()
callbacks = [cbks.BaseLogger()] + (callbacks or []) + [self.history]
if verbose:
if steps_per_epoch is not None:
count_mode = 'steps'
else:
count_mode = 'samples'
callbacks += [cbks.ProgbarLogger(count_mode)]
callbacks = cbks.CallbackList(callbacks)
out_labels = out_labels or []
# it's possible to callback a different model than self
# (used by Sequential models)
if hasattr(self, 'callback_model') and self.callback_model:
callback_model = self.callback_model
else:
callback_model = self
callbacks.set_model(callback_model)
callbacks.set_params({
'batch_size': batch_size,
'epochs': epochs,
'steps': steps_per_epoch,
'samples': num_train_samples,
'verbose': verbose,
'do_validation': do_validation,
'metrics': callback_metrics or [],
})
callbacks.on_train_begin()
callback_model.stop_training = False
for cbk in callbacks:
cbk.validation_data = val_ins
for epoch in range(initial_epoch, epochs):
callbacks.on_epoch_begin(epoch)
epoch_logs = {}
if steps_per_epoch is not None:
for step_index in range(steps_per_epoch):
batch_logs = {}
batch_logs['batch'] = step_index
batch_logs['size'] = 1
callbacks.on_batch_begin(step_index, batch_logs)
outs = f(ins)
if not isinstance(outs, list):
outs = [outs]
for l, o in zip(out_labels, outs):
batch_logs[l] = o
callbacks.on_batch_end(step_index, batch_logs)
if callback_model.stop_training:
break
if do_validation:
val_outs = self._test_loop(
val_f,
val_ins,
batch_size=batch_size,
steps=validation_steps,
verbose=0)
if not isinstance(val_outs, list):
val_outs = [val_outs]
# Same labels assumed.
for l, o in zip(out_labels, val_outs):
epoch_logs['val_' + l] = o
else:
if shuffle == 'batch':
index_array = _batch_shuffle(index_array, batch_size)
elif shuffle:
np.random.shuffle(index_array)
batches = _make_batches(num_train_samples, batch_size)
for batch_index, (batch_start, batch_end) in enumerate(batches):
batch_ids = index_array[batch_start:batch_end]
try:
if isinstance(ins[-1], float):
# Do not slice the training phase flag.
ins_batch = _slice_arrays(ins[:-1], batch_ids) + [ins[-1]]
else:
ins_batch = _slice_arrays(ins, batch_ids)
except TypeError:
raise TypeError('TypeError while preparing batch. '
'If using HDF5 input data, '
'pass shuffle="batch".')
batch_logs = {}
batch_logs['batch'] = batch_index
batch_logs['size'] = len(batch_ids)
callbacks.on_batch_begin(batch_index, batch_logs)
outs = f(ins_batch)
if not isinstance(outs, list):
outs = [outs]
for l, o in zip(out_labels, outs):
batch_logs[l] = o
callbacks.on_batch_end(batch_index, batch_logs)
if callback_model.stop_training:
break
if batch_index == len(batches) - 1: # Last batch.
if do_validation:
val_outs = self._test_loop(
val_f, val_ins, batch_size=batch_size, verbose=0)
if not isinstance(val_outs, list):
val_outs = [val_outs]
# Same labels assumed.
for l, o in zip(out_labels, val_outs):
epoch_logs['val_' + l] = o
callbacks.on_epoch_end(epoch, epoch_logs)
if callback_model.stop_training:
break
callbacks.on_train_end()
return self.history
def _predict_loop(self, f, ins, batch_size=32, verbose=0, steps=None):
"""Abstract method to loop over some data in batches.
Arguments:
f: Keras function returning a list of tensors.
ins: list of tensors to be fed to `f`.
batch_size: integer batch size.
verbose: verbosity mode.
steps: Total number of steps (batches of samples)
before declaring `_predict_loop` finished.
Ignored with the default value of `None`.
Returns:
Array of predictions (if the model has a single output)
or list of arrays of predictions
(if the model has multiple outputs).
"""
num_samples = self._check_num_samples(ins, batch_size, steps, 'steps')
if verbose == 1:
if steps is not None:
progbar = Progbar(target=steps)
else:
progbar = Progbar(target=num_samples)
if steps is not None:
# Step-based predictions.
# Since we do not know how many samples
# we will see, we cannot pre-allocate
# the returned Numpy arrays.
# Instead, we store one array per batch seen
# and concatenate them upon returning.
unconcatenated_outs = []
for step in range(steps):
batch_outs = f(ins)
if not isinstance(batch_outs, list):
batch_outs = [batch_outs]
if step == 0:
for batch_out in batch_outs:
unconcatenated_outs.append([])
for i, batch_out in enumerate(batch_outs):
unconcatenated_outs[i].append(batch_out)
if verbose == 1:
progbar.update(step + 1)
if len(unconcatenated_outs) == 1:
return np.concatenate(unconcatenated_outs[0], axis=0)
return [
np.concatenate(unconcatenated_outs[i], axis=0)
for i in range(len(unconcatenated_outs))
]
else:
# Sample-based predictions.
outs = []
batches = _make_batches(num_samples, batch_size)
index_array = np.arange(num_samples)
for batch_index, (batch_start, batch_end) in enumerate(batches):
batch_ids = index_array[batch_start:batch_end]
if ins and isinstance(ins[-1], float):
# Do not slice the training phase flag.
ins_batch = _slice_arrays(ins[:-1], batch_ids) + [ins[-1]]
else:
ins_batch = _slice_arrays(ins, batch_ids)
batch_outs = f(ins_batch)
if not isinstance(batch_outs, list):
batch_outs = [batch_outs]
if batch_index == 0:
# Pre-allocate the results arrays.
for batch_out in batch_outs:
shape = (num_samples,) + batch_out.shape[1:]
outs.append(np.zeros(shape, dtype=batch_out.dtype))
for i, batch_out in enumerate(batch_outs):
outs[i][batch_start:batch_end] = batch_out
if verbose == 1:
progbar.update(batch_end)
if len(outs) == 1:
return outs[0]
return outs
def _test_loop(self, f, ins, batch_size=None, verbose=0, steps=None):
"""Abstract method to loop over some data in batches.
Arguments:
f: Keras function returning a list of tensors.
ins: list of tensors to be fed to `f`.
batch_size: integer batch size or `None`.
verbose: verbosity mode.
steps: Total number of steps (batches of samples)
before declaring predictions finished.
Ignored with the default value of `None`.
Returns:
Scalar loss (if the model has a single output and no metrics)
or list of scalars (if the model has multiple outputs
and/or metrics). The attribute `model.metrics_names` will give you
the display labels for the scalar outputs.
"""
num_samples = self._check_num_samples(ins, batch_size, steps, 'steps')
outs = []
if verbose == 1:
if steps is not None:
progbar = Progbar(target=steps)
else:
progbar = Progbar(target=num_samples)
if steps is not None:
for step in range(steps):
batch_outs = f(ins)
if isinstance(batch_outs, list):
if step == 0:
for _ in enumerate(batch_outs):
outs.append(0.)
for i, batch_out in enumerate(batch_outs):
outs[i] += batch_out
else:
if step == 0:
outs.append(0.)
outs[0] += batch_outs
if verbose == 1:
progbar.update(step + 1)
for i in range(len(outs)):
outs[i] /= steps
else:
if verbose == 1:
progbar = Progbar(target=num_samples)
batches = _make_batches(num_samples, batch_size)
index_array = np.arange(num_samples)
for batch_index, (batch_start, batch_end) in enumerate(batches):
batch_ids = index_array[batch_start:batch_end]
if isinstance(ins[-1], float):
# Do not slice the training phase flag.
ins_batch = _slice_arrays(ins[:-1], batch_ids) + [ins[-1]]
else:
ins_batch = _slice_arrays(ins, batch_ids)
batch_outs = f(ins_batch)
if isinstance(batch_outs, list):
if batch_index == 0:
          for _ in enumerate(batch_outs):
outs.append(0.)
for i, batch_out in enumerate(batch_outs):
outs[i] += batch_out * len(batch_ids)
else:
if batch_index == 0:
outs.append(0.)
outs[0] += batch_outs * len(batch_ids)
if verbose == 1:
progbar.update(batch_end)
for i in range(len(outs)):
outs[i] /= num_samples
if len(outs) == 1:
return outs[0]
return outs
def _standardize_user_data(self,
x,
y,
sample_weight=None,
class_weight=None,
check_batch_axis=True,
batch_size=None):
if not hasattr(self, 'optimizer'):
raise RuntimeError('You must compile a model before '
'training/testing. '
'Use `model.compile(optimizer, loss)`.')
output_shapes = []
for output_shape, loss_fn in zip(self._feed_output_shapes,
self._feed_loss_fns):
if loss_fn is losses.sparse_categorical_crossentropy:
output_shapes.append(output_shape[:-1] + (1,))
else:
output_shapes.append(output_shape)
x = _standardize_input_data(
x,
self._feed_input_names,
self._feed_input_shapes,
check_batch_axis=False,
exception_prefix='input')
y = _standardize_input_data(
y,
self._feed_output_names,
output_shapes,
check_batch_axis=False,
exception_prefix='target')
sample_weights = _standardize_sample_weights(sample_weight,
self._feed_output_names)
class_weights = _standardize_class_weights(class_weight,
self._feed_output_names)
sample_weights = [
_standardize_weights(ref, sw, cw, mode)
for (ref, sw, cw, mode) in zip(y, sample_weights, class_weights,
self._feed_sample_weight_modes)
]
_check_array_lengths(x, y, sample_weights)
_check_loss_and_target_compatibility(y, self._feed_loss_fns,
self._feed_output_shapes)
if self.stateful and batch_size:
if x[0].shape[0] % batch_size != 0:
raise ValueError('In a stateful network, '
'you should only pass inputs with '
'a number of samples that can be '
'divided by the batch size. Found: ' +
str(x[0].shape[0]) + ' samples')
return x, y, sample_weights
def _get_deduped_metrics_names(self):
out_labels = self.metrics_names
# Rename duplicated metrics name
# (can happen with an output layer shared among multiple dataflows).
deduped_out_labels = []
for i, label in enumerate(out_labels):
new_label = label
if out_labels.count(label) > 1:
dup_idx = out_labels[:i].count(label)
new_label += '_' + str(dup_idx + 1)
deduped_out_labels.append(new_label)
return deduped_out_labels
def fit(self,
x=None,
y=None,
batch_size=None,
epochs=1,
verbose=1,
callbacks=None,
validation_split=0.,
validation_data=None,
shuffle=True,
class_weight=None,
sample_weight=None,
initial_epoch=0,
steps_per_epoch=None,
validation_steps=None):
"""Trains the model for a fixed number of epochs (iterations on a dataset).
Arguments:
x: Numpy array of training data (if the model has a single input),
or list of Numpy arrays (if the model has multiple inputs).
If input layers in the model are named, you can also pass a
dictionary mapping input names to Numpy arrays.
`x` can be `None` (default) if feeding from
TensorFlow data tensors.
y: Numpy array of target (label) data
(if the model has a single output),
or list of Numpy arrays (if the model has multiple outputs).
If output layers in the model are named, you can also pass a
dictionary mapping output names to Numpy arrays.
`y` can be `None` (default) if feeding from
TensorFlow data tensors.
Can be `None` (default) if feeding from framework-native tensors.
batch_size: Integer or `None`.
Number of samples per gradient update.
If unspecified, it will default to 32.
epochs: Integer. Number of epochs to train the model.
An epoch is an iteration over the entire `x` and `y`
data provided.
Note that in conjunction with `initial_epoch`,
`epochs` is to be understood as "final epoch".
The model is not trained for a number of iterations
given by `epochs`, but merely until the epoch
of index `epochs` is reached.
verbose: 0, 1, or 2. Verbosity mode.
0 = silent, 1 = progress bar, 2 = one line per epoch.
callbacks: List of `keras.callbacks.Callback` instances.
List of callbacks to apply during training.
See [callbacks](/callbacks).
validation_split: Float between 0 and 1.
Fraction of the training data to be used as validation data.
The model will set apart this fraction of the training data,
will not train on it, and will evaluate
the loss and any model metrics
on this data at the end of each epoch.
The validation data is selected from the last samples
in the `x` and `y` data provided, before shuffling.
validation_data: tuple `(x_val, y_val)` or tuple
`(x_val, y_val, val_sample_weights)` on which to evaluate
the loss and any model metrics at the end of each epoch.
The model will not be trained on this data.
This will override `validation_split`.
shuffle: Boolean (whether to shuffle the training data
before each epoch) or str (for 'batch').
'batch' is a special option for dealing with the
limitations of HDF5 data; it shuffles in batch-sized chunks.
Has no effect when `steps_per_epoch` is not `None`.
class_weight: Optional dictionary mapping class indices (integers)
to a weight (float) value, used for weighting the loss function
(during training only).
This can be useful to tell the model to
"pay more attention" to samples from
an under-represented class.
sample_weight: Optional Numpy array of weights for
the training samples, used for weighting the loss function
(during training only). You can either pass a flat (1D)
Numpy array with the same length as the input samples
(1:1 mapping between weights and samples),
or in the case of temporal data,
you can pass a 2D array with shape
`(samples, sequence_length)`,
to apply a different weight to every timestep of every sample.
In this case you should make sure to specify
`sample_weight_mode="temporal"` in `compile()`.
initial_epoch: Epoch at which to start training
(useful for resuming a previous training run).
steps_per_epoch: Total number of steps (batches of samples)
before declaring one epoch finished and starting the
next epoch. When training with input tensors such as
TensorFlow data tensors, the default `None` is equal to
the number of unique samples in your dataset divided by
the batch size, or 1 if that cannot be determined.
validation_steps: Only relevant if `steps_per_epoch`
is specified. Total number of steps (batches of samples)
to validate before stopping.
Returns:
A `History` object. Its `History.history` attribute is
a record of training loss values and metrics values
at successive epochs, as well as validation loss values
and validation metrics values (if applicable).
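    Example:
    ```python
      # A minimal sketch; `model` is assumed to be compiled and the
      # array shapes and hyper-parameters are illustrative.
      x_train = np.random.random((1000, 20))
      y_train = np.random.randint(2, size=(1000, 1))
      history = model.fit(x_train, y_train,
                          batch_size=32,
                          epochs=10,
                          validation_split=0.2)
      print(history.history['loss'])
    ```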
Raises:
ValueError: In case of mismatch between the provided input data
and what the model expects.
"""
# Backwards compatibility
if batch_size is None and steps_per_epoch is None:
batch_size = 32
if x is None and y is None and steps_per_epoch is None:
raise ValueError('If fitting from data tensors, '
'you should specify the `steps_per_epoch` '
'argument.')
# Validate user data.
x, y, sample_weights = self._standardize_user_data(
x,
y,
sample_weight=sample_weight,
class_weight=class_weight,
check_batch_axis=False,
batch_size=batch_size)
# Prepare validation data.
do_validation = False
val_ins = []
if validation_data:
do_validation = True
if len(validation_data) == 2:
val_x, val_y = validation_data # pylint: disable=unpacking-non-sequence
val_sample_weight = None
elif len(validation_data) == 3:
val_x, val_y, val_sample_weight = validation_data # pylint: disable=unpacking-non-sequence
else:
raise ValueError(
'When passing validation_data, '
'it must contain 2 (x_val, y_val) '
'or 3 (x_val, y_val, val_sample_weights) '
'items, however it contains %d items' % len(validation_data))
val_x, val_y, val_sample_weights = self._standardize_user_data(
val_x,
val_y,
sample_weight=val_sample_weight,
check_batch_axis=False,
batch_size=batch_size)
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
val_ins = val_x + val_y + val_sample_weights + [0.]
else:
val_ins = val_x + val_y + val_sample_weights
elif validation_split and 0. < validation_split < 1.:
do_validation = True
if hasattr(x[0], 'shape'):
split_at = int(x[0].shape[0] * (1. - validation_split))
else:
split_at = int(len(x[0]) * (1. - validation_split))
x, val_x = (_slice_arrays(x, 0, split_at), _slice_arrays(x, split_at))
y, val_y = (_slice_arrays(y, 0, split_at), _slice_arrays(y, split_at))
sample_weights, val_sample_weights = (_slice_arrays(
sample_weights, 0, split_at), _slice_arrays(sample_weights, split_at))
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
val_ins = val_x + val_y + val_sample_weights + [0.]
else:
val_ins = val_x + val_y + val_sample_weights
elif validation_steps:
do_validation = True
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
val_ins = [0.]
# Prepare input arrays and training function.
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = x + y + sample_weights + [1.]
else:
ins = x + y + sample_weights
self._make_train_function()
f = self.train_function
# Prepare display labels.
out_labels = self._get_deduped_metrics_names()
if do_validation:
self._make_test_function()
val_f = self.test_function
callback_metrics = copy.copy(out_labels) + [
'val_' + n for n in out_labels
]
else:
val_f = None
callback_metrics = copy.copy(out_labels)
# Delegate logic to `_fit_loop`.
return self._fit_loop(
f,
ins,
out_labels=out_labels,
batch_size=batch_size,
epochs=epochs,
verbose=verbose,
callbacks=callbacks,
val_f=val_f,
val_ins=val_ins,
shuffle=shuffle,
callback_metrics=callback_metrics,
initial_epoch=initial_epoch,
steps_per_epoch=steps_per_epoch,
validation_steps=validation_steps)
def evaluate(self,
x=None,
y=None,
batch_size=None,
verbose=1,
sample_weight=None,
steps=None):
"""Returns the loss value & metrics values for the model in test mode.
Computation is done in batches.
Arguments:
x: Numpy array of test data (if the model has a single input),
or list of Numpy arrays (if the model has multiple inputs).
If input layers in the model are named, you can also pass a
dictionary mapping input names to Numpy arrays.
`x` can be `None` (default) if feeding from
framework-native tensors (e.g. TensorFlow data tensors).
y: Numpy array of target (label) data
(if the model has a single output),
or list of Numpy arrays (if the model has multiple outputs).
If output layers in the model are named, you can also pass a
dictionary mapping output names to Numpy arrays.
`y` can be `None` (default) if feeding from
framework-native tensors (e.g. TensorFlow data tensors).
batch_size: Integer or `None`.
Number of samples per evaluation step.
If unspecified, `batch_size` will default to 32.
verbose: 0 or 1. Verbosity mode.
0 = silent, 1 = progress bar.
sample_weight: Optional Numpy array of weights for
the test samples, used for weighting the loss function.
You can either pass a flat (1D)
Numpy array with the same length as the input samples
(1:1 mapping between weights and samples),
or in the case of temporal data,
you can pass a 2D array with shape
`(samples, sequence_length)`,
to apply a different weight to every timestep of every sample.
In this case you should make sure to specify
`sample_weight_mode="temporal"` in `compile()`.
steps: Integer or `None`.
Total number of steps (batches of samples)
before declaring the evaluation round finished.
The default `None` is equal to the number of unique samples in
your dataset divided by the batch size.
Returns:
Scalar test loss (if the model has a single output and no metrics)
or list of scalars (if the model has multiple outputs
and/or metrics). The attribute `model.metrics_names` will give you
the display labels for the scalar outputs.
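    Example:
    ```python
      # A minimal sketch; `x_test` and `y_test` are illustrative arrays
      # and `model` is assumed to be compiled.
      results = model.evaluate(x_test, y_test, batch_size=128)
      # `results` is a scalar loss, or a list aligned with
      # `model.metrics_names` when several values are reported.
    ```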
Raises:
ValueError: In case of invalid arguments.
"""
# Backwards compatibility.
if batch_size is None and steps is None:
batch_size = 32
if x is None and y is None and steps is None:
raise ValueError('If evaluating from data tensors, '
'you should specify the `steps` '
'argument.')
# Validate user data.
x, y, sample_weights = self._standardize_user_data(
x,
y,
sample_weight=sample_weight,
check_batch_axis=False,
batch_size=batch_size)
# Prepare inputs, delegate logic to `_test_loop`.
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = x + y + sample_weights + [0.]
else:
ins = x + y + sample_weights
self._make_test_function()
f = self.test_function
return self._test_loop(
f, ins, batch_size=batch_size, verbose=verbose, steps=steps)
def predict(self, x, batch_size=None, verbose=0, steps=None):
"""Generates output predictions for the input samples.
Computation is done in batches.
Arguments:
      x: The input data, as a Numpy array
        (or list of Numpy arrays if the model has multiple inputs).
batch_size: Integer. If unspecified, it will default to 32.
verbose: Verbosity mode, 0 or 1.
steps: Total number of steps (batches of samples)
before declaring the prediction round finished.
Ignored with the default value of `None`.
Returns:
Numpy array(s) of predictions.
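    Example:
    ```python
      # A minimal sketch; the input shape is illustrative.
      x = np.random.random((5, 20))
      preds = model.predict(x, batch_size=5)
      # `preds` has one entry per input sample, matching the model's
      # output shape.
    ```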
Raises:
ValueError: In case of mismatch between the provided
input data and the model's expectations,
or in case a stateful model receives a number of samples
that is not a multiple of the batch size.
"""
# Backwards compatibility.
if batch_size is None and steps is None:
batch_size = 32
if x is None and steps is None:
raise ValueError('If predicting from data tensors, '
'you should specify the `steps` '
'argument.')
# Validate user data.
x = _standardize_input_data(
x,
self._feed_input_names,
self._feed_input_shapes,
check_batch_axis=False)
if self.stateful:
if x[0].shape[0] > batch_size and x[0].shape[0] % batch_size != 0:
raise ValueError('In a stateful network, '
'you should only pass inputs with '
'a number of samples that can be '
'divided by the batch size. Found: ' +
str(x[0].shape[0]) + ' samples. '
'Batch size: ' + str(batch_size) + '.')
# Prepare inputs, delegate logic to `_predict_loop`.
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = x + [0.]
else:
ins = x
self._make_predict_function()
f = self.predict_function
return self._predict_loop(
f, ins, batch_size=batch_size, verbose=verbose, steps=steps)
def train_on_batch(self, x, y, sample_weight=None, class_weight=None):
"""Runs a single gradient update on a single batch of data.
Arguments:
x: Numpy array of training data,
or list of Numpy arrays if the model has multiple inputs.
If all inputs in the model are named,
you can also pass a dictionary
mapping input names to Numpy arrays.
y: Numpy array of target data,
or list of Numpy arrays if the model has multiple outputs.
If all outputs in the model are named,
you can also pass a dictionary
mapping output names to Numpy arrays.
sample_weight: Optional array of the same length as x, containing
weights to apply to the model's loss for each sample.
In the case of temporal data, you can pass a 2D array
with shape (samples, sequence_length),
to apply a different weight to every timestep of every sample.
In this case you should make sure to specify
sample_weight_mode="temporal" in compile().
class_weight: Optional dictionary mapping
class indices (integers) to
a weight (float) to apply to the model's loss for the samples
from this class during training.
This can be useful to tell the model to "pay more attention" to
samples from an under-represented class.
Returns:
Scalar training loss
(if the model has a single output and no metrics)
or list of scalars (if the model has multiple outputs
and/or metrics). The attribute `model.metrics_names` will give you
the display labels for the scalar outputs.
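    Example:
    ```python
      # A minimal sketch; the batch below is illustrative and `model`
      # is assumed to be compiled.
      x_batch = np.random.random((32, 20))
      y_batch = np.random.randint(2, size=(32, 1))
      loss = model.train_on_batch(x_batch, y_batch)
    ```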
"""
x, y, sample_weights = self._standardize_user_data(
x,
y,
sample_weight=sample_weight,
class_weight=class_weight,
check_batch_axis=True)
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = x + y + sample_weights + [1.]
else:
ins = x + y + sample_weights
self._make_train_function()
outputs = self.train_function(ins)
if len(outputs) == 1:
return outputs[0]
return outputs
def test_on_batch(self, x, y, sample_weight=None):
"""Test the model on a single batch of samples.
Arguments:
x: Numpy array of test data,
or list of Numpy arrays if the model has multiple inputs.
If all inputs in the model are named,
you can also pass a dictionary
mapping input names to Numpy arrays.
y: Numpy array of target data,
or list of Numpy arrays if the model has multiple outputs.
If all outputs in the model are named,
you can also pass a dictionary
mapping output names to Numpy arrays.
sample_weight: Optional array of the same length as x, containing
weights to apply to the model's loss for each sample.
In the case of temporal data, you can pass a 2D array
with shape (samples, sequence_length),
to apply a different weight to every timestep of every sample.
In this case you should make sure to specify
sample_weight_mode="temporal" in compile().
Returns:
Scalar test loss (if the model has a single output and no metrics)
or list of scalars (if the model has multiple outputs
and/or metrics). The attribute `model.metrics_names` will give you
the display labels for the scalar outputs.
"""
x, y, sample_weights = self._standardize_user_data(
x, y, sample_weight=sample_weight, check_batch_axis=True)
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = x + y + sample_weights + [0.]
else:
ins = x + y + sample_weights
self._make_test_function()
outputs = self.test_function(ins)
if len(outputs) == 1:
return outputs[0]
return outputs
def predict_on_batch(self, x):
"""Returns predictions for a single batch of samples.
Arguments:
x: Input samples, as a Numpy array.
Returns:
Numpy array(s) of predictions.
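    Example:
    ```python
      # A minimal sketch; the batch shape is illustrative.
      preds = model.predict_on_batch(np.random.random((32, 20)))
    ```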
"""
x = _standardize_input_data(x, self._feed_input_names,
self._feed_input_shapes)
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = x + [0.]
else:
ins = x
self._make_predict_function()
outputs = self.predict_function(ins)
if len(outputs) == 1:
return outputs[0]
return outputs
def fit_generator(self,
generator,
steps_per_epoch,
epochs=1,
verbose=1,
callbacks=None,
validation_data=None,
validation_steps=None,
class_weight=None,
max_queue_size=10,
workers=1,
use_multiprocessing=False,
shuffle=True,
initial_epoch=0,
**kwargs):
"""Fits the model on data yielded batch-by-batch by a Python generator.
The generator is run in parallel to the model, for efficiency.
For instance, this allows you to do real-time data augmentation
on images on CPU in parallel to training your model on GPU.
The use of `keras.utils.Sequence` guarantees the ordering
and guarantees the single use of every input per epoch when
using `use_multiprocessing=True`.
Arguments:
generator: A generator or an instance of Sequence (keras.utils.Sequence)
object in order to avoid duplicate data when using multiprocessing.
The output of the generator must be either
- a tuple (inputs, targets)
- a tuple (inputs, targets, sample_weights).
All arrays should contain the same number of samples.
The generator is expected to loop over its data
indefinitely. An epoch finishes when `steps_per_epoch`
batches have been seen by the model.
steps_per_epoch: Total number of steps (batches of samples)
to yield from `generator` before declaring one epoch
finished and starting the next epoch. It should typically
be equal to the number of unique samples of your dataset
divided by the batch size. Not used if using `Sequence`.
epochs: Integer, total number of iterations on the data.
verbose: Verbosity mode, 0, 1, or 2.
callbacks: List of callbacks to be called during training.
validation_data: This can be either
- a generator for the validation data
- a tuple (inputs, targets)
- a tuple (inputs, targets, sample_weights).
validation_steps: Only relevant if `validation_data`
is a generator. Total number of steps (batches of samples)
to yield from `generator` before stopping.
class_weight: Dictionary mapping class indices to a weight
for the class.
max_queue_size: Maximum size for the generator queue
workers: Maximum number of processes to spin up
when using process-based threading.
      use_multiprocessing: If True, use process-based threading.
        Note that because this implementation relies on multiprocessing,
        you should not pass non-picklable arguments to the generator,
        as they can't be passed easily to child processes.
shuffle: Whether to shuffle the data at the beginning of each
epoch. Only used with instances of `Sequence`
(`keras.utils.Sequence`).
initial_epoch: Epoch at which to start training
(useful for resuming a previous training run)
**kwargs: support for legacy arguments.
Returns:
A `History` object.
Example:
```python
def generate_arrays_from_file(path):
while 1:
f = open(path)
for line in f:
# create numpy arrays of input data
# and labels, from each line in the file
x1, x2, y = process_line(line)
yield ({'input_1': x1, 'input_2': x2}, {'output': y})
f.close()
model.fit_generator(generate_arrays_from_file('/my_file.txt'),
steps_per_epoch=10000, epochs=10)
```
Raises:
ValueError: In case the generator yields
data in an invalid format.
"""
# Legacy support
if 'max_q_size' in kwargs:
max_queue_size = kwargs.pop('max_q_size')
logging.warning('The argument `max_q_size` has been renamed '
'`max_queue_size`. Update your method calls accordingly.')
if 'pickle_safe' in kwargs:
use_multiprocessing = kwargs.pop('pickle_safe')
logging.warning('The argument `pickle_safe` has been renamed '
'`use_multiprocessing`. '
'Update your method calls accordingly.')
if kwargs:
raise ValueError('Unrecognized keyword arguments: ' + str(kwargs))
wait_time = 0.01 # in seconds
epoch = initial_epoch
do_validation = bool(validation_data)
self._make_train_function()
if do_validation:
self._make_test_function()
# python 2 has 'next', 3 has '__next__'
# avoid any explicit version checks
val_gen = (hasattr(validation_data, 'next') or
hasattr(validation_data, '__next__') or
isinstance(validation_data, Sequence))
if val_gen and not validation_steps:
raise ValueError('When using a generator for validation data, '
'you must specify a value for '
'`validation_steps`.')
# Prepare display labels.
out_labels = self._get_deduped_metrics_names()
callback_metrics = out_labels + ['val_' + n for n in out_labels]
# prepare callbacks
self.history = cbks.History()
callbacks = [cbks.BaseLogger()] + (callbacks or []) + [self.history]
if verbose:
callbacks += [cbks.ProgbarLogger(count_mode='steps')]
callbacks = cbks.CallbackList(callbacks)
# it's possible to callback a different model than self:
if hasattr(self, 'callback_model') and self.callback_model:
callback_model = self.callback_model
else:
callback_model = self
callbacks.set_model(callback_model)
callbacks.set_params({
'epochs': epochs,
'steps': steps_per_epoch,
'verbose': verbose,
'do_validation': do_validation,
'metrics': callback_metrics,
})
callbacks.on_train_begin()
if do_validation and not val_gen:
if len(validation_data) == 2:
val_x, val_y = validation_data # pylint: disable=unpacking-non-sequence
val_sample_weight = None
elif len(validation_data) == 3:
val_x, val_y, val_sample_weight = validation_data # pylint: disable=unpacking-non-sequence
else:
raise ValueError('`validation_data` should be a tuple '
'`(val_x, val_y, val_sample_weight)` '
'or `(val_x, val_y)`. Found: ' + str(validation_data))
val_x, val_y, val_sample_weights = self._standardize_user_data(
val_x, val_y, val_sample_weight)
val_data = val_x + val_y + val_sample_weights
if self.uses_learning_phase and not isinstance(K.learning_phase(), int):
val_data += [0.]
for cbk in callbacks:
cbk.validation_data = val_data
is_sequence = isinstance(generator, Sequence)
if not is_sequence and use_multiprocessing and workers > 1:
      logging.warning(
          'Using a generator with `use_multiprocessing=True`'
          ' and multiple workers may duplicate your data.'
          ' Please consider using the `keras.utils.Sequence` class.')
if is_sequence:
steps_per_epoch = len(generator)
enqueuer = None
try:
if is_sequence:
enqueuer = OrderedEnqueuer(
generator, use_multiprocessing=use_multiprocessing, shuffle=shuffle)
else:
enqueuer = GeneratorEnqueuer(
generator,
use_multiprocessing=use_multiprocessing,
wait_time=wait_time)
enqueuer.start(workers=workers, max_queue_size=max_queue_size)
output_generator = enqueuer.get()
callback_model.stop_training = False
while epoch < epochs:
callbacks.on_epoch_begin(epoch)
steps_done = 0
batch_index = 0
while steps_done < steps_per_epoch:
generator_output = next(output_generator)
if not hasattr(generator_output, '__len__'):
raise ValueError('Output of generator should be '
'a tuple `(x, y, sample_weight)` '
'or `(x, y)`. Found: ' + str(generator_output))
if len(generator_output) == 2:
x, y = generator_output
sample_weight = None
elif len(generator_output) == 3:
x, y, sample_weight = generator_output
else:
raise ValueError('Output of generator should be '
'a tuple `(x, y, sample_weight)` '
'or `(x, y)`. Found: ' + str(generator_output))
# build batch logs
batch_logs = {}
if isinstance(x, list):
batch_size = x[0].shape[0]
elif isinstance(x, dict):
batch_size = list(x.values())[0].shape[0]
else:
batch_size = x.shape[0]
batch_logs['batch'] = batch_index
batch_logs['size'] = batch_size
callbacks.on_batch_begin(batch_index, batch_logs)
outs = self.train_on_batch(
x, y, sample_weight=sample_weight, class_weight=class_weight)
if not isinstance(outs, list):
outs = [outs]
for l, o in zip(out_labels, outs):
batch_logs[l] = o
callbacks.on_batch_end(batch_index, batch_logs)
# Construct epoch logs.
epoch_logs = {}
batch_index += 1
steps_done += 1
# Epoch finished.
if steps_done >= steps_per_epoch and do_validation:
if val_gen:
val_outs = self.evaluate_generator(
validation_data,
validation_steps,
max_queue_size=max_queue_size,
workers=workers,
use_multiprocessing=use_multiprocessing)
else:
# No need for try/except because
# data has already been validated.
val_outs = self.evaluate(
val_x,
val_y,
batch_size=batch_size,
sample_weight=val_sample_weights,
verbose=0)
if not isinstance(val_outs, list):
val_outs = [val_outs]
# Same labels assumed.
for l, o in zip(out_labels, val_outs):
epoch_logs['val_' + l] = o
if callback_model.stop_training:
break
callbacks.on_epoch_end(epoch, epoch_logs)
epoch += 1
if callback_model.stop_training:
break
finally:
if enqueuer is not None:
enqueuer.stop()
callbacks.on_train_end()
return self.history
def evaluate_generator(self,
generator,
steps,
max_queue_size=10,
workers=1,
use_multiprocessing=False,
**kwargs):
"""Evaluates the model on a data generator.
The generator should return the same kind of data
as accepted by `test_on_batch`.
Arguments:
generator: Generator yielding tuples (inputs, targets)
or (inputs, targets, sample_weights)
or an instance of Sequence (keras.utils.Sequence)
object in order to avoid duplicate data
when using multiprocessing.
steps: Total number of steps (batches of samples)
to yield from `generator` before stopping.
Not used if using `Sequence`.
max_queue_size: maximum size for the generator queue
workers: maximum number of processes to spin up
when using process-based threading.
use_multiprocessing: if True, use process based threading.
Note that because
this implementation relies on multiprocessing,
you should not pass
non picklable arguments to the generator
as they can't be passed
easily to children processes.
**kwargs: support for legacy arguments.
Returns:
Scalar test loss (if the model has a single output and no metrics)
or list of scalars (if the model has multiple outputs
and/or metrics). The attribute `model.metrics_names` will give you
the display labels for the scalar outputs.
Raises:
ValueError: In case the generator yields
data in an invalid format.
"""
# Legacy support
if 'max_q_size' in kwargs:
max_queue_size = kwargs.pop('max_q_size')
logging.warning('The argument `max_q_size` has been renamed '
'`max_queue_size`. Update your method calls accordingly.')
if 'pickle_safe' in kwargs:
use_multiprocessing = kwargs.pop('pickle_safe')
logging.warning('The argument `pickle_safe` has been renamed '
'`use_multiprocessing`. '
'Update your method calls accordingly.')
if kwargs:
raise ValueError('Unrecognized keyword arguments: ' + str(kwargs))
self._make_test_function()
steps_done = 0
wait_time = 0.01
all_outs = []
batch_sizes = []
is_sequence = isinstance(generator, Sequence)
if not is_sequence and use_multiprocessing and workers > 1:
      logging.warning(
          'Using a generator with `use_multiprocessing=True`'
          ' and multiple workers may duplicate your data.'
          ' Please consider using the `keras.utils.Sequence`'
          ' class.')
if is_sequence:
steps = len(generator)
enqueuer = None
try:
if is_sequence:
enqueuer = OrderedEnqueuer(
generator, use_multiprocessing=use_multiprocessing)
else:
enqueuer = GeneratorEnqueuer(
generator,
use_multiprocessing=use_multiprocessing,
wait_time=wait_time)
enqueuer.start(workers=workers, max_queue_size=max_queue_size)
output_generator = enqueuer.get()
while steps_done < steps:
generator_output = next(output_generator)
if not hasattr(generator_output, '__len__'):
raise ValueError('Output of generator should be a tuple '
'(x, y, sample_weight) '
'or (x, y). Found: ' + str(generator_output))
if len(generator_output) == 2:
x, y = generator_output
sample_weight = None
elif len(generator_output) == 3:
x, y, sample_weight = generator_output
else:
raise ValueError('Output of generator should be a tuple '
'(x, y, sample_weight) '
'or (x, y). Found: ' + str(generator_output))
outs = self.test_on_batch(x, y, sample_weight=sample_weight)
if isinstance(x, list):
batch_size = len(x[0])
elif isinstance(x, dict):
batch_size = len(list(x.values())[0])
else:
batch_size = len(x)
if batch_size == 0:
raise ValueError('Received an empty batch. '
'Batches should at least contain one item.')
all_outs.append(outs)
steps_done += 1
batch_sizes.append(batch_size)
finally:
if enqueuer is not None:
enqueuer.stop()
if not isinstance(outs, list):
return np.average(np.asarray(all_outs), weights=batch_sizes)
else:
averages = []
for i in range(len(outs)):
averages.append(
np.average([out[i] for out in all_outs], weights=batch_sizes))
return averages
def predict_generator(self,
generator,
steps,
max_queue_size=10,
workers=1,
use_multiprocessing=False,
verbose=0,
**kwargs):
"""Generates predictions for the input samples from a data generator.
The generator should return the same kind of data as accepted by
`predict_on_batch`.
Arguments:
generator: Generator yielding batches of input samples
or an instance of Sequence (keras.utils.Sequence)
object in order to avoid duplicate data
when using multiprocessing.
steps: Total number of steps (batches of samples)
to yield from `generator` before stopping.
max_queue_size: Maximum size for the generator queue.
Not used if using `Sequence`.
workers: Maximum number of processes to spin up
when using process-based threading.
use_multiprocessing: If `True`, use process based threading.
Note that because
this implementation relies on multiprocessing,
you should not pass
non picklable arguments to the generator
as they can't be passed
easily to children processes.
verbose: verbosity mode, 0 or 1.
**kwargs: support for legacy arguments.
Returns:
Numpy array(s) of predictions.
Raises:
ValueError: In case the generator yields
data in an invalid format.
"""
# Legacy support
if 'max_q_size' in kwargs:
max_queue_size = kwargs.pop('max_q_size')
logging.warning('The argument `max_q_size` has been renamed '
'`max_queue_size`. Update your method calls accordingly.')
if 'pickle_safe' in kwargs:
use_multiprocessing = kwargs.pop('pickle_safe')
logging.warning('The argument `pickle_safe` has been renamed '
'`use_multiprocessing`. '
'Update your method calls accordingly.')
self._make_predict_function()
steps_done = 0
wait_time = 0.01
all_outs = []
is_sequence = isinstance(generator, Sequence)
if not is_sequence and use_multiprocessing and workers > 1:
      logging.warning(
          'Using a generator with `use_multiprocessing=True`'
          ' and multiple workers may duplicate your data.'
          ' Please consider using the `keras.utils.Sequence`'
          ' class.')
if is_sequence:
steps = len(generator)
enqueuer = None
try:
if is_sequence:
enqueuer = OrderedEnqueuer(
generator, use_multiprocessing=use_multiprocessing)
else:
enqueuer = GeneratorEnqueuer(
generator,
use_multiprocessing=use_multiprocessing,
wait_time=wait_time)
enqueuer.start(workers=workers, max_queue_size=max_queue_size)
output_generator = enqueuer.get()
if verbose == 1:
progbar = Progbar(target=steps)
while steps_done < steps:
generator_output = next(output_generator)
if isinstance(generator_output, tuple):
# Compatibility with the generators
# used for training.
if len(generator_output) == 2:
x, _ = generator_output
elif len(generator_output) == 3:
x, _, _ = generator_output
else:
raise ValueError('Output of generator should be '
'a tuple `(x, y, sample_weight)` '
'or `(x, y)`. Found: ' + str(generator_output))
else:
# Assumes a generator that only
# yields inputs (not targets and sample weights).
x = generator_output
outs = self.predict_on_batch(x)
if not isinstance(outs, list):
outs = [outs]
if not all_outs:
for out in outs:
all_outs.append([])
for i, out in enumerate(outs):
all_outs[i].append(out)
steps_done += 1
if verbose == 1:
progbar.update(steps_done)
finally:
if enqueuer is not None:
enqueuer.stop()
if len(all_outs) == 1:
if steps_done == 1:
return all_outs[0][0]
else:
return np.concatenate(all_outs[0])
if steps_done == 1:
return [out for out in all_outs]
else:
return [np.concatenate(out) for out in all_outs]<|fim▁end|>
|
"""
num_batches = int(np.ceil(size / float(batch_size)))
|
<|file_name|>session.test.js<|end_file_name|><|fim▁begin|>/**!
* koa-generic-session - test/session.test.js
* Copyright(c) 2013
* MIT Licensed
*
* Authors:
* dead_horse <[email protected]> (http://deadhorse.me)
*/
'use strict';
/**
* Module dependencies.
*/
var Session = require('..');
var koa = require('koa');
var app = require('./support/server');
var request = require('supertest');
var mm = require('mm');
var should = require('should');
var EventEmitter = require('events').EventEmitter;
describe('test/koa-session.test.js', function () {
describe('init', function () {
afterEach(mm.restore);
beforeEach(function (done) {
request(app)
      .get('/session/remove')
.expect(200, done);
});
it('should warn when in production', function (done) {
mm(process.env, 'NODE_ENV', 'production');
mm(console, 'warn', function (message) {
message.should.equal('Warning: koa-generic-session\'s MemoryStore is not\n' +
'designed for a production environment, as it will leak\n' +
'memory, and will not scale past a single process.');
done();
});
Session({secret: 'secret'});
});
it('should listen disconnect and connect', function () {
var store = new EventEmitter();
Session({
secret: 'secret',
store: store
});
store._events.disconnect.should.be.Function;
store._events.connect.should.be.Function;
});
});
describe('use', function () {
var cookie;
var mockCookie = 'koa.sid=s:dsfdss.PjOnUyhFG5bkeHsZ1UbEY7bDerxBINnZsD5MUguEph8; path=/; httponly';
it('should GET /session/get ok', function (done) {
request(app)
.get('/session/get')
.expect(/1/)
.end(function (err, res) {
cookie = res.headers['set-cookie'].join(';');
done();
});
});
it('should GET /session/get second ok', function (done) {
request(app)
.get('/session/get')
.set('cookie', cookie)
.expect(/2/, done);
});
it('should GET /session/httponly ok', function (done) {
request(app)
.get('/session/httponly')
.set('cookie', cookie)
.expect(/httpOnly: false/, function (err, res) {
should.not.exist(err);
cookie = res.headers['set-cookie'].join(';');
cookie.indexOf('httponly').should.equal(-1);
cookie.indexOf('expires=').should.above(0);
request(app)
.get('/session/get')
.set('cookie', cookie)
.expect(/3/, done);
});
});
it('should GET /session/httponly twice ok', function (done) {
request(app)
.get('/session/httponly')
.set('cookie', cookie)
.expect(/httpOnly: true/, function (err, res) {
should.not.exist(err);
cookie = res.headers['set-cookie'].join(';');
cookie.indexOf('httponly').should.above(0);
cookie.indexOf('expires=').should.above(0);
done();
});
});
it('should another user GET /session/get ok', function (done) {
request(app)
.get('/session/get')
.expect(/1/, done);
});
it('should GET /session/nothing ok', function (done) {
request(app)
.get('/session/nothing')
.set('cookie', cookie)
.expect(/3/, done);
});
it('should wrong cookie GET /session/get ok', function (done) {
request(app)
.get('/session/get')
.set('cookie', mockCookie)
.expect(/1/, done);
});
it('should wrong cookie GET /session/get twice ok', function (done) {
request(app)
.get('/session/get')
.set('cookie', mockCookie)
.expect(/1/, done);
});
it('should GET /wrongpath response no session', function (done) {
request(app)
.get('/wrongpath')
.set('cookie', cookie)
.expect(/no session/, done);
});
it('should GET /session/remove ok', function (done) {
request(app)
.get('/session/remove')
.set('cookie', cookie)
.expect(/0/, function () {
request(app)
.get('/session/get')
.set('cookie', cookie)
.expect(/1/, done);
});
});
it('should GET / error by session ok', function (done) {
request(app)
.get('/')
.expect(/no session/, done);
});
it('should GET /session ok', function (done) {
request(app)
.get('/session')
.expect(/has session/, done);
});
<|fim▁hole|> request(app)
.get('/session/rewrite')
.expect({foo: 'bar', path: '/session/rewrite'}, done);
});
it('should regenerate a new session when session invalid', function (done) {
request(app)
.get('/session/get')
.expect('1', function (err) {
should.not.exist(err);
request(app)
.get('/session/nothing?valid=false')
.expect('', function (err) {
should.not.exist(err);
request(app)
.get('/session/get')
.expect('1', done);
});
});
});
it('should GET /session ok', function (done) {
request(app)
.get('/session/id?test_sid_append=test')
.expect(/test$/, done);
});
it('should force a session id ok', function (done) {
request(app)
.get('/session/get')
.expect(/.*/, function(err, res) {
should.not.exist(err);
cookie = res.headers['set-cookie'][0].split(';');
var val = cookie[0].split('=').pop();
request(app)
.get('/session/id?force_session_id=' + val)
.expect(new RegExp(val), done);
});
});
it('should regenerate existing sessions', function (done) {
var agent = request.agent(app)
agent
.get('/session/get')
.expect(/.+/, function(err, res) {
var firstId = res.body;
agent
.get('/session/regenerate')
.expect(/.+/, function(err, res) {
var secondId = res.body;
secondId.should.not.equal(firstId);
done();
});
});
});
it('should regenerate a new session', function (done) {
request(app)
.get('/session/regenerateWithData')
.expect({ /* foo: undefined, */ hasSession: true }, done);
});
});
});<|fim▁end|>
|
it('should rewrite session before get ok', function (done) {
|
<|file_name|>xblock_module.py<|end_file_name|><|fim▁begin|># Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes supporting creation and use of content using XBlocks.
Dependencies:
1. XBlock (https://github.com/edx/XBlock)
2. App Engine XBlock runtime
(https://github.com/google/appengine_xblock_runtime)
The appropriate versions of both of these libraries must be installed in the
lib/ folder. See README.rst for more details.
"""
__author__ = 'John Orr ([email protected])'
import cgi
from cStringIO import StringIO
import logging
import mimetypes
import os
import re
import tarfile
import urllib
import uuid
from xml.etree import cElementTree
import appengine_config
from appengine_xblock_runtime import store
import appengine_xblock_runtime.runtime
from common import jinja_utils
from common import safe_dom
from common import schema_fields
from common import tags
from controllers import sites<|fim▁hole|>import django.conf
import django.template.loader
from lxml import etree
import messages
from models import courses
from models import custom_modules
from models import jobs
from models import transforms
import models.models as m_models
from modules.dashboard import filer
from modules.dashboard import unit_lesson_editor
import modules.dashboard.dashboard as dashboard
from modules.oeditor import oeditor
import webapp2
import workbench.runtime
import xblock.core
import xblock.exceptions
import xblock.field_data
import xblock.fields
import xblock.fragment
import xblock.plugin
import xblock.runtime
from google.appengine.ext import blobstore
from google.appengine.ext import db
from google.appengine.ext import ndb
# URI routing for resources belonging to this module
RESOURCES_URI = '/modules/xblock_module/resources'
# Base URI routing used by Course Builder for XBlock static resources
XBLOCK_RESOURCES_URI = '/modules/xblock_module/xblock_resources'
# Base URI routing used by Course Builder for XBlock local resources
XBLOCK_LOCAL_RESOURCES_URI = '/modules/xblock_module/xblock_local_resources'
# URI routing used by Course Builder for call-backs to server-side XBlock code
HANDLER_URI = '/modules/xblock_module/handler'
# URI routing for the MathJax package
MATHJAX_URI = '/modules/xblock_module/MathJax'
# Allow images of up to 5Mb
MAX_ASSET_UPLOAD_SIZE_K = 5 * 1024
# The location of the static workbench files used by the XBlocks
WORKBENCH_STATIC_PATH = os.path.normpath('lib/XBlock/workbench/static')
# The location of the DJango templates used by XBlocks
XBLOCK_TEMPLATES_PATH = 'lib/XBlock/xblock/templates'
# XSRF protection token for handler callbacks
XBLOCK_XSRF_TOKEN_NAME = 'xblock_handler'
XBLOCK_EVENT_SOURCE_NAME = 'xblock-event'
XBLOCK_TAG_EVENT_SOURCE_NAME = 'tag-xblock-event'
XBLOCK_WHITELIST = [
'sequential = cb_xblocks_core.cb_xblocks_core:SequenceBlock',
'video = cb_xblocks_core.cb_xblocks_core:VideoBlock',
'cbquestion = cb_xblocks_core.cb_xblocks_core:QuestionBlock',
'html = cb_xblocks_core.cb_xblocks_core:HtmlBlock',
'vertical = cb_xblocks_core.cb_xblocks_core:VerticalBlock',
'problem = cb_xblocks_core.problem:ProblemBlock'
]
# XBlock runtime section
class StudentFieldData(xblock.field_data.SplitFieldData):
"""A field data manager for use in student (i.e., non-admin) context.
This field data manager prevents students from modifying a field which is
stored as UserScope.NONE, even if an XBlock includes code which sets it.
Thus it defends against poorly-written XBlocks which grant students too
wide permissions.
"""
def __init__(self, db_data):
authored_data = xblock.field_data.ReadOnlyFieldData(db_data)
student_data = db_data
super(StudentFieldData, self).__init__({
xblock.fields.Scope.content: authored_data,
xblock.fields.Scope.settings: authored_data,
xblock.fields.Scope.parent: authored_data,
xblock.fields.Scope.children: authored_data,
xblock.fields.Scope.user_state_summary: student_data,
xblock.fields.Scope.user_state: student_data,
xblock.fields.Scope.user_info: student_data,
xblock.fields.Scope.preferences: student_data})
class ForbiddenXBlockError(Exception):
"""Raised when a non-whitelisted XBlock is requested."""
def select_xblock(identifier, entry_points):
"""Hook called when loading XBlock classes, which enforces whitelist."""
entry_point = xblock.plugin.default_select(identifier, entry_points)
if str(entry_point) not in XBLOCK_WHITELIST:
raise ForbiddenXBlockError(
'Attempted to load forbidden XBlock: %s' % str(entry_point))
return entry_point
class MemoryIdManager(xblock.runtime.MemoryIdManager):
def create_usage(self, def_id, usage_id=None):
"""Extend the method definition to allow a specified usage_id."""
usage_id = usage_id or appengine_xblock_runtime.runtime.generate_id()
self._usages[usage_id] = def_id
return usage_id
def create_definition(self, block_type, def_id=None):
"""Extend the method definition to allow a specified def_id."""
def_id = def_id or appengine_xblock_runtime.runtime.generate_id()
self._definitions[def_id] = block_type
return def_id
class Runtime(appengine_xblock_runtime.runtime.Runtime):
"""A XBlock runtime which uses the App Engine datastore."""
def __init__(
self, handler, id_reader=None, field_data=None, student_id=None,
is_admin=False):
field_data = field_data or xblock.runtime.KvsFieldData(
store.KeyValueStore())
if is_admin:
pass
elif student_id:
field_data = StudentFieldData(field_data)
else:
field_data = xblock.field_data.ReadOnlyFieldData(field_data)
def get_jinja_template(template_name, dirs):
locale = handler.app_context.get_environ()['course']['locale']
return jinja_utils.get_template(template_name, dirs, locale=locale)
services = {'jinja': get_jinja_template}
super(Runtime, self).__init__(
id_reader=id_reader, field_data=field_data, student_id=student_id,
services=services, select=select_xblock)
self.handler = handler
def render_template(self, template_name, **kwargs):
"""Loads the django template for `template_name."""
template = django.template.loader.get_template(template_name)
return template.render(django.template.Context(kwargs))
def wrap_child(self, block, unused_view, frag, unused_context):
wrapped = xblock.fragment.Fragment()
wrapped.add_javascript_url(
self.resource_url('js/vendor/jquery.min.js'))
wrapped.add_javascript_url(
self.resource_url('js/vendor/jquery.cookie.js'))
data = {}
if frag.js_init_fn:
            # Patch to accommodate jqueryui tabs (used by sequence XBlock) in a
# page with <base> tag set. See:
# http://stackoverflow.com/questions/13837304/jquery-ui-non-ajax-tab-loading-whole-website-into-itself
wrapped.add_javascript("""
$(function() {
$(".xblock .tabs ul li a").each(function() {
var href = $(this).attr("href");
if (href && href.charAt(0) == "#") {
$(this).attr("href", location.href.toString() + href);
}
});
});
""")
wrapped.add_javascript_url(
self.resource_url('js/runtime/%s.js' % frag.js_init_version))
wrapped.add_javascript_url(RESOURCES_URI + '/runtime.js')
data = {
'data-init': frag.js_init_fn,
'data-runtime-version': str(frag.js_init_version),
'data-usage': block.scope_ids.usage_id,
'data-block-type': block.scope_ids.block_type,
'data-xsrf-token': utils.XsrfTokenManager.create_xsrf_token(
XBLOCK_XSRF_TOKEN_NAME)}
if block.name:
data['data-name'] = block.name
class FragmentText(safe_dom.Text):
"""Class to insert the fragment content into the safe_dom node."""
def __init__(self, value):
self._value = unicode(value)
@property
def sanitized(self):
return self._value
div = safe_dom.Element('div', className='xblock', **data)
div.add_child(FragmentText(frag.body_html()))
wrapped.add_content(unicode(div))
wrapped.add_frag_resources(frag)
return wrapped
def _usage_id_from_node(self, node, parent_id, _id_generator):
"""Override import method from XBlock runtime."""
block_type = node.tag
usage_id = node.get('usage_id')
if usage_id is None:
# In Course Builder the usages and defs are in 1-1
            # correspondence, so for definiteness, make ids the same
def_id = _id_generator.create_definition(block_type)
usage_id = _id_generator.create_usage(def_id, usage_id=def_id)
else:
# Test whether or not the usage is already in the datastore. If it
# is not present, there will be a NoSuchUsage exception.
try:
def_id = self.id_reader.get_definition_id(usage_id)
except xblock.exceptions.NoSuchUsage:
# In Course Builder the usages and defs are in 1-1
                # correspondence, so for definiteness, make ids the same
def_id = usage_id
def_id = _id_generator.create_definition(
block_type, def_id=def_id)
_id_generator.create_usage(def_id, usage_id=usage_id)
keys = xblock.fields.ScopeIds(
xblock.fields.UserScope.NONE, block_type, def_id, usage_id)
block_class = self.mixologist.mix(self.load_block_type(block_type))
# Load the block's fields and clear out any existing children
block = self.construct_xblock_from_class(block_class, keys)
if hasattr(block, 'children'):
# We need to force an explict save of the 'children' field
# and so first we have to make it dirty
block.children = ['dirt']
block.save()
block.children = []
block.save()
# Reload the block and attach new children
block = block_class.parse_xml(node, self, keys, _id_generator)
block.parent = parent_id
block.save()
return usage_id
def export_to_xml(self, block, xmlfile):
"""Override export method from XBlock runtime."""
root = etree.Element('unknown_root', usage_id=block.scope_ids.usage_id)
tree = etree.ElementTree(root)
block.export_xml(root)
tree.write(
xmlfile, xml_declaration=True, encoding='utf8', pretty_print=True)
def add_block_as_child_node(self, block, node):
"""Override export method from XBlock runtime."""
child = etree.SubElement(
node, 'unknown', usage_id=block.scope_ids.usage_id)
block.export_xml(child)
def query(self, block):
# pylint: disable=protected-access
return workbench.runtime._BlockSet(self, [block])
# pylint: enable=protected-access
def handler_url(self, block, handler_name, suffix='', query=''):
return self.handler.canonicalize_url('%s?%s' % (
HANDLER_URI, urllib.urlencode({
'usage': block.scope_ids.usage_id,
'handler': handler_name,
'xsrf_token': utils.XsrfTokenManager.create_xsrf_token(
XBLOCK_XSRF_TOKEN_NAME)})))
def resource_url(self, resource):
return '%s/%s' % (XBLOCK_RESOURCES_URI, resource)
def local_resource_url(self, block, uri):
return '%s/%s/%s' % (
XBLOCK_LOCAL_RESOURCES_URI, block.scope_ids.block_type, uri)
def publish(self, block, event):
"""Log an XBlock event to the event stream.
Args:
block: XBlock. The XBlock which emitted the event.
event: dict. A JSON serializable dict containing the event data.
"""
if self.user_id is None:
return
wrapper = {
'usage': block.scope_ids.usage_id,
'type': block.scope_ids.block_type,
'event': event}
if utils.CAN_PERSIST_TAG_EVENTS.value:
m_models.EventEntity(
source=XBLOCK_EVENT_SOURCE_NAME,
user_id=self.user_id,
data=transforms.dumps(wrapper)).put()
def parse_xml_string(
self, xml_str, unused_id_generator, orig_xml_str=None,
dry_run=False, log=None):
"""Override parse_xml_string to make it asynchronous.
Calls to this method will execute using NDB's asynchronous API. In order
        to ensure all the Datastore RPCs terminate successfully, it is
        essential that some method higher up the call stack (e.g., the request
        handler) be decorated with @ndb.toplevel.
Args:
xml_str: str. The string of XML which will be parsed as XBlocks.
unused_id_generator: IdGenerator. The XBlock API allows the runtime
to use different usage- and definition-generators, but in this
implementation, the only write target is the App Engine
Datastore.
orig_xml_str: str. The XML representation of the existing block in
the datastore, if it exists.
dry_run: bool. If set True, then parse the XML but do not do any
datastore writes.
log: file-like. A buffer to write back the XML representation of the
XBlock tree which has been assembled.
Returns:
str. The usage id of the root block of the XML tree.
"""
if orig_xml_str is None:
orig_xml_str = ''
if log is None:
log = StringIO()
id_manager = MemoryIdManager()
dict_key_value_store = xblock.runtime.DictKeyValueStore()
old_id_reader = self.id_reader
self.id_reader = id_manager
old_field_data = self.field_data
self.field_data = xblock.runtime.KvsFieldData(dict_key_value_store)
try:
root_usage_id = super(Runtime, self).parse_xml_string(
xml_str, id_manager)
block = self.get_block(root_usage_id)
self.export_to_xml(block, log)
finally:
self.id_reader = old_id_reader
self.field_data = old_field_data
if dry_run or log.getvalue() == orig_xml_str:
return root_usage_id
entities = []
for key, value in dict_key_value_store.db_dict.iteritems():
ndb_key = ndb.Key(store.KeyValueEntity, store.key_string(key))
kv_entity = store.KeyValueEntity(key=ndb_key)
kv_entity.value = value
entities.append(kv_entity)
for def_id, block_type in id_manager._definitions.iteritems():
ndb_key = ndb.Key(store.DefinitionEntity, def_id)
def_entity = store.DefinitionEntity(key=ndb_key)
def_entity.block_type = block_type
entities.append(def_entity)
for usage_id, def_id in id_manager._usages.iteritems():
ndb_key = ndb.Key(store.UsageEntity, usage_id)
usage_entity = store.UsageEntity(key=ndb_key)
usage_entity.definition_id = def_id
entities.append(usage_entity)
ndb.put_multi_async(entities)
return root_usage_id
class XBlockActionHandler(utils.BaseHandler):
def _handle_request(self):
def fix_ajax_request_body(body):
# The XBlock ajax clients send JSON strings in the POST body, but if
# the content-type is not explicitly set to application/json then
# the handler receives name=value pairs in url-encoded
# strings.
return urllib.unquote(
body[:-1]) if body and body[-1] == '=' else body
student_id = get_enrolled_user_id_or_guest_user_id(self)
token = self.request.get('xsrf_token')
if not utils.XsrfTokenManager.is_xsrf_token_valid(
token, XBLOCK_XSRF_TOKEN_NAME):
self.error(400)
return
usage_id = self.request.get('usage')
handler_name = self.request.get('handler')
rt = Runtime(self, student_id=student_id)
block = rt.get_block(usage_id)
self.request.body = fix_ajax_request_body(self.request.body)
response = block.runtime.handle(block, handler_name, self.request)
self.response.body = response.body
self.response.headers.update(response.headers)
def get(self):
self._handle_request()
def post(self):
self._handle_request()
# Data model section
class RootUsageEntity(m_models.BaseEntity):
"""Datastore entiry for root usage objects.
Application code should not access this object direct. Use RootUsageDto
and RootUsageDao instead.
"""
data = db.TextProperty(indexed=False)
class RootUsageDto(object):
"""A root usage identifies the root of a tree of XBlocks.
Application code should use this data transfer object (DTO) class and the
associated DAO to interact with the datastore.
"""
def __init__(self, the_id, the_dict):
self.id = the_id
self.dict = the_dict
@property
def description(self):
return self.dict.get('description', '')
@property
def usage_id(self):
return self.dict.get('usage_id', '')
@property
def is_imported(self):
"""Whether the usage was created as an import of an archive file.
Imported root usage entities are wiped and re-inserted when a new
archive is merged in; non-imported entities are left alone.
Returns:
bool. Whether the usage was created as part of an import.
"""
return self.dict.get('is_imported', False)
class RootUsageDao(m_models.BaseJsonDao):
"""DAO for CRUD operations on root usage objects."""
DTO = RootUsageDto
ENTITY = RootUsageEntity
# XBlock editor section
EDITOR_HANDLERS = ['add_xblock', 'edit_xblock', 'import_xblock']
_orig_get_template = dashboard.DashboardHandler.get_template
def _get_template(the_dashboard, template_name, dirs):
return _orig_get_template(
the_dashboard, template_name, dirs + [os.path.join(
appengine_config.BUNDLE_ROOT, 'modules', 'xblock_module')])
def _add_editor_to_dashboard():
for handler in EDITOR_HANDLERS:
dashboard.DashboardHandler.get_actions.append(handler)
setattr(
dashboard.DashboardHandler, 'get_%s' % handler,
globals()['_get_%s' % handler])
setattr(dashboard.DashboardHandler, 'get_template', _get_template)
dashboard.DashboardHandler.contrib_asset_listers.append(list_xblocks)
dashboard.DashboardHandler.child_routes.append(
[XBlockEditorRESTHandler.URI, XBlockEditorRESTHandler])
dashboard.DashboardHandler.child_routes.append(
[XBlockArchiveRESTHandler.URI, XBlockArchiveRESTHandler])
dashboard.DashboardHandler.child_routes.append(
[XBlockArchiveProgressQueryHandler.URI, XBlockArchiveProgressQueryHandler])
def _remove_editor_from_dashboard():
for handler in EDITOR_HANDLERS:
dashboard.DashboardHandler.get_actions.remove(handler)
delattr(dashboard.DashboardHandler, 'get_%s' % handler)
setattr(dashboard.DashboardHandler, 'get_template', _orig_get_template)
dashboard.DashboardHandler.contrib_asset_listers.remove(list_xblocks)
dashboard.DashboardHandler.child_routes.remove(
[XBlockEditorRESTHandler.URI, XBlockEditorRESTHandler])
dashboard.DashboardHandler.child_routes.remove(
[XBlockArchiveRESTHandler.URI, XBlockArchiveRESTHandler])
dashboard.DashboardHandler.child_routes.remove(
[XBlockArchiveProgressQueryHandler.URI, XBlockArchiveProgressQueryHandler])
def list_xblocks(the_dashboard):
"""Prepare a list of the root XBlock usages installed."""
if not filer.is_editable_fs(the_dashboard.app_context):
return safe_dom.NodeList()
output = safe_dom.NodeList()
import_button_text = 'Import'
if courses.Course(the_dashboard).get_units():
import_button_text = 'Merge'
output.append(
safe_dom.Element(
'a', className='gcb-button gcb-pull-right',
href='dashboard?action=import_xblock'
).add_text(import_button_text)
)
output.append(
safe_dom.Element(
'a', className='gcb-button gcb-pull-right',
href='dashboard?action=add_xblock'
).add_text('Add XBlock')
).append(
safe_dom.Element('div', style='clear: both; padding-top: 2px;')
).append(safe_dom.Element('h3').add_text('XBlocks'))
root_usages = sorted(
RootUsageDao.get_all(), key=lambda x: x.description.lower())
if root_usages:
ol = safe_dom.Element('ol')
for root_usage in root_usages:
edit_url = 'dashboard?action=edit_xblock&key=%s' % root_usage.id
li = safe_dom.Element('li')
li.add_text(root_usage.description).add_child(
safe_dom.Entity(' ')
).add_child(
safe_dom.Element('a', href=edit_url).add_text('[Edit]'))
ol.add_child(li)
output.append(ol)
else:
output.append(safe_dom.Element('blockquote').add_text('< none >'))
return output
def _render_editor(the_dashboard, key=None, title=None, description=None):
key = key or ''
rest_url = the_dashboard.canonicalize_url(XBlockEditorRESTHandler.URI)
exit_url = the_dashboard.canonicalize_url('/dashboard?action=assets')
delete_url = None
if key:
delete_url = '%s?%s' % (
the_dashboard.canonicalize_url(XBlockEditorRESTHandler.URI),
urllib.urlencode({
'key': key,
'xsrf_token': cgi.escape(the_dashboard.create_xsrf_token(
XBlockEditorRESTHandler.XSRF_TOKEN))}))
main_content = oeditor.ObjectEditor.get_html_for(
the_dashboard,
XBlockEditorRESTHandler.SCHEMA.get_json_schema(),
XBlockEditorRESTHandler.SCHEMA.get_schema_dict(),
key, rest_url, exit_url,
delete_url=delete_url, delete_method='delete',
required_modules=XBlockEditorRESTHandler.REQUIRED_MODULES)
template_values = {
'page_title': the_dashboard.format_title(title),
'page_title_linked': the_dashboard.format_title(title, as_link=True),
'page_description': description,
'main_content': main_content}
the_dashboard.render_page(template_values)
def _get_add_xblock(the_dashboard):
_render_editor(
the_dashboard, title=messages.ADD_XBLOCK_TITLE,
description=messages.ADD_XBLOCK_DESCRIPTION)
def _get_edit_xblock(the_dashboard):
_render_editor(
the_dashboard, key=the_dashboard.request.get('key'),
title=messages.EDIT_XBLOCK_TITLE,
description=messages.EDIT_XBLOCK_DESCRIPTION)
def _get_import_xblock(the_dashboard):
"""Render the screen for uploading an XBlock course tar.gx file."""
rest_url = the_dashboard.canonicalize_url(XBlockArchiveRESTHandler.URI)
exit_url = the_dashboard.canonicalize_url('/dashboard?action=assets')
extra_js_files = []
extra_js_files.append('resources/import.js')
if courses.Course(the_dashboard).get_units():
extra_js_files.append('resources/merge.js')
main_content = oeditor.ObjectEditor.get_html_for(
the_dashboard,
XBlockArchiveRESTHandler.SCHEMA.get_json_schema(),
XBlockArchiveRESTHandler.SCHEMA.get_schema_dict(),
None, rest_url, exit_url,
delete_url=None,
auto_return=False,
save_method='upload',
save_button_caption='Import',
required_modules=XBlockArchiveRESTHandler.REQUIRED_MODULES,
extra_css_files=['resources/import.css'],
extra_js_files=extra_js_files)
template_values = {
'page_title': messages.IMPORT_COURSE_PAGE_TITLE,
'page_description': messages.IMPORT_COURSE_PAGE_DESCRIPTION,
'main_content': main_content}
the_dashboard.render_page(template_values)
class XBlockEditorRESTHandler(utils.BaseRESTHandler):
URI = '/rest/xblock'
SCHEMA = schema_fields.FieldRegistry('XBlock', description='XBlock XML')
SCHEMA.add_property(
schema_fields.SchemaField('xml', 'XML', 'text', optional=True))
SCHEMA.add_property(
schema_fields.SchemaField(
'description', 'Description', 'string', optional=True,
description=messages.XBLOCK_DESCRIPTION_FIELD))
REQUIRED_MODULES = []
XSRF_TOKEN = 'xblock-edit'
def get(self):
key = self.request.get('key')
if not unit_lesson_editor.CourseOutlineRights.can_view(self):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
payload_dict = {'xml': '', 'description': ''}
if key:
root_usage = RootUsageDao.load(key)
rt = Runtime(self, is_admin=True)
block = rt.get_block(root_usage.usage_id)
xml_buffer = StringIO()
rt.export_to_xml(block, xml_buffer)
payload_dict = {
'xml': xml_buffer.getvalue(),
'description': root_usage.description}
transforms.send_json_response(
self, 200, 'Success',
payload_dict=payload_dict,
xsrf_token=utils.XsrfTokenManager.create_xsrf_token(
self.XSRF_TOKEN))
def import_and_validate(self, key, unvalidated_dict):
errors = []
try:
validated_dict = transforms.json_to_dict(
unvalidated_dict, self.SCHEMA.get_json_schema_dict())
except ValueError as err:
errors.append(str(err))
return (None, errors)
if not validated_dict.get('description'):
errors.append('Missing description field')
descriptions = {
root.description for root in RootUsageDao.get_all()
if not key or root.id != long(key)}
if validated_dict['description'] in descriptions:
errors.append(
'The description must be different from existing XBlocks.')
if not validated_dict.get('xml'):
errors.append('Missing XML data')
return validated_dict, errors
@ndb.toplevel
def put(self):
request = transforms.loads(self.request.get('request'))
key = request.get('key') or None
if not self.assert_xsrf_token_or_fail(
request, self.XSRF_TOKEN, {'key': key}):
return
if not unit_lesson_editor.CourseOutlineRights.can_edit(self):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
payload, errors = self.import_and_validate(
key, transforms.loads(request.get('payload')))
if errors:
self.validation_error('\n'.join(errors), key=key)
return
try:
rt = Runtime(self, is_admin=True)
usage_id = rt.parse_xml_string(
unicode(payload['xml']).encode('utf_8'), None)
except Exception as e: # pylint: disable=broad-except
transforms.send_json_response(self, 412, str(e))
return
root_usage = RootUsageDto(
key, {'description': payload['description'], 'usage_id': usage_id})
key = RootUsageDao.save(root_usage)
transforms.send_json_response(
self, 200, 'Saved.', payload_dict={'key': key})
def delete(self):
key = self.request.get('key')
if not self.assert_xsrf_token_or_fail(
self.request, self.XSRF_TOKEN, {'key': key}):
return
if not unit_lesson_editor.CourseOutlineRights.can_edit(self):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
# TODO(jorr): Remove the tree from the UsageStore?
RootUsageDao.delete(RootUsageDto(key, {}))
transforms.send_json_response(self, 200, 'Deleted.')
class XBlockArchiveRESTHandler(utils.BaseRESTHandler):
"""Provide the REST API for importing XBlock archives."""
URI = '/rest/xblock_archive'
SCHEMA = schema_fields.FieldRegistry('XBlock', description='XBlock XML')
SCHEMA.add_property(
schema_fields.SchemaField(
'file', 'File', 'string', optional=True,
description=messages.XBLOCK_ARCHIVE_FIELD,
extra_schema_dict_values={'_type': 'file'}))
SCHEMA.add_property(
schema_fields.SchemaField(
'dry_run', 'Dry Run', 'boolean', optional=True,
description=messages.XBLOCK_ARCHIVE_DRY_RUN))
REQUIRED_MODULES = ['inputex-file', 'io-upload-iframe', 'inputex-checkbox']
XSRF_TOKEN = 'xblock-import'
def get(self):
"""Provide empty inital content for import editor."""
transforms.send_json_response(
self, 200, 'Success',
payload_dict={
'file': '',
'upload_url': blobstore.create_upload_url(
self.canonicalize_url(self.URI)),
'poller_url': self.canonicalize_url(
XBlockArchiveProgressQueryHandler.URI)
},
xsrf_token=utils.XsrfTokenManager.create_xsrf_token(
self.XSRF_TOKEN))
def post(self):
assert courses.is_editable_fs(self.app_context)
request = transforms.loads(self.request.get('request'))
if not self.assert_xsrf_token_or_fail(
request, self.XSRF_TOKEN, {'key': ''}):
return
if (not unit_lesson_editor.CourseOutlineRights.can_edit(self) or
not filer.FilesRights.can_add(self)):
transforms.send_file_upload_response(
self, 401, 'Access denied.')
return
try:
payload = transforms.json_to_dict(
transforms.loads(request.get('payload')),
self.SCHEMA.get_json_schema_dict())
except ValueError as err:
transforms.send_file_upload_response(self, 412, str(err))
return
dry_run = payload.get('dry_run', False)
upload = self.request.POST['file']
if not isinstance(upload, cgi.FieldStorage):
transforms.send_file_upload_response(
self, 403, 'No file specified.')
return
blob_key = blobstore.parse_blob_info(upload).key()
XBlockArchiveJob(
self.app_context, blob_key=blob_key, dry_run=dry_run).submit()
# Pass a new upload url back to the page for future uploads
new_upload_url = blobstore.create_upload_url(
self.canonicalize_url(self.URI))
transforms.send_file_upload_response(
self, 200, 'Processing upload...',
payload_dict={'new_upload_url': new_upload_url})
class XBlockArchiveJob(jobs.DurableJob):
"""The offline job which handles installing an uploaded archive file."""
def __init__(self, app_context, blob_key=None, dry_run=True):
super(XBlockArchiveJob, self).__init__(app_context)
self.app_context = app_context
self.blob_key = blob_key
self.dry_run = dry_run
@ndb.toplevel
def run(self):
def status(success_flag, message):
return {
'success': success_flag,
'message': message}
blob_info = blobstore.BlobInfo.get(self.blob_key)
try:
fileobj = blobstore.BlobReader(
self.blob_key, buffer_size=1024 * 1024)
archive = tarfile.open(fileobj=fileobj, mode='r:gz')
except Exception as e: # pylint: disable=broad-except
return status(False, 'Unable to read the archive file: %s' % e)
try:
course = courses.Course(None, app_context=self.app_context)
rt = Runtime(self, is_admin=True)
journal = []
importer = Importer(
archive=archive, course=course, fs=self.app_context.fs.impl,
rt=rt, dry_run=self.dry_run, journal=journal)
importer.parse()
validation_errors = importer.validate()
if validation_errors:
return status(
False, 'Import failed: %s' % '\n'.join(validation_errors))
importer.do_import()
if self.dry_run:
return status(
True,
'Upload successfully validated:\n%s' % '\n'.join(journal))
course.save()
except Exception as e: # pylint: disable=broad-except
logging.exception('Import failed')
return status(False, 'Import failed: %s' % e)
finally:
archive.close()
return status(
True, 'Upload successfully imported:\n%s' % '\n'.join(journal))
class XBlockArchiveProgressQueryHandler(utils.BaseRESTHandler):
"""A handler to respond to Ajax polling on the progress of the import."""
URI = '/rest/xblock_archive_progress'
def get(self):
job = XBlockArchiveJob(self.app_context)
if job.is_active():
payload_dict = {'complete': False}
else:
payload_dict = {
'complete': True,
'output': job.load().output}
transforms.send_json_response(
self, 200, 'Polling', payload_dict=payload_dict)
class BadImportException(Exception):
"""Exception raised when in Importer."""
pass
class Differ(object):
"""Base class for tracking the difference between two lists of objects.
    The types of object in the two lists need not be the same, and so subclasses
    must implement methods which extract an 'id' from members of the 'old list'
    and the 'new list'. The result will be three groups:
        unbound: the set of objects in the old list which have no ids.
        bindings: a dict of mappings of ids from the new list to objects with the
            same id in the old list.
        orphans: the set of objects in the old list which have ids but do not
            correspond to the ids of any elements in the new list.
"""
def __init__(self, new_objects, old_objects):
self.unbound = set()
self._new_ids = set()
self.bindings = {}
self.orphans = set()
for new in new_objects:
_id = self.get_new_id(new)
assert _id
self._new_ids.add(_id)
for old in old_objects:
_id = self.get_old_id(old)
if not _id:
self.unbound.add(old)
elif _id in self._new_ids:
self.bindings[_id] = old
else:
self.orphans.add(old)
def get_new_id(self, new):
raise NotImplementedError()
def get_old_id(self, old):
raise NotImplementedError()
def bind(self, new, old):
raise NotImplementedError()
class Sequential2LessonMapper(Differ):
"""A class that handles mapping sequentials to lessons."""
def __init__(self, importer, chapter, unit):
super(Sequential2LessonMapper, self).__init__(
chapter, importer.course.get_lessons(unit.unit_id))
def get_new_id(self, sequential):
return sequential.attrib['usage_id']
def get_old_id(self, lesson):
return lesson.properties.get('xblock.usage_id')
def bind(self, sequential, lesson):
lesson.properties['xblock.usage_id'] = sequential.attrib['usage_id']
class Chapter2UnitMapper(Differ):
"""A class that handles mapping chapters to units."""
def __init__(self, importer):
super(Chapter2UnitMapper, self).__init__(
importer.course_root, importer.course.get_units())
def get_new_id(self, chapter):
return chapter.attrib['usage_id']
def get_old_id(self, unit):
return unit.properties.get('xblock.usage_id')
def bind(self, chapter, unit):
unit.properties['xblock.usage_id'] = chapter.attrib['usage_id']
class Importer(object):
"""Manages the import of an XBlock archive file."""
def __init__(
self, archive=None, course=None, fs=None, rt=None, dry_run=False,
journal=None):
self.archive = archive
self.course = course
self.fs = fs
self.rt = rt
self.dry_run = dry_run
self.base = self._get_base_folder_name()
self.course_root = None
self.journal = journal if journal is not None else []
def parse(self):
"""Assemble the XML files in the archive into a single DOM."""
course_file = self.archive.extractfile('%s/course.xml' % self.base)
self.course_root = self._walk_tree(
cElementTree.parse(course_file).getroot())
def validate(self):
"""Check that the course structure is compatible with CB."""
errors = []
# the root must be a course
if self.course_root.tag != 'course':
errors.append('There is no root course tag.')
# The immediate children must be chapters
for child in self.course_root:
if child.tag != 'chapter':
errors.append('All content must be in chapters.')
break
# The grandchildren must be sequentials
for grandchild in child:
if grandchild.tag != 'sequential':
errors.append('Chapters may only contain sequentials.')
break
return errors
def _update_unit(self, chapter, unit):
new_title = chapter.attrib['display_name']
old_title = unit.title
unit.title = new_title
self.journal.append('Update unit title from \'%s\' to \'%s\'' % (
old_title, new_title))
def _create_unit(self, chapter):
assert chapter.tag == 'chapter'
unit = self.course.add_unit()
unit.title = chapter.attrib['display_name']
self.journal.append('Create unit \'%s\'' % unit.title)
return unit
def _update_lesson(self, sequential, lesson):
new_title = sequential.attrib['display_name']
old_title = lesson.title
lesson.title = new_title
self.journal.append('Update lesson title from \'%s\' to \'%s\'' % (
old_title, new_title))
def _create_lesson(self, sequential, unit):
assert sequential.tag == 'sequential'
lesson = self.course.add_lesson(unit)
lesson.title = sequential.attrib['display_name']
self.journal.append('Create lesson \'%s\'' % lesson.title)
return lesson
def _update_lesson_xblock_content(self, sequential, unit, lesson):
xml_buffer = StringIO()
cElementTree.ElementTree(element=sequential).write(xml_buffer)
orig_xml_buff = StringIO()
new_xml_buff = StringIO()
# Get the original XML repr of this sequential for comparison
usage_id = sequential.attrib['usage_id']
try:
orig_xml = self.rt.get_block(usage_id)
self.rt.export_to_xml(orig_xml, orig_xml_buff)
except xblock.exceptions.NoSuchUsage:
pass # Buffer will be empty
usage_id = self.rt.parse_xml_string(
xml_buffer.getvalue(), None, orig_xml_str=orig_xml_buff.getvalue(),
dry_run=self.dry_run, log=new_xml_buff)
# Journal the effect of the update
if orig_xml_buff.getvalue() == new_xml_buff.getvalue():
action = 'unchanged'
elif not orig_xml_buff.getvalue():
action = 'inserted'
else:
action = 'updated'
self.journal.append(
'XBlock content %(action)s in \'%(title)s\' (%(id)s)' % {
'action': action, 'title': lesson.title, 'id': usage_id})
# Insert a RootUsageEntity to link the lesson to the XBlock
description = 'Unit %s, Lesson %s: %s' % (
unit.index, lesson.index, lesson.title)
root_usage = RootUsageDto(
None, {
'description': description,
'usage_id': usage_id,
'is_imported': True})
root_id = RootUsageDao.save(root_usage) if not self.dry_run else 'xxx'
# insert the xblock asset into lesson content
lesson.objectives = '<xblock root_id="%s"></xblock>' % root_id
def _delete_all_imported_root_usage_dtos(self):
dao = RootUsageDao()
for dto in RootUsageDao.get_all():
if dto.is_imported:
dao.delete(dto)
def do_import(self):
"""Perform the import and create resources in CB."""
finalize_writes_callback = self._import_static_files()
if not self.dry_run:
self._delete_all_imported_root_usage_dtos()
cu_mapper = Chapter2UnitMapper(self)
for chapter in self.course_root:
chapter_usage_id = chapter.attrib['usage_id']
unit = cu_mapper.bindings.get(chapter_usage_id)
if unit:
self._update_unit(chapter, unit)
else:
unit = self._create_unit(chapter)
cu_mapper.bind(chapter, unit)
sl_mapper = Sequential2LessonMapper(self, chapter, unit)
for sequential in chapter:
sequential_usage_id = sequential.attrib['usage_id']
lesson = sl_mapper.bindings.get(sequential_usage_id)
if lesson:
self._update_lesson(sequential, lesson)
else:
lesson = self._create_lesson(sequential, unit)
sl_mapper.bind(sequential, lesson)
self._update_lesson_xblock_content(sequential, unit, lesson)
for lesson in sl_mapper.orphans:
self.journal.append('Delete lesson \'%s\'' % lesson.title)
self.course.delete_lesson(lesson)
for unit in cu_mapper.orphans:
self.journal.append('Delete unit \'%s\'' % unit.title)
self.course.delete_unit(unit)
# Wait for async db operations to complete
finalize_writes_callback()
def _get_base_folder_name(self):
for member in self.archive.getmembers():
if member.isdir() and '/' not in member.name:
return member.name
return None
def _walk_tree(self, node):
if 'url_name' in node.attrib:
# If the node refers to another file. open it and merge it in
target_path = '%s/%s/%s.xml' % (
self.base, node.tag, node.attrib['url_name'])
target_file = self.archive.extractfile(target_path)
sub_tree = self._walk_tree(
cElementTree.parse(target_file).getroot())
sub_tree.attrib['usage_id'] = node.attrib['url_name']
return sub_tree
elif node.tag == 'html':
if 'filename' in node.attrib:
# If the node is an <html/> block with externalized content,
# read it in.
target_path = '%s/html/%s.html' % (
self.base, node.attrib['filename'])
target_file = self.archive.extractfile(target_path)
node.append(tags.html_string_to_element_tree(
target_file.read().decode('utf8')))
del node.attrib['filename']
self._rebase_html_refs(node)
return node
else:
for index, child in enumerate(node):
new_child = self._walk_tree(child)
node.remove(child)
node.insert(index, new_child)
return node
def _rebase_html_refs(self, node):
"""Rebase HTML references based on /static to use CB namespace."""
for attr in ['href', 'src']:
if node.attrib.get(attr, '').startswith('/static/'):
node.attrib[attr] = 'assets/img%s' % node.attrib[attr]
for child in node:
self._rebase_html_refs(child)
def _import_static_files(self):
filedata_list = []
for member in self.archive.getmembers():
if member.isfile() and member.name.startswith(
'%s/static/' % self.base):
self._insert_filedata(filedata_list, member)
return self.fs.put_multi_async(filedata_list)
def _insert_filedata(self, filedata_list, member):
"""Extract the tarfile member into /assets/img/static."""
ph_path = '/assets/img/%s' % member.name[len(self.base) + 1:]
path = self.fs.physical_to_logical(ph_path)
if self.fs.isfile(path):
self.journal.append('Updating file \'%s\'' % ph_path)
else:
self.journal.append('Inserting file \'%s\'' % ph_path)
if member.size > MAX_ASSET_UPLOAD_SIZE_K * 1024:
raise BadImportException(
'Cannot upload files bigger than %s K' %
MAX_ASSET_UPLOAD_SIZE_K)
if self.dry_run:
return
filedata_list.append((path, self.archive.extractfile(member)))
# XBlock component tag section
GUEST_USER_SESSION_COOKIE = 'cb-guest-session'
GUEST_USER_SESSION_COOKIE_MAX_AGE_SEC = 48 * 60 * 60 # 48 hours
def get_session_id_for_guest_user(handler):
session_cookie = handler.request.cookies.get(
GUEST_USER_SESSION_COOKIE, '')
# If the session cookie is missing or invalid, generate a new one
if not re.match('^[0-9a-f]{32}$', session_cookie):
session_cookie = uuid.uuid4().hex
handler.response.set_cookie(
GUEST_USER_SESSION_COOKIE, session_cookie,
max_age=GUEST_USER_SESSION_COOKIE_MAX_AGE_SEC)
return 'guest-%s' % session_cookie
def get_enrolled_user_id_or_guest_user_id(handler):
"""Return a workable user id in every case.
If there is a user in session who has registered for the course, then return
their user id. Otherwise return a guest user id.
Args:
handler: BaseHandler. The request handler for the user session.
Returns:
string. A user ID.
"""
user = handler.get_user()
if user is None:
return get_session_id_for_guest_user(handler)
elif m_models.Student.get_enrolled_student_by_email(user.email()) is None:
return get_session_id_for_guest_user(handler)
else:
return str(user.user_id())
class XBlockTag(tags.ContextAwareTag):
binding_name = 'xblock'
@classmethod
def name(cls):
return 'Embedded XBlocks'
@classmethod
def vendor(cls):
return 'google'
def get_icon_url(self):
return RESOURCES_URI + '/xblock.png'
def get_schema(self, unused_handler):
"""Get the schema for specifying the question."""
root_list = [
(unicode(root.id), root.description)
for root in RootUsageDao.get_all()]
root_list.sort(key=lambda x: x[1].lower())
if not root_list:
return self.unavailable_schema('No XBlocks available')
reg = schema_fields.FieldRegistry('XBlocks')
reg.add_property(schema_fields.SchemaField(
'root_id', messages.XBLOCK_INSTANCE, 'string', optional=True,
select_data=root_list))
return reg
def render(self, node, context):
root_id = node.attrib.get('root_id')
usage_id = RootUsageDao.load(root_id).usage_id
student_id = get_enrolled_user_id_or_guest_user_id(context.handler)
runtime = Runtime(context.handler, student_id=student_id)
block = runtime.get_block(usage_id)
fragment = runtime.render(block, 'student_view')
fragment_list = context.env.get('fragment_list')
if fragment_list is None:
fragment_list = []
context.env['fragment_list'] = fragment_list
fragment_list.append(fragment)
return tags.html_string_to_element_tree(
'<div>%s</div>' % fragment.body_html())
def rollup_header_footer(self, context):
wrapper = xblock.fragment.Fragment()
for frag in context.env.get('fragment_list', []):
wrapper.add_frag_resources(frag)
return (
tags.html_string_to_element_tree(
'<div>%s</div>' % wrapper.head_html()),
tags.html_string_to_element_tree(
'<div>%s</div>' % wrapper.foot_html()))
class XBlockResourcesHandler(tags.ResourcesHandler):
"""Resource handler to serve static files from XBlock workbench."""
def rebase_path(self, path):
assert path.startswith(XBLOCK_RESOURCES_URI)
return os.path.join(
WORKBENCH_STATIC_PATH,
os.path.normpath(path[len(XBLOCK_RESOURCES_URI) + 1:]))
class XBlockLocalResourceHandler(webapp2.RequestHandler):
"""Router for requests for a block's local resources."""
def get(self, block_type, resource):
xblock_class = xblock.core.XBlock.load_class(block_type)
mimetype = mimetypes.guess_type(resource)[0]
if mimetype is None:
mimetype = 'application/octet-stream'
self.response.status = 200
self.response.headers['Content-Type'] = mimetype
self.response.cache_control.no_cache = None
self.response.cache_control.public = 'public'
self.response.cache_control.max_age = 600
self.response.write(xblock_class.open_local_resource(resource).read())
# Data sanitization section
XBLOCK_EVENT_EXPORT_WHITELIST = {
'sequential', 'video', 'cbquestion', 'html', 'vertical'}
_orig_event_entity_for_export = None
def _set_new_event_entity_for_export_method():
"""Register the new for_export method on EventEntity."""
global _orig_event_entity_for_export
_orig_event_entity_for_export = m_models.EventEntity.for_export
m_models.EventEntity.for_export = _event_entity_for_export
def _set_orig_event_entity_for_export_method():
"""Restore the original for_export method on EventEntity."""
global _orig_event_entity_for_export
m_models.EventEntity.for_export = _orig_event_entity_for_export
_orig_event_entity_for_export = None
def _event_entity_for_export(model, transform_fn):
global _orig_event_entity_for_export
model = _orig_event_entity_for_export(model, transform_fn)
if model.source == XBLOCK_EVENT_SOURCE_NAME:
wrapper = transforms.loads(model.data)
if wrapper.get('type') not in XBLOCK_EVENT_EXPORT_WHITELIST:
model.data = transforms.dumps({
'usage': wrapper.get('usage'),
'type': wrapper.get('type'),
'event': transform_fn(transforms.dumps(wrapper.get('event')))
})
elif model.source == XBLOCK_TAG_EVENT_SOURCE_NAME:
wrapper = transforms.loads(model.data)
model.data = transforms.dumps({
'event': wrapper.get('event'),
'message': transform_fn(wrapper.get('message')),
'location': wrapper.get('location'),
'data': transform_fn(transforms.dumps(wrapper.get('data')))})
return model
# Module registration section
custom_module = None
def register_module():
"""Registers this module for use."""
def on_module_disabled():
_remove_editor_from_dashboard()
tags.Registry.remove_tag_binding(XBlockTag.binding_name)
for entity in [
dbmodels.DefinitionEntity, dbmodels.UsageEntity,
dbmodels.KeyValueEntity, RootUsageEntity]:
courses.COURSE_CONTENT_ENTITIES.remove(entity)
_set_orig_event_entity_for_export_method()
def on_module_enabled():
_add_editor_to_dashboard()
tags.Registry.add_tag_binding(
XBlockTag.binding_name, XBlockTag)
if not django.conf.settings.configured:
django.conf.settings.configure(
TEMPLATE_DIRS=[XBLOCK_TEMPLATES_PATH])
courses.COURSE_CONTENT_ENTITIES += [
dbmodels.DefinitionEntity, dbmodels.UsageEntity,
dbmodels.KeyValueEntity, RootUsageEntity]
_set_new_event_entity_for_export_method()
global_routes = [
(RESOURCES_URI + '/.*', tags.ResourcesHandler),
(XBLOCK_RESOURCES_URI + '/.*', XBlockResourcesHandler),
(
XBLOCK_LOCAL_RESOURCES_URI + r'/([^/]*)/(.*)',
XBlockLocalResourceHandler),
(MATHJAX_URI + '/(fonts/.*)', sites.make_zip_handler(os.path.join(
appengine_config.BUNDLE_ROOT, 'lib', 'MathJax-fonts.zip'))),
(MATHJAX_URI + '/(.*)', sites.make_zip_handler(os.path.join(
appengine_config.BUNDLE_ROOT, 'lib', 'MathJax.zip')))]
namespaced_routes = [(HANDLER_URI, XBlockActionHandler)]
global custom_module
custom_module = custom_modules.Module(
'Support for XBlocks within Course Builder',
'Adds the ability to use XBlock content within Course Builder.',
global_routes, namespaced_routes,
notify_module_disabled=on_module_disabled,
notify_module_enabled=on_module_enabled,
)
return custom_module<|fim▁end|>
|
from controllers import utils
import dbmodels
|
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package server
import (
"crypto/tls"
"encoding/base64"
"encoding/json"
"fmt"
"io"
"net"
"net/http"
"os"
"runtime"
"strconv"
"strings"
"time"
"github.com/gorilla/mux"
"golang.org/x/net/websocket"
"github.com/Sirupsen/logrus"
"github.com/docker/docker/api"
"github.com/docker/docker/api/types"
"github.com/docker/docker/autogen/dockerversion"
"github.com/docker/docker/builder"
"github.com/docker/docker/cliconfig"
"github.com/docker/docker/daemon"
"github.com/docker/docker/graph"
"github.com/docker/docker/pkg/ioutils"
"github.com/docker/docker/pkg/jsonmessage"
"github.com/docker/docker/pkg/parsers"
"github.com/docker/docker/pkg/parsers/filters"
"github.com/docker/docker/pkg/parsers/kernel"
"github.com/docker/docker/pkg/signal"
"github.com/docker/docker/pkg/sockets"
"github.com/docker/docker/pkg/stdcopy"
"github.com/docker/docker/pkg/streamformatter"
"github.com/docker/docker/pkg/version"
"github.com/docker/docker/runconfig"
"github.com/docker/docker/utils"
)
type ServerConfig struct {
Logging bool
EnableCors bool
CorsHeaders string
Version string
SocketGroup string
TLSConfig *tls.Config
}
type Server struct {
daemon *daemon.Daemon
cfg *ServerConfig
router *mux.Router
start chan struct{}
servers []serverCloser
}
func New(cfg *ServerConfig) *Server {
srv := &Server{
cfg: cfg,
start: make(chan struct{}),
}
r := createRouter(srv)
srv.router = r
return srv
}
func (s *Server) Close() {
for _, srv := range s.servers {
if err := srv.Close(); err != nil {
logrus.Error(err)
}
}
}
type serverCloser interface {
Serve() error
Close() error
}
// ServeApi loops through all of the protocols sent in to docker and spawns
// off a goroutine to set up a serving http.Server for each.
func (s *Server) ServeApi(protoAddrs []string) error {
var chErrors = make(chan error, len(protoAddrs))
for _, protoAddr := range protoAddrs {
protoAddrParts := strings.SplitN(protoAddr, "://", 2)
if len(protoAddrParts) != 2 {
return fmt.Errorf("bad format, expected PROTO://ADDR")
}
srv, err := s.newServer(protoAddrParts[0], protoAddrParts[1])
if err != nil {
return err
}
s.servers = append(s.servers, srv...)
for _, s := range srv {
logrus.Infof("Listening for HTTP on %s (%s)", protoAddrParts[0], protoAddrParts[1])
go func(s serverCloser) {
if err := s.Serve(); err != nil && strings.Contains(err.Error(), "use of closed network connection") {
err = nil
}
chErrors <- err
}(s)
}
}
for i := 0; i < len(protoAddrs); i++ {
err := <-chErrors
if err != nil {
return err
}
}
return nil
}
type HttpServer struct {
srv *http.Server
l net.Listener
}
func (s *HttpServer) Serve() error {
return s.srv.Serve(s.l)
}
func (s *HttpServer) Close() error {
return s.l.Close()
}
type HttpApiFunc func(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error
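// hijackServer takes over the underlying TCP connection of the HTTP response
// so handlers can stream raw data in both directions (used by attach and exec).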
func hijackServer(w http.ResponseWriter) (io.ReadCloser, io.Writer, error) {
conn, _, err := w.(http.Hijacker).Hijack()
if err != nil {
return nil, nil, err
}
// Flush the options to make sure the client sets the raw mode
conn.Write([]byte{})
return conn, conn, nil
}
func closeStreams(streams ...interface{}) {
for _, stream := range streams {
if tcpc, ok := stream.(interface {
CloseWrite() error
}); ok {
tcpc.CloseWrite()
} else if closer, ok := stream.(io.Closer); ok {
closer.Close()
}
}
}
// Check to make sure request's Content-Type is application/json
func checkForJson(r *http.Request) error {
ct := r.Header.Get("Content-Type")
// No Content-Type header is ok as long as there's no Body
if ct == "" {
if r.Body == nil || r.ContentLength == 0 {
return nil
}
}
// Otherwise it better be json
if api.MatchesContentType(ct, "application/json") {
return nil
}
return fmt.Errorf("Content-Type specified (%s) must be 'application/json'", ct)
}
// If we don't do this, a POST without a Content-Type header (even with an empty body) will fail
func parseForm(r *http.Request) error {
if r == nil {
return nil
}
if err := r.ParseForm(); err != nil && !strings.HasPrefix(err.Error(), "mime:") {
return err
}
return nil
}
func parseMultipartForm(r *http.Request) error {
if err := r.ParseMultipartForm(4096); err != nil && !strings.HasPrefix(err.Error(), "mime:") {
return err
}
return nil
}
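// httpError maps well-known substrings of the error message to HTTP status
// codes and writes the error to the response; anything else becomes a 500.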
func httpError(w http.ResponseWriter, err error) {
if err == nil || w == nil {
logrus.WithFields(logrus.Fields{"error": err, "writer": w}).Error("unexpected HTTP error handling")
return
}
statusCode := http.StatusInternalServerError
// FIXME: this is brittle and should not be necessary.
// If we need to differentiate between different possible error types, we should
// create appropriate error types with clearly defined meaning.
errStr := strings.ToLower(err.Error())
for keyword, status := range map[string]int{
"not found": http.StatusNotFound,
"no such": http.StatusNotFound,
"bad parameter": http.StatusBadRequest,
"conflict": http.StatusConflict,
"impossible": http.StatusNotAcceptable,
"wrong login/password": http.StatusUnauthorized,
"hasn't been activated": http.StatusForbidden,
} {
if strings.Contains(errStr, keyword) {
statusCode = status
break
}
}
logrus.WithFields(logrus.Fields{"statusCode": statusCode, "err": err}).Error("HTTP Error")
http.Error(w, err.Error(), statusCode)
}
// writeJSON writes the value v to the http response stream as json with standard
// json encoding.
func writeJSON(w http.ResponseWriter, code int, v interface{}) error {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(code)
return json.NewEncoder(w).Encode(v)
}
func (s *Server) postAuth(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
var config *cliconfig.AuthConfig
err := json.NewDecoder(r.Body).Decode(&config)
r.Body.Close()
if err != nil {
return err
}
status, err := s.daemon.RegistryService.Auth(config)
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, &types.AuthResponse{
Status: status,
})
}
func (s *Server) getVersion(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
v := &types.Version{
Version: dockerversion.VERSION,
ApiVersion: api.Version,
GitCommit: dockerversion.GITCOMMIT,
GoVersion: runtime.Version(),
Os: runtime.GOOS,
Arch: runtime.GOARCH,
BuildTime: dockerversion.BUILDTIME,
}
if version.GreaterThanOrEqualTo("1.19") {
v.Experimental = utils.ExperimentalBuild()
}
if kernelVersion, err := kernel.GetKernelVersion(); err == nil {
v.KernelVersion = kernelVersion.String()
}
return writeJSON(w, http.StatusOK, v)
}
func (s *Server) postContainersKill(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
if err := parseForm(r); err != nil {
return err
}
var sig uint64
name := vars["name"]
// If we have a signal, look at it. Otherwise, do nothing
if sigStr := r.Form.Get("signal"); sigStr != "" {
// Check if we passed the signal as a number:
// The largest legal signal is 31, so let's parse on 5 bits
sigN, err := strconv.ParseUint(sigStr, 10, 5)
if err != nil {
// The signal is not a number, treat it as a string (either like
// "KILL" or like "SIGKILL")
syscallSig, ok := signal.SignalMap[strings.TrimPrefix(sigStr, "SIG")]
if !ok {
return fmt.Errorf("Invalid signal: %s", sigStr)
}
sig = uint64(syscallSig)
} else {
sig = sigN
}
if sig == 0 {
return fmt.Errorf("Invalid signal: %s", sigStr)
}
}
if err := s.daemon.ContainerKill(name, sig); err != nil {
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) postContainersPause(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
if err := parseForm(r); err != nil {
return err
}
if err := s.daemon.ContainerPause(vars["name"]); err != nil {
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) postContainersUnpause(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
if err := parseForm(r); err != nil {
return err
}
if err := s.daemon.ContainerUnpause(vars["name"]); err != nil {
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) getContainersExport(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
return s.daemon.ContainerExport(vars["name"], w)
}
func (s *Server) getImagesJSON(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
imagesConfig := graph.ImagesConfig{
Filters: r.Form.Get("filters"),
// FIXME this parameter could just be a match filter
Filter: r.Form.Get("filter"),
All: boolValue(r, "all"),
}
images, err := s.daemon.Repositories().Images(&imagesConfig)
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, images)
}
func (s *Server) getInfo(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
info, err := s.daemon.SystemInfo()
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, info)
}
func (s *Server) getEvents(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
var since int64 = -1
if r.Form.Get("since") != "" {
s, err := strconv.ParseInt(r.Form.Get("since"), 10, 64)
if err != nil {
return err
}
since = s
}
var until int64 = -1
if r.Form.Get("until") != "" {
u, err := strconv.ParseInt(r.Form.Get("until"), 10, 64)
if err != nil {
return err
}
until = u
}
timer := time.NewTimer(0)
timer.Stop()
if until > 0 {
dur := time.Unix(until, 0).Sub(time.Now())
timer = time.NewTimer(dur)
}
ef, err := filters.FromParam(r.Form.Get("filters"))
if err != nil {
return err
}
isFiltered := func(field string, filter []string) bool {
if len(filter) == 0 {
return false
}
for _, v := range filter {
if v == field {
return false
}
if strings.Contains(field, ":") {
image := strings.Split(field, ":")
if image[0] == v {
return false
}
}
}
return true
}
d := s.daemon
es := d.EventsService
w.Header().Set("Content-Type", "application/json")
enc := json.NewEncoder(ioutils.NewWriteFlusher(w))
getContainerId := func(cn string) string {
c, err := d.Get(cn)
if err != nil {
return ""
}
return c.ID
}
sendEvent := func(ev *jsonmessage.JSONMessage) error {
// The incoming container filter can be a name, ID or partial ID; convert and replace it with the full container ID
for i, cn := range ef["container"] {
ef["container"][i] = getContainerId(cn)
}
if isFiltered(ev.Status, ef["event"]) || isFiltered(ev.From, ef["image"]) ||
isFiltered(ev.ID, ef["container"]) {
return nil
}
return enc.Encode(ev)
}
current, l := es.Subscribe()
if since == -1 {<|fim▁hole|> if ev.Time < since {
continue
}
if err := sendEvent(ev); err != nil {
return err
}
}
var closeNotify <-chan bool
if closeNotifier, ok := w.(http.CloseNotifier); ok {
closeNotify = closeNotifier.CloseNotify()
}
for {
select {
case ev := <-l:
jev, ok := ev.(*jsonmessage.JSONMessage)
if !ok {
continue
}
if err := sendEvent(jev); err != nil {
return err
}
case <-timer.C:
return nil
case <-closeNotify:
logrus.Debug("Client disconnected, stop sending events")
return nil
}
}
}
func (s *Server) getImagesHistory(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
name := vars["name"]
history, err := s.daemon.Repositories().History(name)
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, history)
}
func (s *Server) getContainersChanges(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
changes, err := s.daemon.ContainerChanges(vars["name"])
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, changes)
}
func (s *Server) getContainersTop(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
if err := parseForm(r); err != nil {
return err
}
procList, err := s.daemon.ContainerTop(vars["name"], r.Form.Get("ps_args"))
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, procList)
}
func (s *Server) getContainersJSON(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
config := &daemon.ContainersConfig{
All: boolValue(r, "all"),
Size: boolValue(r, "size"),
Since: r.Form.Get("since"),
Before: r.Form.Get("before"),
Filters: r.Form.Get("filters"),
}
if tmpLimit := r.Form.Get("limit"); tmpLimit != "" {
limit, err := strconv.Atoi(tmpLimit)
if err != nil {
return err
}
config.Limit = limit
}
containers, err := s.daemon.Containers(config)
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, containers)
}
func (s *Server) getContainersStats(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
stream := boolValueOrDefault(r, "stream", true)
var out io.Writer
if !stream {
w.Header().Set("Content-Type", "application/json")
out = w
} else {
out = ioutils.NewWriteFlusher(w)
}
var closeNotifier <-chan bool
if notifier, ok := w.(http.CloseNotifier); ok {
closeNotifier = notifier.CloseNotify()
}
config := &daemon.ContainerStatsConfig{
Stream: stream,
OutStream: out,
Stop: closeNotifier,
}
return s.daemon.ContainerStats(vars["name"], config)
}
func (s *Server) getContainersLogs(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
// Validate args here, because we can't return a non-StatusOK status after the job.Run() call
stdout, stderr := boolValue(r, "stdout"), boolValue(r, "stderr")
if !(stdout || stderr) {
return fmt.Errorf("Bad parameters: you must choose at least one stream")
}
var since time.Time
if r.Form.Get("since") != "" {
s, err := strconv.ParseInt(r.Form.Get("since"), 10, 64)
if err != nil {
return err
}
since = time.Unix(s, 0)
}
var closeNotifier <-chan bool
if notifier, ok := w.(http.CloseNotifier); ok {
closeNotifier = notifier.CloseNotify()
}
logsConfig := &daemon.ContainerLogsConfig{
Follow: boolValue(r, "follow"),
Timestamps: boolValue(r, "timestamps"),
Since: since,
Tail: r.Form.Get("tail"),
UseStdout: stdout,
UseStderr: stderr,
OutStream: ioutils.NewWriteFlusher(w),
Stop: closeNotifier,
}
if err := s.daemon.ContainerLogs(vars["name"], logsConfig); err != nil {
fmt.Fprintf(w, "Error running logs job: %s\n", err)
}
return nil
}
func (s *Server) postImagesTag(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
repo := r.Form.Get("repo")
tag := r.Form.Get("tag")
force := boolValue(r, "force")
name := vars["name"]
if err := s.daemon.Repositories().Tag(repo, tag, name, force); err != nil {
return err
}
s.daemon.EventsService.Log("tag", utils.ImageReference(repo, tag), "")
w.WriteHeader(http.StatusCreated)
return nil
}
func (s *Server) postCommit(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if err := checkForJson(r); err != nil {
return err
}
cname := r.Form.Get("container")
pause := boolValue(r, "pause")
if r.FormValue("pause") == "" && version.GreaterThanOrEqualTo("1.13") {
pause = true
}
c, _, err := runconfig.DecodeContainerConfig(r.Body)
if err != nil && err != io.EOF { //Do not fail if body is empty.
return err
}
commitCfg := &builder.BuilderCommitConfig{
Pause: pause,
Repo: r.Form.Get("repo"),
Tag: r.Form.Get("tag"),
Author: r.Form.Get("author"),
Comment: r.Form.Get("comment"),
Changes: r.Form["changes"],
Config: c,
}
imgID, err := builder.Commit(cname, s.daemon, commitCfg)
if err != nil {
return err
}
return writeJSON(w, http.StatusCreated, &types.ContainerCommitResponse{
ID: imgID,
})
}
// Creates an image from Pull or from Import
func (s *Server) postImagesCreate(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
var (
image = r.Form.Get("fromImage")
repo = r.Form.Get("repo")
tag = r.Form.Get("tag")
)
authEncoded := r.Header.Get("X-Registry-Auth")
authConfig := &cliconfig.AuthConfig{}
if authEncoded != "" {
authJson := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
if err := json.NewDecoder(authJson).Decode(authConfig); err != nil {
// for a pull it is not an error if no auth was given
// to increase compatibility with the existing api it is defaulting to be empty
authConfig = &cliconfig.AuthConfig{}
}
}
var (
err error
output = ioutils.NewWriteFlusher(w)
)
w.Header().Set("Content-Type", "application/json")
if image != "" { //pull
if tag == "" {
image, tag = parsers.ParseRepositoryTag(image)
}
metaHeaders := map[string][]string{}
for k, v := range r.Header {
if strings.HasPrefix(k, "X-Meta-") {
metaHeaders[k] = v
}
}
imagePullConfig := &graph.ImagePullConfig{
MetaHeaders: metaHeaders,
AuthConfig: authConfig,
OutStream: output,
}
err = s.daemon.Repositories().Pull(image, tag, imagePullConfig)
} else { //import
if tag == "" {
repo, tag = parsers.ParseRepositoryTag(repo)
}
src := r.Form.Get("fromSrc")
imageImportConfig := &graph.ImageImportConfig{
Changes: r.Form["changes"],
InConfig: r.Body,
OutStream: output,
}
// 'err' MUST NOT be defined within this block, we need any error
// generated from the download to be available to the output
// stream processing below
var newConfig *runconfig.Config
newConfig, err = builder.BuildFromConfig(s.daemon, &runconfig.Config{}, imageImportConfig.Changes)
if err != nil {
return err
}
imageImportConfig.ContainerConfig = newConfig
err = s.daemon.Repositories().Import(src, repo, tag, imageImportConfig)
}
if err != nil {
if !output.Flushed() {
return err
}
sf := streamformatter.NewJSONStreamFormatter()
output.Write(sf.FormatError(err))
}
return nil
}
func (s *Server) getImagesSearch(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
var (
config *cliconfig.AuthConfig
authEncoded = r.Header.Get("X-Registry-Auth")
headers = map[string][]string{}
)
if authEncoded != "" {
authJson := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
if err := json.NewDecoder(authJson).Decode(&config); err != nil {
// for a search it is not an error if no auth was given
// to increase compatibility with the existing api it is defaulting to be empty
config = &cliconfig.AuthConfig{}
}
}
for k, v := range r.Header {
if strings.HasPrefix(k, "X-Meta-") {
headers[k] = v
}
}
query, err := s.daemon.RegistryService.Search(r.Form.Get("term"), config, headers)
if err != nil {
return err
}
return json.NewEncoder(w).Encode(query.Results)
}
func (s *Server) postImagesPush(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
metaHeaders := map[string][]string{}
for k, v := range r.Header {
if strings.HasPrefix(k, "X-Meta-") {
metaHeaders[k] = v
}
}
if err := parseForm(r); err != nil {
return err
}
authConfig := &cliconfig.AuthConfig{}
authEncoded := r.Header.Get("X-Registry-Auth")
if authEncoded != "" {
// the new format is to handle the authConfig as a header
authJson := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
if err := json.NewDecoder(authJson).Decode(authConfig); err != nil {
// to increase compatibility with the existing api it is defaulting to be empty
authConfig = &cliconfig.AuthConfig{}
}
} else {
// the old format is supported for compatibility if there was no authConfig header
if err := json.NewDecoder(r.Body).Decode(authConfig); err != nil {
return fmt.Errorf("Bad parameters and missing X-Registry-Auth: %v", err)
}
}
name := vars["name"]
output := ioutils.NewWriteFlusher(w)
imagePushConfig := &graph.ImagePushConfig{
MetaHeaders: metaHeaders,
AuthConfig: authConfig,
Tag: r.Form.Get("tag"),
OutStream: output,
}
w.Header().Set("Content-Type", "application/json")
if err := s.daemon.Repositories().Push(name, imagePushConfig); err != nil {
if !output.Flushed() {
return err
}
sf := streamformatter.NewJSONStreamFormatter()
output.Write(sf.FormatError(err))
}
return nil
}
func (s *Server) getImagesGet(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
if err := parseForm(r); err != nil {
return err
}
w.Header().Set("Content-Type", "application/x-tar")
output := ioutils.NewWriteFlusher(w)
imageExportConfig := &graph.ImageExportConfig{Outstream: output}
if name, ok := vars["name"]; ok {
imageExportConfig.Names = []string{name}
} else {
imageExportConfig.Names = r.Form["names"]
}
if err := s.daemon.Repositories().ImageExport(imageExportConfig); err != nil {
if !output.Flushed() {
return err
}
sf := streamformatter.NewJSONStreamFormatter()
output.Write(sf.FormatError(err))
}
return nil
}
func (s *Server) postImagesLoad(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
return s.daemon.Repositories().Load(r.Body, w)
}
func (s *Server) postContainersCreate(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if err := checkForJson(r); err != nil {
return err
}
var (
warnings []string
name = r.Form.Get("name")
)
config, hostConfig, err := runconfig.DecodeContainerConfig(r.Body)
if err != nil {
return err
}
adjustCpuShares(version, hostConfig)
containerId, warnings, err := s.daemon.ContainerCreate(name, config, hostConfig)
if err != nil {
return err
}
return writeJSON(w, http.StatusCreated, &types.ContainerCreateResponse{
ID: containerId,
Warnings: warnings,
})
}
func (s *Server) postContainersRestart(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
timeout, _ := strconv.Atoi(r.Form.Get("t"))
if err := s.daemon.ContainerRestart(vars["name"], timeout); err != nil {
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) postContainerRename(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
name := vars["name"]
newName := r.Form.Get("name")
if err := s.daemon.ContainerRename(name, newName); err != nil {
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) deleteContainers(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
name := vars["name"]
config := &daemon.ContainerRmConfig{
ForceRemove: boolValue(r, "force"),
RemoveVolume: boolValue(r, "v"),
RemoveLink: boolValue(r, "link"),
}
if err := s.daemon.ContainerRm(name, config); err != nil {
// Force a 404 for the empty string
if strings.Contains(strings.ToLower(err.Error()), "prefix can't be empty") {
return fmt.Errorf("no such id: \"\"")
}
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) deleteImages(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
name := vars["name"]
force := boolValue(r, "force")
noprune := boolValue(r, "noprune")
list, err := s.daemon.ImageDelete(name, force, noprune)
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, list)
}
func (s *Server) postContainersStart(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
// If contentLength is -1, we can assume chunked encoding
// or more technically that the length is unknown
// https://golang.org/src/pkg/net/http/request.go#L139
// net/http otherwise seems to swallow any headers related to chunked encoding
// including r.TransferEncoding
// allow a nil body for backwards compatibility
var hostConfig *runconfig.HostConfig
if r.Body != nil && (r.ContentLength > 0 || r.ContentLength == -1) {
if err := checkForJson(r); err != nil {
return err
}
c, err := runconfig.DecodeHostConfig(r.Body)
if err != nil {
return err
}
hostConfig = c
}
if err := s.daemon.ContainerStart(vars["name"], hostConfig); err != nil {
if err.Error() == "Container already started" {
w.WriteHeader(http.StatusNotModified)
return nil
}
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) postContainersStop(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
seconds, _ := strconv.Atoi(r.Form.Get("t"))
if err := s.daemon.ContainerStop(vars["name"], seconds); err != nil {
if err.Error() == "Container already stopped" {
w.WriteHeader(http.StatusNotModified)
return nil
}
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) postContainersWait(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
status, err := s.daemon.ContainerWait(vars["name"], -1*time.Second)
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, &types.ContainerWaitResponse{
StatusCode: status,
})
}
func (s *Server) postContainersResize(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
height, err := strconv.Atoi(r.Form.Get("h"))
if err != nil {
return err
}
width, err := strconv.Atoi(r.Form.Get("w"))
if err != nil {
return err
}
return s.daemon.ContainerResize(vars["name"], height, width)
}
func (s *Server) postContainersAttach(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
inStream, outStream, err := hijackServer(w)
if err != nil {
return err
}
defer closeStreams(inStream, outStream)
if _, ok := r.Header["Upgrade"]; ok {
fmt.Fprintf(outStream, "HTTP/1.1 101 UPGRADED\r\nContent-Type: application/vnd.docker.raw-stream\r\nConnection: Upgrade\r\nUpgrade: tcp\r\n\r\n")
} else {
fmt.Fprintf(outStream, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
}
attachWithLogsConfig := &daemon.ContainerAttachWithLogsConfig{
InStream: inStream,
OutStream: outStream,
UseStdin: boolValue(r, "stdin"),
UseStdout: boolValue(r, "stdout"),
UseStderr: boolValue(r, "stderr"),
Logs: boolValue(r, "logs"),
Stream: boolValue(r, "stream"),
}
if err := s.daemon.ContainerAttachWithLogs(vars["name"], attachWithLogsConfig); err != nil {
fmt.Fprintf(outStream, "Error attaching: %s\n", err)
}
return nil
}
func (s *Server) wsContainersAttach(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
h := websocket.Handler(func(ws *websocket.Conn) {
defer ws.Close()
wsAttachWithLogsConfig := &daemon.ContainerWsAttachWithLogsConfig{
InStream: ws,
OutStream: ws,
ErrStream: ws,
Logs: boolValue(r, "logs"),
Stream: boolValue(r, "stream"),
}
if err := s.daemon.ContainerWsAttachWithLogs(vars["name"], wsAttachWithLogsConfig); err != nil {
logrus.Errorf("Error attaching websocket: %s", err)
}
})
h.ServeHTTP(w, r)
return nil
}
func (s *Server) getContainersByName(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
if version.LessThan("1.19") {
containerJSONRaw, err := s.daemon.ContainerInspectRaw(vars["name"])
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, containerJSONRaw)
}
containerJSON, err := s.daemon.ContainerInspect(vars["name"])
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, containerJSON)
}
func (s *Server) getExecByID(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter 'id'")
}
eConfig, err := s.daemon.ContainerExecInspect(vars["id"])
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, eConfig)
}
func (s *Server) getImagesByName(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
imageInspect, err := s.daemon.Repositories().Lookup(vars["name"])
if err != nil {
return err
}
return writeJSON(w, http.StatusOK, imageInspect)
}
func (s *Server) postBuild(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
var (
authConfigs = map[string]cliconfig.AuthConfig{}
authConfigsEncoded = r.Header.Get("X-Registry-Config")
buildConfig = builder.NewBuildConfig()
)
if authConfigsEncoded != "" {
authConfigsJSON := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authConfigsEncoded))
if err := json.NewDecoder(authConfigsJSON).Decode(&authConfigs); err != nil {
// for a pull it is not an error if no auth was given
// to increase compatibility with the existing api it is defaulting
// to be empty.
}
}
w.Header().Set("Content-Type", "application/json")
if boolValue(r, "forcerm") && version.GreaterThanOrEqualTo("1.12") {
buildConfig.Remove = true
} else if r.FormValue("rm") == "" && version.GreaterThanOrEqualTo("1.12") {
buildConfig.Remove = true
} else {
buildConfig.Remove = boolValue(r, "rm")
}
if boolValue(r, "pull") && version.GreaterThanOrEqualTo("1.16") {
buildConfig.Pull = true
}
output := ioutils.NewWriteFlusher(w)
buildConfig.Stdout = output
buildConfig.Context = r.Body
buildConfig.RemoteURL = r.FormValue("remote")
buildConfig.DockerfileName = r.FormValue("dockerfile")
buildConfig.RepoName = r.FormValue("t")
buildConfig.SuppressOutput = boolValue(r, "q")
buildConfig.NoCache = boolValue(r, "nocache")
buildConfig.ForceRemove = boolValue(r, "forcerm")
buildConfig.AuthConfigs = authConfigs
buildConfig.MemorySwap = int64ValueOrZero(r, "memswap")
buildConfig.Memory = int64ValueOrZero(r, "memory")
buildConfig.CpuShares = int64ValueOrZero(r, "cpushares")
buildConfig.CpuPeriod = int64ValueOrZero(r, "cpuperiod")
buildConfig.CpuQuota = int64ValueOrZero(r, "cpuquota")
buildConfig.CpuSetCpus = r.FormValue("cpusetcpus")
buildConfig.CpuSetMems = r.FormValue("cpusetmems")
buildConfig.CgroupParent = r.FormValue("cgroupparent")
// Job cancellation. Note: not all job types support this.
if closeNotifier, ok := w.(http.CloseNotifier); ok {
finished := make(chan struct{})
defer close(finished)
go func() {
select {
case <-finished:
case <-closeNotifier.CloseNotify():
logrus.Infof("Client disconnected, cancelling job: build")
buildConfig.Cancel()
}
}()
}
if err := builder.Build(s.daemon, buildConfig); err != nil {
// Do not write the error in the http output if it's still empty.
// This prevents writing a 200 (OK) when there is an internal error.
if !output.Flushed() {
return err
}
sf := streamformatter.NewJSONStreamFormatter()
w.Write(sf.FormatError(err))
}
return nil
}
func (s *Server) postContainersCopy(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if vars == nil {
return fmt.Errorf("Missing parameter")
}
if err := checkForJson(r); err != nil {
return err
}
cfg := types.CopyConfig{}
if err := json.NewDecoder(r.Body).Decode(&cfg); err != nil {
return err
}
if cfg.Resource == "" {
return fmt.Errorf("Path cannot be empty")
}
data, err := s.daemon.ContainerCopy(vars["name"], cfg.Resource)
if err != nil {
if strings.Contains(strings.ToLower(err.Error()), "no such id") {
w.WriteHeader(http.StatusNotFound)
return nil
}
if os.IsNotExist(err) {
return fmt.Errorf("Could not find the file %s in container %s", cfg.Resource, vars["name"])
}
return err
}
defer data.Close()
w.Header().Set("Content-Type", "application/x-tar")
if _, err := io.Copy(w, data); err != nil {
return err
}
return nil
}
func (s *Server) postContainerExecCreate(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
name := vars["name"]
execConfig := &runconfig.ExecConfig{}
if err := json.NewDecoder(r.Body).Decode(execConfig); err != nil {
return err
}
execConfig.Container = name
if len(execConfig.Cmd) == 0 {
return fmt.Errorf("No exec command specified")
}
// Register an instance of Exec in container.
id, err := s.daemon.ContainerExecCreate(execConfig)
if err != nil {
logrus.Errorf("Error setting up exec command in container %s: %s", name, err)
return err
}
return writeJSON(w, http.StatusCreated, &types.ContainerExecCreateResponse{
ID: id,
})
}
// TODO(vishh): Refactor the code to avoid having to specify stream config as part of both create and start.
func (s *Server) postContainerExecStart(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
var (
execName = vars["name"]
stdin io.ReadCloser
stdout io.Writer
stderr io.Writer
)
execStartCheck := &types.ExecStartCheck{}
if err := json.NewDecoder(r.Body).Decode(execStartCheck); err != nil {
return err
}
if !execStartCheck.Detach {
// Setting up the streaming http interface.
inStream, outStream, err := hijackServer(w)
if err != nil {
return err
}
defer closeStreams(inStream, outStream)
var errStream io.Writer
if _, ok := r.Header["Upgrade"]; ok {
fmt.Fprintf(outStream, "HTTP/1.1 101 UPGRADED\r\nContent-Type: application/vnd.docker.raw-stream\r\nConnection: Upgrade\r\nUpgrade: tcp\r\n\r\n")
} else {
fmt.Fprintf(outStream, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
}
if !execStartCheck.Tty {
errStream = stdcopy.NewStdWriter(outStream, stdcopy.Stderr)
outStream = stdcopy.NewStdWriter(outStream, stdcopy.Stdout)
}
stdin = inStream
stdout = outStream
stderr = errStream
}
// Now run the user process in container.
if err := s.daemon.ContainerExecStart(execName, stdin, stdout, stderr); err != nil {
logrus.Errorf("Error starting exec command in container %s: %s", execName, err)
return err
}
w.WriteHeader(http.StatusNoContent)
return nil
}
func (s *Server) postContainerExecResize(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
if err := parseForm(r); err != nil {
return err
}
if vars == nil {
return fmt.Errorf("Missing parameter")
}
height, err := strconv.Atoi(r.Form.Get("h"))
if err != nil {
return err
}
width, err := strconv.Atoi(r.Form.Get("w"))
if err != nil {
return err
}
return s.daemon.ContainerExecResize(vars["name"], height, width)
}
func (s *Server) optionsHandler(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
w.WriteHeader(http.StatusOK)
return nil
}
func writeCorsHeaders(w http.ResponseWriter, r *http.Request, corsHeaders string) {
logrus.Debugf("CORS header is enabled and set to: %s", corsHeaders)
w.Header().Add("Access-Control-Allow-Origin", corsHeaders)
w.Header().Add("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, X-Registry-Auth")
w.Header().Add("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT, OPTIONS")
}
func (s *Server) ping(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
_, err := w.Write([]byte{'O', 'K'})
return err
}
func (s *Server) initTcpSocket(addr string) (l net.Listener, err error) {
if s.cfg.TLSConfig == nil || s.cfg.TLSConfig.ClientAuth != tls.RequireAndVerifyClientCert {
logrus.Warn("/!\\ DON'T BIND ON ANY IP ADDRESS WITHOUT setting -tlsverify IF YOU DON'T KNOW WHAT YOU'RE DOING /!\\")
}
if l, err = sockets.NewTcpSocket(addr, s.cfg.TLSConfig, s.start); err != nil {
return nil, err
}
if err := allocateDaemonPort(addr); err != nil {
return nil, err
}
return
}
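// makeHttpHandler wraps an HttpApiFunc with request logging, CORS headers,
// API version negotiation and centralized error reporting.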
func makeHttpHandler(logging bool, localMethod string, localRoute string, handlerFunc HttpApiFunc, corsHeaders string, dockerVersion version.Version) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// log the request
logrus.Debugf("Calling %s %s", localMethod, localRoute)
if logging {
logrus.Infof("%s %s", r.Method, r.RequestURI)
}
if strings.Contains(r.Header.Get("User-Agent"), "Docker-Client/") {
userAgent := strings.Split(r.Header.Get("User-Agent"), "/")
// v1.20 onwards includes the GOOS of the client after the version
// such as Docker/1.7.0 (linux)
if len(userAgent) == 2 && strings.Contains(userAgent[1], " ") {
userAgent[1] = strings.Split(userAgent[1], " ")[0]
}
if len(userAgent) == 2 && !dockerVersion.Equal(version.Version(userAgent[1])) {
logrus.Debugf("Warning: client and server don't have the same version (client: %s, server: %s)", userAgent[1], dockerVersion)
}
}
version := version.Version(mux.Vars(r)["version"])
if version == "" {
version = api.Version
}
if corsHeaders != "" {
writeCorsHeaders(w, r, corsHeaders)
}
if version.GreaterThan(api.Version) {
http.Error(w, fmt.Errorf("client is newer than server (client API version: %s, server API version: %s)", version, api.Version).Error(), http.StatusBadRequest)
return
}
if version.LessThan(api.MinVersion) {
http.Error(w, fmt.Errorf("client is too old, minimum supported API version is %s, please upgrade your client to a newer version", api.MinVersion).Error(), http.StatusBadRequest)
return
}
w.Header().Set("Server", "Docker/"+dockerversion.VERSION+" ("+runtime.GOOS+")")
if err := handlerFunc(version, w, r, mux.Vars(r)); err != nil {
logrus.Errorf("Handler for %s %s returned error: %s", localMethod, localRoute, err)
httpError(w, err)
}
}
}
// we keep enableCors just for legacy usage, need to be removed in the future
func createRouter(s *Server) *mux.Router {
r := mux.NewRouter()
if os.Getenv("DEBUG") != "" {
ProfilerSetup(r, "/debug/")
}
m := map[string]map[string]HttpApiFunc{
"GET": {
"/_ping": s.ping,
"/events": s.getEvents,
"/info": s.getInfo,
"/version": s.getVersion,
"/images/json": s.getImagesJSON,
"/images/search": s.getImagesSearch,
"/images/get": s.getImagesGet,
"/images/{name:.*}/get": s.getImagesGet,
"/images/{name:.*}/history": s.getImagesHistory,
"/images/{name:.*}/json": s.getImagesByName,
"/containers/ps": s.getContainersJSON,
"/containers/json": s.getContainersJSON,
"/containers/{name:.*}/export": s.getContainersExport,
"/containers/{name:.*}/changes": s.getContainersChanges,
"/containers/{name:.*}/json": s.getContainersByName,
"/containers/{name:.*}/top": s.getContainersTop,
"/containers/{name:.*}/logs": s.getContainersLogs,
"/containers/{name:.*}/stats": s.getContainersStats,
"/containers/{name:.*}/attach/ws": s.wsContainersAttach,
"/exec/{id:.*}/json": s.getExecByID,
},
"POST": {
"/auth": s.postAuth,
"/commit": s.postCommit,
"/build": s.postBuild,
"/images/create": s.postImagesCreate,
"/images/load": s.postImagesLoad,
"/images/{name:.*}/push": s.postImagesPush,
"/images/{name:.*}/tag": s.postImagesTag,
"/containers/create": s.postContainersCreate,
"/containers/{name:.*}/kill": s.postContainersKill,
"/containers/{name:.*}/pause": s.postContainersPause,
"/containers/{name:.*}/unpause": s.postContainersUnpause,
"/containers/{name:.*}/restart": s.postContainersRestart,
"/containers/{name:.*}/start": s.postContainersStart,
"/containers/{name:.*}/stop": s.postContainersStop,
"/containers/{name:.*}/wait": s.postContainersWait,
"/containers/{name:.*}/resize": s.postContainersResize,
"/containers/{name:.*}/attach": s.postContainersAttach,
"/containers/{name:.*}/copy": s.postContainersCopy,
"/containers/{name:.*}/exec": s.postContainerExecCreate,
"/exec/{name:.*}/start": s.postContainerExecStart,
"/exec/{name:.*}/resize": s.postContainerExecResize,
"/containers/{name:.*}/rename": s.postContainerRename,
},
"DELETE": {
"/containers/{name:.*}": s.deleteContainers,
"/images/{name:.*}": s.deleteImages,
},
"OPTIONS": {
"": s.optionsHandler,
},
}
// If "api-cors-header" is not given, but "api-enable-cors" is true, we set cors to "*"
// otherwise, all header values will be passed to the HTTP handler
corsHeaders := s.cfg.CorsHeaders
if corsHeaders == "" && s.cfg.EnableCors {
corsHeaders = "*"
}
for method, routes := range m {
for route, fct := range routes {
logrus.Debugf("Registering %s, %s", method, route)
// NOTE: scope issue, make sure the variables are local and won't be changed
localRoute := route
localFct := fct
localMethod := method
// build the handler function
f := makeHttpHandler(s.cfg.Logging, localMethod, localRoute, localFct, corsHeaders, version.Version(s.cfg.Version))
// add the new route
if localRoute == "" {
r.Methods(localMethod).HandlerFunc(f)
} else {
r.Path("/v{version:[0-9.]+}" + localRoute).Methods(localMethod).HandlerFunc(f)
r.Path(localRoute).Methods(localMethod).HandlerFunc(f)
}
}
}
return r
}<|fim▁end|>
|
current = nil
}
defer es.Evict(l)
for _, ev := range current {
|
<|file_name|>sistema.js<|end_file_name|><|fim▁begin|>$(function(){
$('#telefone').mask('(99)9999-9999');
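// Navigate to the edit page of the item referenced by the row's data-item attribute.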
$('.editar').on({
click : function(){
var url = URI+"sistema/editar/"+$(this).attr('data-item');
window.location.href = url;
}
});
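// Ask for confirmation, then remove all selected items with a single POST.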
$('.deletar').on({
click : function(){<|fim▁hole|> {
var $url = URI+"sistema/remover/";
if (window.confirm("deseja apagar os ("+$selecionados.length+") itens selecionados? "))
{
$.post($url, { 'selecionados': $selecionados}, function(data){
pop_up(data, setTimeout(function(){location.reload()}, 100));
});
}
}
else
{
pop_up('nenhum item selecionado');
}
}
});
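// Enforce the 140-character limit on the description field and show the remaining count.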
$('#description').on('keyup',function(){
var alvo = $("#char-digitado");
var max = 140;
var digitados = $(this).val().length;
var restante = max - digitados;
if(digitados > max)
{
var val = $(this).val();
$(this).val(val.substr(0, max));
restante = 0;
}
alvo.html(restante);
});
});<|fim▁end|>
|
var $selecionados = get_selecionados();
if($selecionados.length > 0)
|
<|file_name|>ConstraintCommand.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.shell.commands;
import java.util.Map.Entry;
import org.apache.accumulo.core.data.constraints.Constraint;
import org.apache.accumulo.shell.Shell;
import org.apache.accumulo.shell.Shell.Command;
import org.apache.accumulo.shell.ShellCommandException;
import org.apache.accumulo.shell.ShellCommandException.ErrorCode;
import org.apache.accumulo.shell.ShellOptions;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
public class ConstraintCommand extends Command {
protected Option namespaceOpt;
@Override
public int execute(final String fullCommand, final CommandLine cl, final Shell shellState)
throws Exception {
final String tableName;
final String namespace;
if (cl.hasOption(namespaceOpt.getOpt())) {
namespace = cl.getOptionValue(namespaceOpt.getOpt());
} else {
namespace = null;
}
if (cl.hasOption(OptUtil.tableOpt().getOpt()) || !shellState.getTableName().isEmpty()) {
tableName = OptUtil.getTableOpt(cl, shellState);<|fim▁hole|> }
int i;
switch (OptUtil.getAldOpt(cl)) {
case ADD:
for (String constraint : cl.getArgs()) {
if (namespace != null) {
if (!shellState.getAccumuloClient().namespaceOperations().testClassLoad(namespace,
constraint, Constraint.class.getName())) {
throw new ShellCommandException(ErrorCode.INITIALIZATION_FAILURE,
"Servers are unable to load " + constraint + " as type "
+ Constraint.class.getName());
}
i = shellState.getAccumuloClient().namespaceOperations().addConstraint(namespace,
constraint);
shellState.getWriter().println("Added constraint " + constraint + " to namespace "
+ namespace + " with number " + i);
} else if (tableName != null && !tableName.isEmpty()) {
if (!shellState.getAccumuloClient().tableOperations().testClassLoad(tableName,
constraint, Constraint.class.getName())) {
throw new ShellCommandException(ErrorCode.INITIALIZATION_FAILURE,
"Servers are unable to load " + constraint + " as type "
+ Constraint.class.getName());
}
i = shellState.getAccumuloClient().tableOperations().addConstraint(tableName,
constraint);
shellState.getWriter().println(
"Added constraint " + constraint + " to table " + tableName + " with number " + i);
} else {
throw new IllegalArgumentException("Please specify either a table or a namespace");
}
}
break;
case DELETE:
for (String constraint : cl.getArgs()) {
i = Integer.parseInt(constraint);
if (namespace != null) {
shellState.getAccumuloClient().namespaceOperations().removeConstraint(namespace, i);
shellState.getWriter()
.println("Removed constraint " + i + " from namespace " + namespace);
} else if (tableName != null) {
shellState.getAccumuloClient().tableOperations().removeConstraint(tableName, i);
shellState.getWriter().println("Removed constraint " + i + " from table " + tableName);
} else {
throw new IllegalArgumentException("Please specify either a table or a namespace");
}
}
break;
case LIST:
if (namespace != null) {
for (Entry<String,Integer> property : shellState.getAccumuloClient().namespaceOperations()
.listConstraints(namespace).entrySet()) {
shellState.getWriter().println(property.toString());
}
} else if (tableName != null) {
for (Entry<String,Integer> property : shellState.getAccumuloClient().tableOperations()
.listConstraints(tableName).entrySet()) {
shellState.getWriter().println(property.toString());
}
} else {
throw new IllegalArgumentException("Please specify either a table or a namespace");
}
}
return 0;
}
@Override
public String description() {
return "adds, deletes, or lists constraints for a table";
}
@Override
public int numArgs() {
return Shell.NO_FIXED_ARG_LENGTH_CHECK;
}
@Override
public String usage() {
return getName() + " <constraint>{ <constraint>}";
}
@Override
public Options getOptions() {
final Options o = new Options();
o.addOptionGroup(OptUtil.addListDeleteGroup("constraint"));
OptionGroup grp = new OptionGroup();
grp.addOption(OptUtil.tableOpt("table to add, delete, or list constraints for"));
namespaceOpt = new Option(ShellOptions.namespaceOption, "namespace", true,
"name of a namespace to operate on");
namespaceOpt.setArgName("namespace");
grp.addOption(namespaceOpt);
o.addOptionGroup(grp);
return o;
}
}<|fim▁end|>
|
} else {
tableName = null;
|
<|file_name|>009.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from circuits import Component, Event
class bark(Event):
"""bark Event"""
<|fim▁hole|>
self.bob = Bob().register(self)
self.fred = Fred().register(self)
class Dog(Component):
def started(self, *args):
self.fire(bark())
def bark(self):
print("Woof! I'm %s!" % name) # noqa
class Bob(Dog):
"""Bob"""
channel = "bob"
class Fred(Dog):
"""Fred"""
channel = "fred"
Pound().run()<|fim▁end|>
|
class Pound(Component):
def __init__(self):
super(Pound, self).__init__()
|
<|file_name|>bake_and_deploy_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# See testable_service/integration_test.py and spinnaker_testing/spinnaker.py
# for more details.
#
# This test will use ssh to peek at the spinnaker configuration
# to determine the managed project it should verify, and to determine
# the spinnaker account name to use when sending it commands.
# Sample Usage:
# Assuming you have created $PASSPHRASE_FILE (which you should chmod 400)
# and $CITEST_ROOT points to the root directory of this repository
# (which is . if you execute this from the root). The passphrase file
# can be omitted if you run ssh-agent and add .ssh/compute_google_engine.
#
# Since this test runs a pipeline from a Jenkins trigger, you need to
# configure Jenkins in the following way.
# 1. Take note of your Jenkins server baseUrl,
# i.e. <protocol>://<host>[:port]/[basePath]
# and store it as $JENKINS_URL.
#
# 2. Create a file, fill it with
# <username> <password>
# corresponding to valid Jenkins credentials, and store its path
# as $JENKINS_AUTH_PATH (also chmod 400).
# Or, set JENKINS_USER and JENKINS_PASSWORD environment variables.
#
# 3. Take note of the Jenkins master you have configured in Igor,
# and store its name as $JENKINS_MASTER.
#
# 4. Choose a name for your jenkins job and store it in $JENKINS_JOB.
#
# 5. On your Jenkins server, navigate to /job/$JENKINS_JOB/configure
# a) Under "Build Triggers", check "Trigger builds remotely".
# b) In the "Authentication Token" field, write some token
# and store it as $JENKINS_TOKEN.
# c) Add a build step that produces a file.
# mkdir -p somedir
# touch somedir/vim_2:7.4.052-1ubuntu3_amd64.deb
# Note that this might need to be consistent with the
# platform the bakery is on. The above works on Ubuntu 14.04
# d) Add post build action to archive the artifacts
# files to archive: somedir/vim_2:7.4.052-1ubuntu3_amd64.deb
#
#
# PYTHONPATH=$CITEST_ROOT/testing/citest \
# python $CITEST_ROOT/testing/citest/tests/bake_and_deploy_test.py \
# --gce_ssh_passphrase_file=$PASSPHRASE_FILE \
# --gce_project=$PROJECT \
# --gce_zone=$ZONE \
# --gce_instance=$INSTANCE \
# --jenkins_master=$JENKINS_MASTER \
# --jenkins_url=$JENKINS_URL \
# --jenkins_auth_path=$JENKINS_AUTH_PATH \
# --jenkins_job=$JENKINS_JOB \
# --jenkins_token=$JENKINS_TOKEN \
# --test_google \
# --test_aws
# or
# PYTHONPATH=$CITEST_ROOT/testing/citest \
# python $CITEST_ROOT/testing/citest/tests/bake_and_deploy_test.py \
# --native_hostname=host-running-smoke-test
# --managed_gce_project=$PROJECT \
# --test_gce_zone=$ZONE
# --jenkins_url=$JENKINS_URL \
# --jenkins_auth_path=$JENKINS_AUTH_PATH \
# --jenkins_job=$JENKINS_JOB \
# --jenkins_token=$JENKINS_TOKEN
# --test_google \
# --test_aws
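#
# As a quick manual check of step 5 (hypothetical values; assumes the token
# and credentials above), the remote trigger can be exercised with e.g.:
# curl -X POST -u "$JENKINS_USER:$JENKINS_PASSWORD" \
#   "$JENKINS_URL/job/$JENKINS_JOB/build?token=$JENKINS_TOKEN"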
# pylint: disable=bad-continuation
# pylint: disable=invalid-name
# pylint: disable=missing-docstring
# Standard python modules.
import logging
import os
import sys
import time
# citest modules.
import citest.base
import citest.gcp_testing as gcp
import citest.json_contract as jc
import citest.json_predicate as jp
import citest.service_testing as st
# Spinnaker modules.
import spinnaker_testing as sk
import spinnaker_testing.gate as gate
ov_factory = jc.ObservationPredicateFactory()
class BakeAndDeployTestScenario(sk.SpinnakerTestScenario):
MINIMUM_PROJECT_QUOTA = {
'INSTANCE_TEMPLATES': 1,
'HEALTH_CHECKS': 1,
'FORWARDING_RULES': 1,
'IN_USE_ADDRESSES': 1,
'TARGET_POOLS': 1,
'IMAGES': 1,
}
MINIMUM_REGION_QUOTA = {
'CPUS': 1,
'IN_USE_ADDRESSES': 1,
'INSTANCE_GROUP_MANAGERS': 1,
'INSTANCES': 1,
}
@classmethod
def new_agent(cls, bindings):
return gate.new_agent(bindings)
@classmethod
def initArgumentParser(cls, parser, defaults=None):
"""Initialize command line argument parser.
Args:
parser: argparse.ArgumentParser
"""
super(BakeAndDeployTestScenario, cls).initArgumentParser(
parser, defaults=defaults)
defaults = defaults or {}
parser.add_argument(
'--jenkins_master', default='',
help='The name of the jenkins master as configured in igor.'
' You may need to override this to an alias depending on firewalls.'
' The Spinnaker server may have permissions, but the citest machine'
' may not. Otherwise, this defaults to Spinnaker\'s binding.')
parser.add_argument(
'--jenkins_job', default='NoOpTrigger',
help='The name of the jenkins job to trigger off.'
' You will need to add this to your --jenkins_master.')
parser.add_argument(
'--jenkins_auth_path', default=None,
help='The path to a file containing the jenkins username password pair.'
'The contents should look like: <username> <password>.')
parser.add_argument(
'--jenkins_token', default='TRIGGER_TOKEN',
help='The authentication token for the jenkins build trigger.'
' This corresponds to the --jenkins_job on the --jenkins_url server')
parser.add_argument(
'--jenkins_url', default='',
help='The baseUrl of the jenkins service,'
' i.e. <protocol>://<host>[:port]/[basePath].'
' You may need to override this to an alias depending on firewalls.'
' The Spinnaker server may have permissions, but the citest machine'
' may not. Otherwise, this can be empty for Spinnaker\'s current'
' binding.')
parser.add_argument(
'--test_google', action='store_true',
help='Test Google pipelines.')
parser.add_argument(
'--test_aws', action='store_true',
help='Test AWS pipelines.')
def _do_init_bindings(self):
logger = logging.getLogger(__name__)
bindings = self.bindings
deployed = self.agent.deployed_config
yaml_node_path = 'services.jenkins.defaultMaster'
if not bindings.get('JENKINS_MASTER'):
bindings['JENKINS_MASTER'] = deployed[yaml_node_path + '.name']
logger.info('Inferring JENKINS_MASTER %s', bindings['JENKINS_MASTER'])
if not bindings.get('JENKINS_URL'):
bindings['JENKINS_URL'] = deployed[yaml_node_path + '.baseUrl']
logger.info('Inferring JENKINS_URL %s', bindings['JENKINS_URL'])
def __init__(self, bindings, agent=None):
super(BakeAndDeployTestScenario, self).__init__(bindings, agent)
self.logger = logging.getLogger(__name__)
bindings = self.bindings
    # We'll call out the app name because it is widely used;
    # it scopes the context of our activities.
self.TEST_APP = bindings['TEST_APP']
self.__short_lb_name = 'lb'
self.__full_lb_name = '{app}-{stack}-{detail}'.format(
app=self.TEST_APP, stack=bindings['TEST_STACK'],
detail=self.__short_lb_name)
self.aws_bake_pipeline_id = None
self.aws_destroy_pipeline_id = None
self.google_bake_pipeline_id = None
self.google_destroy_pipeline_id = None
self.__image_id_to_delete = None # Id of the baked image we need to clean up after the B & D pipelines run.
self.docker_pipeline_id = None
self.test_google = bindings['TEST_GOOGLE']
self.test_aws = bindings['TEST_AWS']
# This test has been exceeding the default timeout of 13 minutes for the Jenkins agent,
# so increase the timeout to 20 minutes.
self.jenkins_agent = sk.JenkinsAgent(bindings['JENKINS_URL'],
bindings['JENKINS_AUTH_PATH'],
self.agent, None, 1200)
self.run_tests = True
if not (self.test_google or self.test_aws):
self.run_tests = False
self.logger.warning(
'Neither --test_google nor --test_aws were set. '
'No tests will be run.')
def create_app(self):
builder = st.HttpContractBuilder(self.agent)
(builder.new_clause_builder('Has Application', retryable_for_secs=60)
.get_url_path('applications')
.contains_path_value('name', self.TEST_APP))
return st.OperationContract(
self.agent.make_create_app_operation(
bindings=self.bindings, application=self.TEST_APP,
account_name=self.bindings['SPINNAKER_GOOGLE_ACCOUNT']),
builder.build())
def delete_app(self):
contract = jc.Contract()
return st.OperationContract(
self.agent.make_delete_app_operation(
application=self.TEST_APP,
account_name=self.bindings['SPINNAKER_GOOGLE_ACCOUNT']),
contract=contract)
def create_load_balancer(self):
bindings = self.bindings
load_balancer_name = self.__full_lb_name
spec = {
'checkIntervalSec': 5,
'healthyThreshold': 2,
'unhealthyThreshold': 2,
'timeoutSec': 5,
'port': 80
}
payload = self.agent.make_json_payload_from_kwargs(
job=[{
'cloudProvider': 'gce',
'provider': 'gce',
'stack': bindings['TEST_STACK'],
'detail': self.__short_lb_name,
'credentials': bindings['SPINNAKER_GOOGLE_ACCOUNT'],
'region': bindings['TEST_GCE_REGION'],
'ipProtocol': 'TCP',
'portRange': spec['port'],
'loadBalancerName': load_balancer_name,
'healthCheck': {
'port': spec['port'],
'timeoutSec': spec['timeoutSec'],
'checkIntervalSec': spec['checkIntervalSec'],
'healthyThreshold': spec['healthyThreshold'],
'unhealthyThreshold': spec['unhealthyThreshold'],
},
'type': 'upsertLoadBalancer',
'availabilityZones': {bindings['TEST_GCE_REGION']: []},
'user': '[anonymous]'
}],
description='Create Load Balancer: ' + load_balancer_name,
application=self.TEST_APP)
    # We aren't testing load balancers, so assume it is working,
    # but we'll look at the health check to know it is ready.
builder = gcp.GcpContractBuilder(self.gcp_observer)
(builder.new_clause_builder('Health Check Added',
retryable_for_secs=30)
.list_resource('httpHealthChecks')
.contains_path_value('name', load_balancer_name + '-hc'))
(builder.new_clause_builder('Load Balancer Created',
retryable_for_secs=60)
.list_resource('forwardingRules')
.contains_path_value('name', self.__full_lb_name))
return st.OperationContract(
self.new_post_operation(
title='create_load_balancer', data=payload,
path=('applications/{app}/tasks').format(app=self.TEST_APP)),
contract=builder.build())
def delete_load_balancer(self):
bindings = self.bindings
payload = self.agent.make_json_payload_from_kwargs(
job=[{
'type': 'deleteLoadBalancer',
'cloudProvider': 'gce',
'loadBalancerName': self.__full_lb_name,
'region': bindings['TEST_GCE_REGION'],
'regions': [bindings['TEST_GCE_REGION']],
'credentials': bindings['SPINNAKER_GOOGLE_ACCOUNT'],
'user': '[anonymous]'
}],
description='Delete Load Balancer: {0} in {1}:{2}'.format(
self.__full_lb_name,
bindings['SPINNAKER_GOOGLE_ACCOUNT'],
bindings['TEST_GCE_REGION']),
application=self.TEST_APP)
builder = gcp.GcpContractBuilder(self.gcp_observer)
(builder.new_clause_builder('Health Check Removed', retryable_for_secs=30)
.list_resource('httpHealthChecks')
.excludes_path_value('name', self.__full_lb_name + '-hc'))
return st.OperationContract(
self.new_post_operation(
title='delete_load_balancer', data=payload,
path=('applications/{app}/tasks').format(app=self.TEST_APP)),
contract=builder.build())
def make_jenkins_trigger(self):
return {
'enabled': True,
'type': 'jenkins',
'master': self.bindings['JENKINS_MASTER'],
'job': self.bindings['JENKINS_JOB']
}
def make_bake_stage(self, package, providerType, requisiteStages=None,
**kwargs):
result = {
'requisiteStageRefIds':requisiteStages or [],
'refId': 'BAKE',
'type': 'bake',
'name': 'Bake',
'user': '[anonymous]',
'baseOs': 'trusty',
'baseLabel': 'release',
'cloudProviderType': providerType,
'package': package,
'rebake': True
}
result.update(kwargs)
return result
def make_deploy_google_stage(self, requisiteStages=None):
return {
'requisiteStageRefIds': requisiteStages or [],
'refId': 'DEPLOY',
'type': 'deploy',
'name': 'Deploy',
'clusters':[{
'application': self.TEST_APP,
'strategy': '',
'stack': self.bindings['TEST_STACK'],
'freeFormDetails': '',
'loadBalancers': [self.__full_lb_name],
'securityGroups': [],
'capacity': {
'min':1,
'max':1,
'desired':1
},
'zone': self.bindings['TEST_GCE_ZONE'],
'network': 'default',
'instanceMetadata': {
'startup-script':
'sudo apt-get update && sudo apt-get install apache2 -y',
'load-balancer-names': self.__full_lb_name
},
'tags': [],
'availabilityZones': {
self.bindings['TEST_GCE_REGION']: [self.bindings['TEST_GCE_ZONE']]
},
'cloudProvider': 'gce',
'provider': 'gce',
'instanceType': 'f1-micro',
'targetSize': 1,
'account': self.bindings['SPINNAKER_GOOGLE_ACCOUNT']
}]
}
def make_destroy_group_stage(self, cloudProvider, requisiteStages,
**kwargs):
result = {
'cloudProvider': cloudProvider,
'cloudProviderType': cloudProvider,
'credentials': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
'name': 'Destroy Server Group',
'refId': 'DESTROY',
'requisiteStageRefIds': requisiteStages or [],
'target': 'current_asg_dynamic',
'regions': [self.bindings['TEST_GCE_REGION']],
'cluster': '{app}-{stack}'.format(
app=self.TEST_APP, stack=self.bindings['TEST_STACK']),
'type': 'destroyServerGroup'
}
result.update(kwargs)
return result
def make_disable_group_stage(self, cloudProvider, requisiteStages=None,
**kwargs):
result = {
'requisiteStageRefIds': requisiteStages or [],
'refId': 'DISABLE',
'type': 'disableServerGroup',
'name': 'Disable Server Group',
'cloudProviderType': cloudProvider,
'cloudProvider': cloudProvider,
'target': 'current_asg_dynamic',
'cluster': '{app}-{stack}'.format(
app=self.TEST_APP, stack=self.bindings['TEST_STACK']),
'credentials': self.bindings['SPINNAKER_GOOGLE_ACCOUNT']
}
result.update(kwargs)
return result
def create_bake_docker_pipeline(self):
name = 'BakeDocker'
self.docker_pipeline_id = name
bake_stage = self.make_bake_stage(
package='vim', providerType='docker', region='global')
pipeline_spec = dict(
name=name,
stages=[bake_stage],
triggers=[self.make_jenkins_trigger()],
application=self.TEST_APP,
stageCounter=1,
parallel=True,
limitConcurrent=True,
appConfig={}
)
payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)
builder = st.HttpContractBuilder(self.agent)
(builder.new_clause_builder('Has Pipeline')
.get_url_path(
'applications/{app}/pipelineConfigs'.format(app=self.TEST_APP))
.contains_path_value(None, pipeline_spec))
return st.OperationContract(
self.new_post_operation(
title='create_bake_docker_pipeline', data=payload, path='pipelines',
status_class=st.SynchronousHttpOperationStatus),
contract=builder.build())
def create_bake_and_deploy_google_pipeline(self):
name = 'BakeAndDeployGoogle'
self.google_bake_pipeline_id = name
bake_stage = self.make_bake_stage(
package='vim', providerType='gce', region='global')
deploy_stage = self.make_deploy_google_stage(requisiteStages=['BAKE'])
pipeline_spec = dict(
name=name,
stages=[bake_stage, deploy_stage],
triggers=[self.make_jenkins_trigger()],
application=self.TEST_APP,
stageCounter=2,
parallel=True,
limitConcurrent=True,
appConfig={}
)
payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)
builder = st.HttpContractBuilder(self.agent)
(builder.new_clause_builder('Has Pipeline')
.get_url_path(
'applications/{app}/pipelineConfigs'.format(app=self.TEST_APP))
.contains_path_value(None, pipeline_spec))
return st.OperationContract(
self.new_post_operation(
title='create_bake_google_pipeline', data=payload, path='pipelines',
status_class=st.SynchronousHttpOperationStatus),
contract=builder.build())
def create_disable_and_destroy_google_pipeline(self):
name = 'DisableAndDestroyGoogle'
self.google_destroy_pipeline_id = name
disable_stage = self.make_disable_group_stage(
cloudProvider='gce', regions=[self.bindings['TEST_GCE_REGION']])
destroy_stage = self.make_destroy_group_stage(
cloudProvider='gce', requisiteStages=['DISABLE'])
pipeline_spec = dict(
name=name,
stages=[disable_stage, destroy_stage],
application=self.TEST_APP,
stageCounter=2,
parallel=True,
limitConcurrent=True,
appConfig={}
)
payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)
builder = st.HttpContractBuilder(self.agent)
(builder.new_clause_builder('Has Pipeline', retryable_for_secs=5)
.get_url_path(
'applications/{app}/pipelineConfigs'.format(app=self.TEST_APP))
.contains_path_value(None, pipeline_spec))
return st.OperationContract(
self.new_post_operation(
title='create_destroy_google_pipeline',
data=payload, path='pipelines',
status_class=st.SynchronousHttpOperationStatus),
contract=builder.build())
def create_bake_and_deploy_aws_pipeline(self):
name = 'BakeAndDeployAws'
self.aws_bake_pipeline_id = name
bake_stage = self.make_bake_stage(
package='vim',
providerType='aws',
regions=[self.bindings['TEST_AWS_REGION']],
vmType='hvm', storeType='ebs')
# FIXME(jacobkiefer): this is creating a gce deploy stage in an aws
# pipeline. Not good.
deploy_stage = self.make_deploy_google_stage(requisiteStages=['BAKE'])
pipeline_spec = dict(
name=name,
stages=[bake_stage, deploy_stage],
triggers=[self.make_jenkins_trigger()],
application=self.TEST_APP,
stageCounter=2,
parallel=True
)
payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)
builder = st.HttpContractBuilder(self.agent)
(builder.new_clause_builder('Has Pipeline')
.get_url_path(
'applications/{app}/pipelineConfigs'.format(app=self.TEST_APP))
.contains_path_value(None, pipeline_spec))
return st.OperationContract(
self.new_post_operation(
title='create_bake_aws_pipeline', data=payload, path='pipelines',
status_class=st.SynchronousHttpOperationStatus),
contract=builder.build())
def create_disable_and_destroy_aws_pipeline(self):
name = 'DisableAndDestroyAws'
self.aws_destroy_pipeline_id = name
disable_stage = self.make_disable_group_stage(
cloudProvider='aws', regions=[self.bindings['TEST_AWS_REGION']])
destroy_stage = self.make_destroy_group_stage(
cloudProvider='aws', zones=[self.bindings['TEST_AWS_ZONE']],
requisiteStages=['DISABLE'])
pipeline_spec = dict(
name=name,
stages=[disable_stage, destroy_stage],
triggers=[self.make_jenkins_trigger()],
application=self.TEST_APP,
stageCounter=2,
parallel=True
)
payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)
builder = st.HttpContractBuilder(self.agent)
(builder.new_clause_builder('Has Pipeline')
.get_url_path(
'applications/{app}/pipelineConfigs'.format(app=self.TEST_APP))
.contains_path_value(None, pipeline_spec))
return st.OperationContract(
self.new_post_operation(
title='create_destroy_aws_pipeline', data=payload, path='pipelines',
status_class=st.SynchronousHttpOperationStatus),
contract=builder.build())
def delete_pipeline(self, pipeline_id):
payload = self.agent.make_json_payload_from_kwargs(id=pipeline_id)
path = os.path.join('pipelines', self.TEST_APP, pipeline_id)
builder = st.HttpContractBuilder(self.agent)
(builder.new_clause_builder('Has Pipeline',
retryable_for_secs=5)
.get_url_path(
'applications/{app}/pipelineConfigs'.format(app=self.TEST_APP))
.excludes_path_value('name', pipeline_id))
return st.OperationContract(
self.new_delete_operation(
title='delete_bake_pipeline', data=payload, path=path,
status_class=st.SynchronousHttpOperationStatus),
contract=builder.build())
def trigger_bake_and_deploy_google_pipeline(self):
path = 'applications/{app}/pipelines'.format(app=self.TEST_APP)
group_name = '{app}-{stack}-v000'.format(
app=self.TEST_APP, stack=self.bindings['TEST_STACK'])
builder = gcp.GcpContractBuilder(self.gcp_observer)
(builder.new_clause_builder('Managed Instance Group Deployed',
retryable_for_secs=30)
.inspect_resource('instanceGroupManagers', group_name)
.EXPECT(ov_factory.value_list_path_contains('targetSize', jp.NUM_EQ(1))))
return st.OperationContract(
self.jenkins_agent.new_jenkins_trigger_operation(
title='monitor_bake_pipeline',
job=self.bindings['JENKINS_JOB'],
token=self.bindings['JENKINS_TOKEN'],
status_class=gate.GatePipelineStatus,
status_path=path,
max_wait_secs=1080), # Allow 18 mins to bake and deploy.
contract=builder.build(),
cleanup=self.capture_baked_image)
def run_disable_and_destroy_google_pipeline(self, pipeline_id):
path = 'pipelines/{app}/{id}'.format(app=self.TEST_APP,
id=self.google_destroy_pipeline_id)
group_name = '{app}-{stack}-v000'.format(
app=self.TEST_APP, stack=self.bindings['TEST_STACK'])
builder = gcp.GcpContractBuilder(self.gcp_observer)
(builder.new_clause_builder('Managed Instance Group Destroyed')
.inspect_resource('instanceGroupManagers', group_name)
.EXPECT(ov_factory.error_list_contains(
gcp.HttpErrorPredicate(http_code=404)))
.OR(ov_factory.value_list_path_contains('targetSize', jp.NUM_EQ(0))))
return st.OperationContract(
self.new_post_operation(
title='run_destroy_pipeline',
data='',
path=path,
max_wait_secs=1080), # Allow 18 mins to disable and destroy.
contract=jc.Contract(),
cleanup=self.delete_baked_image)
def new_jenkins_build_operation(self):
return None
def capture_baked_image(self, execution_context):
"""Saves the baked image name from the triggered Bake & Deploy pipeline to delete later."""
status = execution_context.get('OperationStatus', None)
if status is None:
self.logger.info(
'Operation could not be performed so there is no image to delete.')
return
status = status.trigger_status
detail = status.detail_doc
if isinstance(detail, list):
if not detail:
self.logger.error('No trigger_status, so baked image is unknown\n'
'%s\n\n', status)
return
self.logger.info('Using first status.')
detail = detail[0]
stages = detail.get('stages', [])
image_id = (stages[0].get('context', {}).get('imageId')
if stages
else None)
self.logger.info('Capturing the baked image="%s" to delete', image_id)
self.__image_id_to_delete = image_id
def delete_baked_image(self, _unused_execution_context):
"""Deletes the baked image when we are done using it."""
if self.__image_id_to_delete:
execution_context = citest.base.ExecutionContext()
self.gcp_observer.invoke_resource(
execution_context, 'delete', 'images', resource_id=self.__image_id_to_delete)
class BakeAndDeployTest(st.AgentTestCase):
@staticmethod
def setUpClass():
runner = citest.base.TestRunner.global_runner()
scenario = runner.get_shared_data(BakeAndDeployTestScenario)
if not scenario.test_google:
return
managed_region = scenario.bindings['TEST_GCE_REGION']
title = 'Check Quota for {0}'.format(scenario.__class__.__name__)
verify_results = gcp.verify_quota(
title,
scenario.gcp_observer,
project_quota=BakeAndDeployTestScenario.MINIMUM_PROJECT_QUOTA,
regions=[(managed_region,
BakeAndDeployTestScenario.MINIMUM_REGION_QUOTA)])
if not verify_results:
raise RuntimeError('Insufficient Quota: {0}'.format(verify_results))
@property
def scenario(self):
return citest.base.TestRunner.global_runner().get_shared_data(
BakeAndDeployTestScenario)
@property
def testing_agent(self):
return self.scenario.agent
def test_a_create_app(self):
if not self.scenario.run_tests:
self.skipTest("No --test_{google, aws} flags were set")
else:
self.run_test_case(self.scenario.create_app())
def test_b_create_load_balancer(self):
if not self.scenario.run_tests:
self.skipTest("No --test_{google, aws} flags were set")
else:
self.run_test_case(self.scenario.create_load_balancer())
def test_c1_create_bake_and_deploy_google_pipeline(self):
if not self.scenario.test_google:
self.skipTest("--test_google flag not set")
else:
self.run_test_case(self.scenario.create_bake_and_deploy_google_pipeline())
def test_d1_create_disable_and_destroy_google_pipeline(self):
if not self.scenario.test_google:
self.skipTest("--test_google flag not set")
else:
self.run_test_case(
self.scenario.create_disable_and_destroy_google_pipeline())
def test_c2_create_bake_and_deploy_aws_pipeline(self):
if not self.scenario.test_aws:
self.skipTest("--test_aws flag not set")
else:
self.run_test_case(self.scenario.create_bake_and_deploy_aws_pipeline())
def test_d2_create_disable_and_destroy_aws_pipeline(self):
if not self.scenario.test_aws:
self.skipTest("--test_aws flag not set")
else:
self.run_test_case(
self.scenario.create_disable_and_destroy_aws_pipeline())
def test_e1_trigger_bake_and_deploy_google_pipeline(self):
if not self.scenario.test_google:
self.skipTest("--test_google flag not set")
else:
# Wait for Echo's cache to pick up the pipeline.
time.sleep(20)
self.run_test_case(
self.scenario.trigger_bake_and_deploy_google_pipeline(),
poll_every_secs=5)
def test_w1_run_disable_and_destroy_google_pipeline(self):
if not self.scenario.test_google:
self.skipTest("--test_google flag not set")
else:
self.run_test_case(
self.scenario.run_disable_and_destroy_google_pipeline(
self.scenario.google_destroy_pipeline_id),
poll_every_secs=5)
def test_x1_delete_google_bake_pipeline(self):
if not self.scenario.test_google:
self.skipTest("--test_google flag not set")
else:
self.run_test_case(
self.scenario.delete_pipeline(self.scenario.google_bake_pipeline_id))
def test_x1_delete_google_destroy_pipeline(self):
if not self.scenario.test_google:<|fim▁hole|>
def test_x2_delete_aws_pipeline(self):
if not self.scenario.test_aws:
self.skipTest("--test_aws flag not set")
else:
      self.run_test_case(
          self.scenario.delete_pipeline(self.scenario.aws_bake_pipeline_id))
      self.run_test_case(
          self.scenario.delete_pipeline(self.scenario.aws_destroy_pipeline_id))
def test_y_delete_load_balancer(self):
if not self.scenario.run_tests:
self.skipTest("No --test_{google, aws} flags were set")
else:
self.run_test_case(self.scenario.delete_load_balancer(),
max_retries=5)
def test_z_delete_app(self):
if not self.scenario.run_tests:
self.skipTest("No --test_{google, aws} flags were set")
# Give a total of a minute because it might also need
# an internal cache update
else:
self.run_test_case(self.scenario.delete_app(),
retry_interval_secs=8, max_retries=8)
def main():
defaults = {
'TEST_STACK': 'baketest' + BakeAndDeployTestScenario.DEFAULT_TEST_ID,
'TEST_APP': 'baketest' + BakeAndDeployTestScenario.DEFAULT_TEST_ID
}
return citest.base.TestRunner.main(
parser_inits=[BakeAndDeployTestScenario.initArgumentParser],
default_binding_overrides=defaults,
test_case_list=[BakeAndDeployTest])
if __name__ == '__main__':
sys.exit(main())<|fim▁end|>
|
self.skipTest("--test_google flag not set")
else:
self.run_test_case(
self.scenario.delete_pipeline(self.scenario.google_destroy_pipeline_id))
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># (c) 2012, Daniel Hokka Zakrisson <[email protected]>
# (c) 2012-2014, Michael DeHaan <[email protected]> and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import imp
import inspect
import os
import os.path
import sys
from ansible import constants as C
from ansible.utils.display import Display
from ansible import errors
MODULE_CACHE = {}
PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
_basedirs = []
def push_basedir(basedir):
# avoid pushing the same absolute dir more than once
basedir = os.path.realpath(basedir)
if basedir not in _basedirs:
_basedirs.insert(0, basedir)
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in inspect.getmembers(sys.modules[__name__]) if isinstance(obj, PluginLoader)]
class PluginLoader:
'''
PluginLoader loads plugins from the configured plugin directories.
It searches for plugins by iterating through the combined list of
play basedirs, configured paths, and the python path.
The first match is used.
'''
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
self.base_class = required_base_class
self.package = package
self.config = config
self.subdir = subdir
self.aliases = aliases
if not class_name in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
if not class_name in PATH_CACHE:
PATH_CACHE[class_name] = None
if not class_name in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = {}
self._module_cache = MODULE_CACHE[class_name]
self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
self._searched_paths = set()
def __setstate__(self, data):
'''
Deserializer.
'''
class_name = data.get('class_name')
package = data.get('package')
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
def __getstate__(self):
'''
Serializer.
'''
return dict(
class_name = self.class_name,
base_class = self.base_class,
package = self.package,
config = self.config,
subdir = self.subdir,
aliases = self.aliases,
_extra_dirs = self._extra_dirs,
_searched_paths = self._searched_paths,
PATH_CACHE = PATH_CACHE[self.class_name],
PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
)
def print_paths(self):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in self._get_paths():
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def _all_directories(self, dir):
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir):
if '__init__.py' in files:
for x in subdirs:
results.append(os.path.join(root,x))
return results
def _get_package_paths(self):
''' Gets the path of a Python package '''
paths = []
if not self.package:
return []
if not hasattr(self, 'package_path'):
m = __import__(self.package)
parts = self.package.split('.')[1:]
self.package_path = os.path.join(os.path.dirname(m.__file__), *parts)
paths.extend(self._all_directories(self.package_path))
return paths
def _get_paths(self):
''' Return a list of paths to search for plugins in '''
if self._paths is not None:
return self._paths
ret = self._extra_dirs[:]
for basedir in _basedirs:
fullpath = os.path.realpath(os.path.join(basedir, self.subdir))
if os.path.isdir(fullpath):
files = glob.glob("%s/*" % fullpath)
# allow directories to be two levels deep
files2 = glob.glob("%s/*/*" % fullpath)
if files2 is not None:
files.extend(files2)
for file in files:
if os.path.isdir(file) and file not in ret:
ret.append(file)
if fullpath not in ret:
ret.append(fullpath)
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
configured_paths = self.config.split(os.pathsep)
for path in configured_paths:
path = os.path.realpath(os.path.expanduser(path))
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
if os.path.isdir(c) and c not in ret:
ret.append(c)
if path not in ret:
ret.append(path)
# look for any plugins installed in the package subtree
ret.extend(self._get_package_paths())
# cache and return the result
self._paths = ret
return ret
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
directory = os.path.realpath(directory)
if directory is not None:
if with_subdir:
directory = os.path.join(directory, self.subdir)
if directory not in self._extra_dirs:
# append the directory and invalidate the path cache
self._extra_dirs.append(directory)
self._paths = None
def find_plugin(self, name, suffixes=None):
''' Find a plugin named name '''
if not suffixes:
if self.class_name:
suffixes = ['.py']
else:
suffixes = ['.py', '']
potential_names = frozenset('%s%s' % (name, s) for s in suffixes)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
found = None
for path in [p for p in self._get_paths() if p not in self._searched_paths]:
if os.path.isdir(path):
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
                except OSError as e:
                    d = Display()
                    d.warning("Error accessing plugin paths: %s" % str(e))
                    # Skip this path so full_paths is never used uninitialized below
                    continue
for full_path in (f for f in full_paths if os.path.isfile(f)):
for suffix in suffixes:
if full_path.endswith(suffix):
full_name = os.path.basename(full_path)
break
else: # Yes, this is a for-else: http://bit.ly/1ElPkyg
continue
if full_name not in self._plugin_path_cache:
self._plugin_path_cache[full_name] = full_path
self._searched_paths.add(path)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
# if nothing is found, try finding alias/deprecated
if not name.startswith('_'):
for alias_name in ('_%s' % n for n in potential_names):
# We've already cached all the paths at this point
if alias_name in self._plugin_path_cache:
return self._plugin_path_cache[alias_name]
return None
def has_plugin(self, name):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
__contains__ = has_plugin
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
if path is None:
return None
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
return None
return obj
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
for i in self._get_paths():
matches = glob.glob(os.path.join(i, "*.py"))
matches.sort()
for path in matches:
name, ext = os.path.splitext(os.path.basename(path))
if name.startswith("_"):
continue
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
continue
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
yield obj
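# Illustrative sketch (not part of the original module) of how the loaders defined
# below are typically used; the plugin name 'file' is only an example, and any
# plugin present on the configured search paths behaves the same way:
#
#     if lookup_loader.has_plugin('file'):
#         path = lookup_loader.find_plugin('file')   # absolute path to file.py, or None
#         plugin = lookup_loader.get('file')         # imports the module, returns a LookupModule instance
#
# find_plugin() caches both the directories it has already scanned and the resolved
# plugin paths, so repeated lookups after the first scan are plain dictionary hits.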
action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
'action_plugins',
required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache',
C.DEFAULT_CACHE_PLUGIN_PATH,
'cache_plugins',
)
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
'callback_plugins',
)
connection_loader = PluginLoader(
'Connection',
'ansible.plugins.connections',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
aliases={'paramiko': 'paramiko_ssh'},
required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
'ShellModule',
'ansible.plugins.shell',
'shell_plugins',
'shell_plugins',
)
module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library',
)
lookup_loader = PluginLoader(<|fim▁hole|> 'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
'lookup_plugins',
required_base_class='LookupBase',
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
'filter_plugins',
)
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategies',
None,
'strategy_plugins',
required_base_class='StrategyBase',
)<|fim▁end|>
| |
<|file_name|>shared_space_audit.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""TODO"""
<|fim▁hole|>#
# LinShare cli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LinShare cli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LinShare cli. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2019 Frédéric MARTIN
#
# Contributors list :
#
# Frédéric MARTIN [email protected]
#
import copy
import json
from argparse import RawTextHelpFormatter
from linshareapi.cache import Time
from vhatable.cell import CellBuilder
from vhatable.cell import ComplexCell
from vhatable.cell import ComplexCellBuilder
from vhatable.filters import PartialOr
from linsharecli.user.core import DefaultCommand as Command
from linsharecli.common.core import add_list_parser_options
from linsharecli.common.cell import ActorCell
from linsharecli.common.cell import AuthUserCell
from linsharecli.common.tables import TableBuilder
class DefaultCommand(Command):
"""TODO"""
IDENTIFIER = "name"
MSG_RS_UPDATED = "The shared space member '%(account)s' (%(uuid)s) was successfully updated."
MSG_RS_CREATED = "The shared space member '%(account)s' (%(uuid)s) was successfully created."
CFG_DELETE_MODE = 1
CFG_DELETE_ARG_ATTR = "ss_uuid"
def complete(self, args, prefix):
super(DefaultCommand, self).__call__(args)
json_obj = self.ls.shared_spaces.list()
return (v.get(self.RESOURCE_IDENTIFIER)
for v in json_obj if v.get(self.RESOURCE_IDENTIFIER).startswith(prefix))
def complete_shared_spaces(self, args, prefix):
"""TODO"""
super(DefaultCommand, self).__call__(args)
json_obj = self.ls.shared_spaces.list()
return (v.get(self.RESOURCE_IDENTIFIER)
for v in json_obj if v.get(self.RESOURCE_IDENTIFIER).startswith(prefix))
class SharedSpaceCompleter(object):
"""TODO"""
# pylint: disable=too-few-public-methods
def __init__(self, config):
self.config = config
def __call__(self, prefix, **kwargs):
from argcomplete import debug
try:
debug("\n------------ SharedSpaceCompleter -----------------")
debug("Kwargs content :")
for i, j in list(kwargs.items()):
debug("key : " + str(i))
debug("\t - " + str(j))
debug("\n------------ SharedSpaceCompleter -----------------\n")
args = kwargs.get('parsed_args')
cmd = DefaultCommand(self.config)
return cmd.complete_shared_spaces(args, prefix)
# pylint: disable=broad-except
except Exception as ex:
debug("\nERROR:An exception was caught :" + str(ex) + "\n")
import traceback
traceback.print_exc()
debug("\n------\n")
return ["comlete-error"]
class ResourceCell(ComplexCell):
"""TODO"""
_format_filter = '{uuid}'
def __unicode__(self):
if self.raw:
return str(self.value)
if self.value is None:
return self.none
action = self.row['action']
resource_type = self.row['type']
fmt = 'Missing format. {raw}'
data = {}
data['action'] = action
data['raw'] = "?"
if self.extended:
fmt = 'Missing format.\n{raw}'
data['raw'] = json.dumps(
copy.deepcopy(self.value),
sort_keys=True, indent=2
)
if resource_type == "WORKGROUP":
if action == "CREATE":
fmt = 'New workGroup : {name} ({uuid:.8})'
data.update(self.value)
elif resource_type == "WORKGROUP_MEMBER":
if action == "CREATE":
fmt = 'New member : {name} ({uuid:.8})'
if self.vertical:
fmt = 'New member : {name} ({uuid})'
data.update(self.value['user'])
elif resource_type == "WORKGROUP_FOLDER":
if action == "CREATE":
fmt = 'New folder : {name} ({uuid:.8})'
if self.vertical:
fmt = 'New folder : {name} ({uuid})'
data.update(self.value)
elif resource_type == "WORKGROUP_DOCUMENT":
if action == "CREATE":
fmt = 'New document : {name} ({uuid:.8})'
if self.vertical:
fmt = 'New document : {name} ({uuid})'
data.update(self.value)
elif resource_type == "WORKGROUP_DOCUMENT_REVISION":
if action == "CREATE":
fmt = 'New version : {name} ({uuid:.8})'
if self.vertical:
fmt = 'New version : {name} ({uuid})'
data.update(self.value)
return fmt.format(**data)
class ListCommand(Command):
""" List all Jwt token."""
IDENTIFIER = "creationDate"
RESOURCE_IDENTIFIER = "uuid"
@Time('linsharecli.shared_spaces.audit', label='Global time : %(time)s')
def __call__(self, args):
super(ListCommand, self).__call__(args)
endpoint = self.ls.shared_spaces.audit
tbu = TableBuilder(self.ls, endpoint, self.DEFAULT_SORT)
tbu.load_args(args)
tbu.add_filters(
PartialOr(self.IDENTIFIER, args.identifiers, True),
PartialOr(self.RESOURCE_IDENTIFIER, args.uuids, True, match_raw=True),
PartialOr("resource", [args.resource], True, match_raw=False),
)
tbu.add_custom_cell("actor", ActorCell)
tbu.add_custom_cell("authUser", AuthUserCell)
tbu.add_custom_cell("uuid", CellBuilder('{value:.8}', '{value}'))
tbu.add_custom_cell("resource", ResourceCell)
tbu.add_custom_cell(
"workGroup",
ComplexCellBuilder(
'{name}\n({uuid:.8})',
'{name} ({uuid:})',
'{name}',
)
)
table = tbu.build().load_v2(endpoint.list(args.ss_uuid))
table.align['resource'] = "l"
return table.render()
def complete_fields(self, args, prefix):
"""TODO"""
# pylint: disable=unused-argument
super(ListCommand, self).__call__(args)
cli = self.ls.shared_spaces.audit
return cli.get_rbu().get_keys(True)
def add_parser(subparsers, name, desc, config):
"""TODO"""
parser_tmp = subparsers.add_parser(name, help=desc)
parser_tmp.add_argument(
'ss_uuid',
help="shared_space uuid"
).completer = SharedSpaceCompleter(config)
subparsers2 = parser_tmp.add_subparsers()
# command : list
parser = subparsers2.add_parser(
'list',
formatter_class=RawTextHelpFormatter,
help="list shared space audit traces")
parser.add_argument('identifiers', nargs="*", help="filter by fragments of date")
parser.add_argument('-u', '--uuid', dest="uuids", action="append",
help="Filter by uuid fragments.")
parser.add_argument('-e', '--resource', action="store",
help="Filter by resource uuid")
add_list_parser_options(parser, cdate=True)
parser.set_defaults(__func__=ListCommand(config))<|fim▁end|>
|
# This file is part of Linshare cli.
|
<|file_name|>enroll_node_not_found.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.<|fim▁hole|># See the License for the specific language governing permissions and
# limitations under the License.
from ironicclient import exceptions
from ironic_inspector import node_cache
from ironic_inspector import utils
def hook(introspection_data, **kwargs):
ironic = utils.get_client()
try:
node = ironic.node.create(**{'driver': 'fake'})
except exceptions.HttpError as exc:
        raise utils.Error(_("Can not create node in ironic for unknown "
"node: %s") % exc)
return node_cache.add_node(node.uuid, ironic=ironic)<|fim▁end|>
| |
<|file_name|>json.ts<|end_file_name|><|fim▁begin|>import viser from '../../../packages/viser/src/index';
import { data, scale } from './data'
const DataSet = require('@antv/data-set');
const ds = new DataSet();
const dv = ds.createView()
.source(data)
.transform({
type: 'percent',
field: 'value',
dimension: 'country',
groupBy: ['year'],
as: 'percent'
});
viser({
data: dv.rows,
scale: scale,
axis: true,
tooltip: true,
legend: {
dataKey: 'country',
position: 'top-left',
onItemMouseEnter: (ev) => {
console.log(5, ev);
}
},
series: [{
quickType: 'stackBar',
style: {
stroke: '#fff',
lineWidth: 1
},
position: 'year*percent',
color: 'country',
onMouseEnter: (ev) => {
console.log(3, ev);
},
}],
brush: {
type: 'X',
onBrushStart(ev, chart) {
chart.hideTooltip();
},
onBrushMove(ev, chart) {
chart.hideTooltip();
}
},
chart: {
container: 'mount',
forceFit: true,
height: 400,<|fim▁hole|> chart.get('options').filters = {};
chart.repaint();
}
},
});<|fim▁end|>
|
padding: [100, 80, 80, 80],
renderer: 'svg',
onPlotdblclick: (ev: any, chart: any) => {
|
<|file_name|>horrorkino.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
"""
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import dom_parser
from resources.lib.modules import source_utils
class source:
def __init__(self):
self.priority = 1
self.language = ['de']
self.genre_filter = ['horror']
self.domains = ['horrorkino.do.am']
self.base_link = 'http://horrorkino.do.am/'
self.search_link = 'video/shv'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = self.__search([localtitle] + source_utils.aliases_to_array(aliases), year)
if not url and title != localtitle: url = self.__search([title] + source_utils.aliases_to_array(aliases), year)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
sources = []
try:
if not url:
return sources
r = client.request(urlparse.urljoin(self.base_link, url))
r = re.findall('''vicode\s*=\s*["'](.*?)["'];''', r)[0].decode('string_escape')
r = dom_parser.parse_dom(r, 'iframe', req='src')
r = [i.attrs['src'] for i in r]
for i in r:
valid, host = source_utils.is_host_valid(i, hostDict)
if not valid: continue
sources.append({'source': host, 'quality': 'SD', 'language': 'de', 'url': i, 'direct': False, 'debridonly': False, 'checkquality': True})
return sources
except:
return sources
def resolve(self, url):
return url
def __search(self, titles, year):
try:
t = [cleantitle.get(i) for i in set(titles) if i]
y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']
r = client.request(urlparse.urljoin(self.base_link, self.search_link), post={'query': cleantitle.query(titles[0])})
r = dom_parser.parse_dom(r, 'li', attrs={'class': 'entTd'})
r = dom_parser.parse_dom(r, 'div', attrs={'class': 've-screen'}, req='title')
r = [(dom_parser.parse_dom(i, 'a', req='href'), i.attrs['title'].split(' - ')[0]) for i in r]
r = [(i[0][0].attrs['href'], i[1], re.findall('(.+?) \(*(\d{4})', i[1])) for i in r]
r = [(i[0], i[2][0][0] if len(i[2]) > 0 else i[1], i[2][0][1] if len(i[2]) > 0 else '0') for i in r]
r = sorted(r, key=lambda i: int(i[2]), reverse=True) # with year > no year
r = [i[0] for i in r if cleantitle.get(i[1]) in t and i[2] in y][0]
return source_utils.strip_domain(r)
except:
return<|fim▁end|>
| |
<|file_name|>PointsPlugin.ts<|end_file_name|><|fim▁begin|>import { Plugin } from "../Plugin";
import { CommandHandler } from "../../matrix/CommandHandler";<|fim▁hole|>
/**
* Plugin for tracking an arbitrary number of points
*/
export class PointsPlugin implements Plugin {
private pointsTracker: PointsTracker;
/**
* Creates a new points plugin
* @param config the config to use
* @param admins the admins for the bot
*/
constructor(private config: PointsConfig, private admins: string[]) {
}
public init(matrixClient: MatrixClient): void {
LogService.info("PointsPlugin", "Setting up points tracker");
this.pointsTracker = new PointsTracker(matrixClient, this.config);
LogService.info("PointsPlugin", "Registering command handler");
CommandHandler.registerCommand("!points", this.pointsCommand.bind(this), "!points - Shows the current points standing");
CommandHandler.registerCommand("!yay", this.yayCommand.bind(this), "!yay <user> <points> [task] - Awards points arbitrarily");
}
private async pointsCommand(_cmd: string, _args: string[], roomId: string, event, matrixClient: MatrixClient) {
LogService.debug("PointsPlugin", "Sending current points standing to " + roomId);
try {
const points = this.pointsTracker.getCount();
matrixClient.replyNotice(roomId, event, `Points: ${points}/${this.config.goal}`);
} catch (e) {
LogService.error("PointsPlugin", e);
matrixClient.replyNotice(roomId, event, "Error processing command");
}
}
private async yayCommand(_cmd: string, args: string[], roomId: string, event, matrixClient: MatrixClient) {
if (this.admins.indexOf(event['sender']) === -1) {
matrixClient.replyNotice(roomId, event, "You do not have permission to run that command");
return;
}
if (args.length < 2) {
matrixClient.replyNotice(roomId, event, "Not enough arguments. Try !yay <name> <amount> [reason]");
return;
}
const user = args[0];
const points = Number(args[1]);
const reason = args.splice(2).join(" ").trim();
LogService.debug("PointsPlugin", `Incrementing points by ${points} due to ${user} doing ${reason} - done by ${event['sender']}`);
this.pointsTracker.incrementPoints(user, points, reason).then(() => {
if (this.config.advertiseRoom !== roomId) {
return this.pointsCommand(null, null, roomId, event, matrixClient);
}
}).catch(err => {
LogService.error("PointsPlugin", err);
matrixClient.replyNotice(roomId, event, "There was an error processing your command");
});
}
}
export interface PointsConfig {
advertiseRoom: string;
statsRoom: string;
milestoneId: string;
goal: number;
widgetUrl: string;
widgetName: string;
}<|fim▁end|>
|
import { PointsTracker } from "../../points/PointsTracker";
import { LogService, MatrixClient } from "matrix-bot-sdk";
|
<|file_name|>backup.go<|end_file_name|><|fim▁begin|>package gostuff
import (
"fmt"
"log"
"os"
"os/exec"
"runtime"
)
//exports database(without Grandmaster games) to an .sql file as a hot backup
//@param isTemplate If true then export template database
func ExportDatabase(isTemplate bool) {
problems, _ := os.OpenFile("logs/errors.txt", os.O_APPEND|os.O_WRONLY, 0666)
defer problems.Close()
log.SetOutput(problems)
command := "mysqldump --databases gochess --ignore-table=gochess.grandmaster > ./../backup/gochessNoGrandmaster.sql"
if isTemplate {
command = "cd config && mysqldump --databases gochess --no-data > ./../backup/gochessTemplate.sql"
}
if runtime.GOOS == "windows" {
_, err := exec.Command("cmd.exe", "/C", command).Output()
if err != nil {
log.Println(err)
fmt.Println("Error in exporting database, please check logs")
}
} else {
_, err := exec.Command("/bin/bash", "-c", command).Output()
if err != nil {
log.Println(err)
fmt.Println("Error in exporting database, please check logs")
}<|fim▁hole|>
// zips up exported database
func CompressDatabase() {
result := compress("./backup/gochess.zip", []string{"./backup/gochess.sql"})
if result {
fmt.Println("Exported database file succesfully compressed!")
}
}
//imports the main gochess database, returns true if successful
func importDatabase() bool {
problems, _ := os.OpenFile("logs/errors.txt", os.O_APPEND|os.O_WRONLY, 0666)
defer problems.Close()
log.SetOutput(problems)
result := unzip("./backup/gochess.zip", "./backup")
if result == false {
return false
}
if runtime.GOOS == "windows" {
_, err := exec.Command("cmd.exe", "/C", "mysql < ./backup/gochess.sql").Output()
if err != nil {
log.Println(err)
fmt.Println("Error in importing gochess database, please check logs")
return false
}
} else {
_, err := exec.Command("/bin/bash", "-c", "mysql < ./backup/gochess.sql").Output()
if err != nil {
log.Println(err)
fmt.Println("Error in importing gochess database, please check logs")
return false
}
}
return true
}
//imports template database, returns true if successful
func importTemplateDatabase() bool {
problems, _ := os.OpenFile("logs/errors.txt", os.O_APPEND|os.O_WRONLY, 0666)
defer problems.Close()
log.SetOutput(problems)
//determine which operating system to execute appropriate shell command
if runtime.GOOS == "windows" {
_, err := exec.Command("cmd.exe", "/C", "mysql < ./backup/gochessTemplate.sql").Output()
if err != nil {
log.Println(err)
fmt.Println("Error in importing template database, please check logs")
return false
}
} else {
_, err := exec.Command("/bin/bash", "-c", "mysql < ./backup/gochessTemplate.sql").Output()
if err != nil {
log.Println(err)
fmt.Println("Error in importing template database, please check logs")
return false
}
}
return true
}<|fim▁end|>
|
}
}
|
<|file_name|>timeline.client.controller.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|> }
]);<|fim▁end|>
|
angular.module('core').controller('TimelineController', ['$scope',
function($scope) {
// Timeline controller logic
// ...
|
<|file_name|>test_rule_500.py<|end_file_name|><|fim▁begin|>import os
import unittest
from vsg.rules import ieee
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError =vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir,'rule_500_test_input.vhd'))
lExpected_lower = []
lExpected_lower.append('')
utils.read_file(os.path.join(sTestDir, 'rule_500_test_input.fixed_lower.vhd'), lExpected_lower)
lExpected_upper = []
lExpected_upper.append('')
utils.read_file(os.path.join(sTestDir, 'rule_500_test_input.fixed_upper.vhd'), lExpected_upper)
class test_port_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
def test_rule_500_lower(self):
oRule = ieee.rule_500()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'ieee')
self.assertEqual(oRule.identifier, '500')
self.assertEqual(oRule.groups, ['case', 'case::keyword'])
lExpected = [66, 67, 68, 69, 70]
lExpected.extend([73, 74, 76, 77, 78,79])
lExpected.extend(range(87, 89))
lExpected.extend([91])
lExpected.extend(range(93, 95))
lExpected.extend(range(100, 105))
lExpected.extend([107, 108, 110, 111, 112, 113])
oRule.analyze(self.oFile)<|fim▁hole|> def test_rule_500_upper(self):
oRule = ieee.rule_500()
oRule.case = 'upper'
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'ieee')
self.assertEqual(oRule.identifier, '500')
lExpected = []
lExpected.extend(range(5, 10))
lExpected.extend([12, 13, 15, 16, 17, 18])
lExpected.extend(range(26, 28))
lExpected.extend([30])
lExpected.extend(range(32, 34))
lExpected.extend(range(39, 44))
lExpected.extend([46, 47, 49, 50, 51, 52])
oRule.analyze(self.oFile)
self.assertEqual(utils.extract_violation_lines_from_violation_object(oRule.violations), lExpected)
def test_fix_rule_500_lower(self):
oRule = ieee.rule_500()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected_lower, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
def test_fix_rule_500_upper(self):
oRule = ieee.rule_500()
oRule.case = 'upper'
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected_upper, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])<|fim▁end|>
|
self.assertEqual(utils.extract_violation_lines_from_violation_object(oRule.violations), lExpected)
|
<|file_name|>TestDiracCommands.py<|end_file_name|><|fim▁begin|>import datetime
from collections import namedtuple
import os
import tempfile
import time
import uuid
import random
import stat
from textwrap import dedent
import pytest
from GangaCore.Utility.logging import getLogger
from GangaDirac.Lib.Utilities.DiracUtilities import execute
from GangaCore.testlib.mark import external
from GangaCore.testlib.GangaUnitTest import load_config_files, clear_config
logger = getLogger(modulename=True)
statusmapping = {
'Checking': 'submitted',
'Completed': 'running',
'Deleted': 'failed',
'Done': 'completed',
'Failed': 'failed',
'Killed': 'killed',
'Matched': 'submitted',
'Received': 'submitted',
'Running': 'running',
'Staging': 'submitted',
'Stalled': 'running',
'Waiting': 'submitted',
}
JobInfo = namedtuple('JobInfo', ['id', 'get_file_lfn', 'remove_file_lfn'])
@pytest.yield_fixture(scope='module')
def load_config():<|fim▁hole|> load_config_files()
# make sure post-boostrap hook is run to ensure Dirac config options are set correctly
# Only becomes an issue if this test is run on it's own
from GangaLHCb import postBootstrapHook
postBootstrapHook()
yield
clear_config()
@pytest.yield_fixture(scope='class')
def dirac_job(load_config):
sandbox_str = uuid.uuid4()
get_file_str = uuid.uuid4()
remove_file_str = uuid.uuid4()
exe_script = """#!/bin/bash
echo '%s' > sandboxFile.txt
echo '%s' > getFile.dst
echo '%s' > removeFile.dst
""" % (sandbox_str, get_file_str, remove_file_str)
logger.info("exe_script:\n%s\n" % str(exe_script))
exe_file, exe_path_name = tempfile.mkstemp()
with os.fdopen(exe_file, 'wb') as f:
f.write(exe_script)
st = os.stat(exe_path_name)
os.chmod(exe_path_name, st.st_mode | stat.S_IEXEC)
api_script = """
# Script written in TestDiracCommands.py
from LHCbDIRAC.Interfaces.API.Dirac import Dirac
from LHCbDIRAC.Interfaces.API.Job import Job
from DIRAC.Core.Utilities.SiteSEMapping import getSEsForCountry
uk_ses = getSEsForCountry('uk')['Value']
j = Job()
j.setName('Ganga-DiracCommands-InitTestJob')
j.setCPUTime(10)
j.setExecutable('###EXE_SCRIPT_BASE###','','Ganga_Executable.log')
j.setInputSandbox(['###EXE_SCRIPT###'])
j.setOutputSandbox(['std.out','std.err','sandboxFile.txt'])
j.setOutputData(['getFile.dst', 'removeFile.dst'], outputSE=uk_ses)
#submit the job to dirac
dirac=Dirac()
result = dirac.submitJob(j)
output(result)
"""
api_script = dedent(api_script)
final_submit_script = api_script.replace('###EXE_SCRIPT###', exe_path_name).replace('###EXE_SCRIPT_BASE###', os.path.basename(exe_path_name))
confirm = execute(final_submit_script, return_raw_dict=True)
if not isinstance(confirm, dict):
raise RuntimeError('Problem submitting job\n{0}'.format(confirm))
assert 'OK' in confirm, 'Failed to submit job!'
assert confirm['OK'], 'Failed to submit job!'
job_id = confirm['Value']
logger.info(job_id)
os.remove(exe_path_name)
logger.info('Waiting for DIRAC job to finish')
timeout = 1200
end_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=timeout)
status = execute('status([%s], %s)' % (job_id, repr(statusmapping)), return_raw_dict=True)
while (status['OK'] and statusmapping[status['Value'][0][1]] not in ['completed', 'failed']) and datetime.datetime.utcnow() < end_time:
time.sleep(5)
status = execute('status([%s], %s)' % (job_id, repr(statusmapping)), return_raw_dict=True)
print("Job status: %s" % status)
assert 'OK' in status, 'Failed to get job Status!'
assert status['OK'], 'Failed to get job Status!'
assert statusmapping[status['Value'][0][1]] == 'completed', 'job not completed properly: %s' % status
logger.info("status: %s", status)
output_data_info = execute('getOutputDataInfo("%s")' % job_id, return_raw_dict=True)
logger.info('output_data_info: %s' % output_data_info)
max_retry = 20
count = 0
while not output_data_info.get('OK', True) and count != max_retry:
time.sleep(5)
output_data_info = execute('getOutputDataInfo("%s")' % job_id, return_raw_dict=True)
logger.info("output_data_info:\n%s\n", output_data_info)
count += 1
assert 'OK' in output_data_info, 'getOutputDataInfo Failed!'
assert output_data_info['OK'], 'getOutputDataInfo Failed!'
logger.info("\n\n\noutput_data_info: %s\n\n\n" % output_data_info)
get_file_lfn = output_data_info['Value']['getFile.dst']['LFN']
remove_file_lfn = output_data_info['Value']['removeFile.dst']['LFN']
logger.info("%s %s", get_file_lfn, remove_file_lfn)
yield JobInfo(job_id, get_file_lfn, remove_file_lfn)
confirm = execute('removeFile("%s")' % get_file_lfn, return_raw_dict=True)
assert 'OK' in confirm, 'removeFile Failed!'
assert confirm['OK'], 'removeFile Failed!'
@pytest.fixture(scope='module')
def dirac_sites(load_config):
"""Grab a shuffled list of UK DIRAC storage elements"""
site_script = dedent("""
from DIRAC.Core.Utilities.SiteSEMapping import getSEsForCountry
output(getSEsForCountry('uk'))
""")
output = execute(site_script, return_raw_dict=True)
assert output['OK'], 'Could not fetch list of SEs'
sites = output['Value']
random.shuffle(sites)
return sites
@external
class TestDiracCommands(object):
def test_peek(self, dirac_job):
confirm = execute('peek("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'peek command not executed successfully'
def test_getJobCPUTime(self, dirac_job):
confirm = execute('getJobCPUTime("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getJobCPUTime command not executed successfully'
def test_getOutputData(self, dirac_job):
confirm = execute('getOutputData("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getOutputData command not executed successfully'
def test_getOutputSandbox(self, dirac_job):
confirm = execute('getOutputSandbox("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getOutputSandbox command not executed successfully'
def test_getOutputDataInfo(self, dirac_job):
confirm = execute('getOutputDataInfo("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getOutputDataInfo command not executed successfully'
assert isinstance(confirm['Value']['getFile.dst'], dict), 'getOutputDataInfo command not executed successfully'
def test_getOutputDataLFNs(self, dirac_job):
confirm = execute('getOutputDataLFNs("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
logger.info(confirm)
assert confirm['OK'], 'getOutputDataLFNs command not executed successfully'
def test_normCPUTime(self, dirac_job):
confirm = execute('normCPUTime("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'normCPUTime command not executed successfully'
assert isinstance(confirm['Value'], str), 'normCPUTime ommand not executed successfully'
def test_getStateTime(self, dirac_job):
confirm = execute('getStateTime("%s", "completed")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getStateTime command not executed successfully'
assert isinstance(confirm['Value'], datetime.datetime), 'getStateTime command not executed successfully'
def test_timedetails(self, dirac_job):
confirm = execute('timedetails("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'timedetails command not executed successfully'
assert isinstance(confirm['Value'], dict), 'Command not executed successfully'
def test_y_reschedule(self, dirac_job):
confirm = execute('reschedule("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'reschedule command not executed successfully'
def test_z_kill(self, dirac_job):
# remove_files()
confirm = execute('kill("%s")' % dirac_job.id, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'kill command not executed successfully'
def test_status(self, dirac_job):
confirm = execute('status([%s], %s)' % (dirac_job.id, repr(statusmapping)), return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'status command not executed successfully'
assert isinstance(confirm['Value'], list), 'Command not executed successfully'
def test_getFile(self, dirac_job):
confirm = execute('getFile("%s")' % dirac_job.get_file_lfn, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getFile command not executed successfully'
def test_removeFile(self, dirac_job):
confirm = execute('removeFile("%s")' % dirac_job.remove_file_lfn, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'removeFile command not executed successfully'
def test_ping(self, dirac_job):
confirm = execute('ping("WorkloadManagement","JobManager")', return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'ping command not executed successfully'
def test_getMetadata(self, dirac_job):
confirm = execute('getMetadata("%s")' % dirac_job.get_file_lfn, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getMetaData command not executed successfully'
def test_getReplicas(self, dirac_job):
confirm = execute('getReplicas("%s")' % dirac_job.get_file_lfn, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getReplicas command not executed successfully'
def test_getAccessURL(self, dirac_job):
confirm = execute('getReplicas("%s")' % dirac_job.get_file_lfn, cred_req=dirac_job.cred_req, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'getReplicas command not executed successfully'
SE = random.choice(list(confirm['Value']['Successful'][dirac_job.get_file_lfn].keys()))
accessResult = execute('getAccessURL("%s", "%s")' % (dirac_job.get_file_lfn, SE), cred_req=dirac_job.cred_req, return_raw_dict=True)
logger.info(accessResult)
assert accessResult['OK'], 'getAccessURL command not executed successfully'
accessResultProtocol = execute('getAccessURL("%s", "%s", %s)' % (dirac_job.get_file_lfn, SE, ['xroot']), cred_req=dirac_job.cred_req, return_raw_dict=True)
logger.info(accessResultProtocol)
assert accessResultProtocol['OK'], 'getAccessURL command with protocol not executed successfully'
assert ('root://' in accessResultProtocol['Value']['Successful'][dirac_job.get_file_lfn]), 'URL does not start with root protocol'
def test_replicateFile(self, dirac_job, dirac_sites):
for new_location in dirac_sites:
confirm = execute('replicateFile("%s","%s","")' % (dirac_job.get_file_lfn, new_location), return_raw_dict=True)
logger.info(confirm)
if not confirm['OK']:
continue # If we couldn't add the file, try the next site
confirm = execute('removeReplica("%s","%s")' % (dirac_job.get_file_lfn, new_location), return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'Command not executed successfully'
break # Once we found a working site, stop looking
else:
raise AssertionError('No working site found')
def test_splitInputData(self, dirac_job):
confirm = execute('splitInputData("%s","1")' % dirac_job.get_file_lfn, return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'splitInputData command not executed successfully'
def test_uploadFile(self, tmpdir, dirac_job, dirac_sites):
new_lfn = '%s_add_file' % os.path.dirname(dirac_job.get_file_lfn)
for location in dirac_sites:
temp_file = tmpdir.join('upload_file')
temp_file.write(uuid.uuid4())
logger.info('Adding file to %s', location)
confirm = execute('uploadFile("%s","%s",["%s"],"")' % (new_lfn, temp_file, location), return_raw_dict=True)
logger.info(confirm)
if confirm.get(location, False):
continue # If we couldn't add the file, try the next site
logger.info('Removing file from %s', location)
confirm_remove = execute('removeFile("%s")' % new_lfn, return_raw_dict=True)
logger.info(confirm_remove)
assert confirm_remove['OK'], 'Command not executed successfully'
break # Once we found a working site, stop looking
else:
raise AssertionError('No working site found')
def test_addFile(self, tmpdir, dirac_job, dirac_sites):
new_lfn = '%s_add_file' % os.path.dirname(dirac_job.get_file_lfn)
for location in dirac_sites:
temp_file = tmpdir.join('add_file')
temp_file.write(uuid.uuid4())
logger.info('Adding file to %s', location)
confirm = execute('addFile("%s","%s","%s","")' % (new_lfn, temp_file, location), return_raw_dict=True)
logger.info(confirm)
if not confirm['OK']:
continue # If we couldn't add the file, try the next site
logger.info('Removing file from %s', location)
confirm_remove = execute('removeFile("%s")' % new_lfn, return_raw_dict=True)
logger.info(confirm_remove)
assert confirm_remove['OK'], 'Command not executed successfully'
break # Once we found a working site, stop looking
else:
raise AssertionError('No working site found')
def test_getJobGroupJobs(self, dirac_job):
confirm = execute('getJobGroupJobs("")', return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'Command not executed successfully'
def test_bkQueryDict(self, dirac_job):
confirm = execute('bkQueryDict({"FileType":"Path","ConfigName":"LHCb","ConfigVersion":"Collision09","EventType":"10","ProcessingPass":"Real Data","DataTakingConditions":"Beam450GeV-VeloOpen-MagDown"})', return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'bkQuery command not executed successfully'
def test_checkSites(self, dirac_job):
confirm = execute('checkSites()', return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'checkSites command not executed successfully'
def test_bkMetaData(self, dirac_job):
confirm = execute('bkMetaData("")', return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'Command not executed successfully'
def test_getDataset(self, dirac_job):
confirm = execute('getDataset("LHCb/Collision09/Beam450GeV-VeloOpen-MagDown/Real Data/RecoToDST-07/10/DST","","Path","","","")', return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'Command not executed successfully'
def test_checkTier1s(self, dirac_job):
confirm = execute('checkTier1s()', return_raw_dict=True)
logger.info(confirm)
assert confirm['OK'], 'Command not executed successfully'
def test_getInputDataCatalog(self, dirac_job):
confirm = execute('getInputDataCatalog("%s","","")' % dirac_job.get_file_lfn, return_raw_dict=True)
logger.info(confirm)
assert confirm['Message'].startswith('Failed to access') or confirm['Message'].startswith('Exception during construction'), 'Command not executed successfully'
def test_getLHCbInputDataCatalog(self, dirac_job):
confirm = execute('getLHCbInputDataCatalog("%s",0,"","")' % dirac_job.get_file_lfn, return_raw_dict=True)
logger.info(confirm)
assert confirm['Message'].startswith('Failed to access') or confirm['Message'].startswith('Exception during construction'), 'Command not executed successfully'<|fim▁end|>
|
"""Load the Ganga config files before the test and clean them up afterwards"""
|
<|file_name|>uri.rs<|end_file_name|><|fim▁begin|>use std::error::Error as StdError;
use std::fmt::{Display, self};
use std::str::{self, FromStr};
use http::ByteStr;
use bytes::{BufMut, BytesMut};
/// The Request-URI of a Request's StartLine.
///
/// From Section 5.3, Request Target:
/// > Once an inbound connection is obtained, the client sends an HTTP
/// > request message (Section 3) with a request-target derived from the
/// > target URI. There are four distinct formats for the request-target,
/// > depending on both the method being requested and whether the request
/// > is to a proxy.
/// >
/// > ```notrust
/// > request-target = origin-form
/// > / absolute-form
/// > / authority-form
/// > / asterisk-form
/// > ```
///
/// # Uri explanations
/// ```notrust
/// abc://username:[email protected]:123/path/data?key=value&key2=value2#fragid1
/// |-| |-------------------------------||--------| |-------------------| |-----|
/// | | | | |
/// scheme authority path query fragment
/// ```
#[derive(Clone, Hash)]
pub struct Uri {
source: ByteStr,
scheme_end: Option<usize>,
authority_end: Option<usize>,
query_start: Option<usize>,
fragment_start: Option<usize>,
}
impl Uri {
/// Parse a string into a `Uri`.
fn new(s: ByteStr) -> Result<Uri, UriError> {
if s.len() == 0 {
Err(UriError(ErrorKind::Empty))
} else if s.as_bytes() == b"*" {
// asterisk-form
Ok(asterisk_form())
} else if s.as_bytes() == b"/" {
// shortcut for '/'
Ok(Uri::default())
} else if s.as_bytes()[0] == b'/' {
// origin-form
let query = parse_query(&s);
let fragment = parse_fragment(&s);
Ok(Uri {
source: s,
scheme_end: None,
authority_end: None,
query_start: query,
fragment_start: fragment,
})
} else if s.contains("://") {
// absolute-form
let scheme = parse_scheme(&s);
let auth = Some(parse_authority(&s));
let scheme_end = scheme.expect("just checked for ':' above");
let auth_end = auth.expect("just checked for ://");
if scheme_end + 3 == auth_end {
// authority was empty
return Err(UriError(ErrorKind::MissingAuthority));
}
let query = parse_query(&s);
let fragment = parse_fragment(&s);
Ok(Uri {
source: s,
scheme_end: scheme,
authority_end: auth,
query_start: query,
fragment_start: fragment,
})
} else if (s.contains("/") || s.contains("?")) && !s.contains("://") {
// last possibility is authority-form, above are illegal characters
return Err(UriError(ErrorKind::Malformed))
} else {
// authority-form
let len = s.len();
Ok(Uri {
source: s,
scheme_end: None,
authority_end: Some(len),
query_start: None,
fragment_start: None,
})
}
}
/// Get the path of this `Uri`.
#[inline]
pub fn path(&self) -> &str {
let index = self.path_start();
let end = self.path_end();
if index >= end {
if self.scheme().is_some() {
"/" // absolute-form MUST have path
} else {
""
}
} else {
&self.source[index..end]
}
}
#[inline]
fn path_start(&self) -> usize {
self.authority_end.unwrap_or(self.scheme_end.unwrap_or(0))
}
#[inline]
fn path_end(&self) -> usize {
if let Some(query) = self.query_start {
query
} else if let Some(fragment) = self.fragment_start {
fragment
} else {
self.source.len()
}
}
#[inline]
fn origin_form_end(&self) -> usize {
if let Some(fragment) = self.fragment_start {
fragment
} else {
self.source.len()
}
}
/// Get the scheme of this `Uri`.
#[inline]
pub fn scheme(&self) -> Option<&str> {
if let Some(end) = self.scheme_end {
Some(&self.source[..end])
} else {
None
}
}
/// Get the authority of this `Uri`.
#[inline]
pub fn authority(&self) -> Option<&str> {
if let Some(end) = self.authority_end {
let index = self.scheme_end.map(|i| i + 3).unwrap_or(0);
Some(&self.source[index..end])
} else {
None
}
}
/// Get the host of this `Uri`.
#[inline]
pub fn host(&self) -> Option<&str> {
if let Some(auth) = self.authority() {
auth.split(":").next()
} else {
None
}
}
/// Get the port of this `Uri`.
#[inline]
pub fn port(&self) -> Option<u16> {
match self.authority() {
Some(auth) => auth.find(":").and_then(|i| u16::from_str(&auth[i+1..]).ok()),
None => None,
}
}
/// Get the query string of this `Uri`, starting after the `?`.
#[inline]
pub fn query(&self) -> Option<&str> {
self.query_start.map(|start| {
// +1 to remove '?'
let start = start + 1;
if let Some(end) = self.fragment_start {
&self.source[start..end]
} else {
&self.source[start..]
}
})
}
/// Returns whether this URI is in `absolute-form`.
///
/// An example of absolute form is `https://hyper.rs`.
#[inline]
pub fn is_absolute(&self) -> bool {
self.scheme_end.is_some()
}
#[cfg(test)]
fn fragment(&self) -> Option<&str> {<|fim▁hole|> })
}
}
fn parse_scheme(s: &str) -> Option<usize> {
s.find(':')
}
fn parse_authority(s: &str) -> usize {
let i = s.find("://").map(|p| p + 3).unwrap_or(0);
s[i..].find('/')
.or_else(|| s[i..].find('?'))
.or_else(|| s[i..].find('#'))
.map(|end| end + i)
.unwrap_or(s.len())
}
fn parse_query(s: &str) -> Option<usize> {
s.find('?').and_then(|i| {
if let Some(frag) = s.find('#') {
if frag < i {
None
} else {
Some(i)
}
} else {
Some(i)
}
})
}
fn parse_fragment(s: &str) -> Option<usize> {
s.find('#')
}
impl FromStr for Uri {
type Err = UriError;
fn from_str(s: &str) -> Result<Uri, UriError> {
//TODO: refactor such that the to_owned() is only required at the end
//of successful parsing, so an Err doesn't needlessly clone the string.
Uri::new(ByteStr::from(s))
}
}
impl PartialEq for Uri {
fn eq(&self, other: &Uri) -> bool {
self.source.as_str() == other.source.as_str()
}
}
impl<'a> PartialEq<&'a str> for Uri {
fn eq(&self, other: & &'a str) -> bool {
self.source.as_str() == *other
}
}
impl<'a> PartialEq<Uri> for &'a str{
fn eq(&self, other: &Uri) -> bool {
*self == other.source.as_str()
}
}
impl Eq for Uri {}
impl AsRef<str> for Uri {
fn as_ref(&self) -> &str {
self.source.as_str()
}
}
impl Default for Uri {
fn default() -> Uri {
Uri {
source: ByteStr::from_static("/"),
scheme_end: None,
authority_end: None,
query_start: None,
fragment_start: None,
}
}
}
impl fmt::Debug for Uri {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self.as_ref(), f)
}
}
impl Display for Uri {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(self.as_ref())
}
}
pub fn from_byte_str(s: ByteStr) -> Result<Uri, UriError> {
Uri::new(s)
}
pub fn scheme_and_authority(uri: &Uri) -> Option<Uri> {
if uri.scheme_end.is_some() {
Some(Uri {
source: uri.source.slice_to(uri.authority_end.expect("scheme without authority")),
scheme_end: uri.scheme_end,
authority_end: uri.authority_end,
query_start: None,
fragment_start: None,
})
} else {
None
}
}
#[inline]
fn asterisk_form() -> Uri {
Uri {
source: ByteStr::from_static("*"),
scheme_end: None,
authority_end: None,
query_start: None,
fragment_start: None,
}
}
pub fn origin_form(uri: &Uri) -> Uri {
let range = Range(uri.path_start(), uri.origin_form_end());
let clone = if range.len() == 0 {
ByteStr::from_static("/")
} else if uri.source.as_bytes()[range.0] == b'*' {
return asterisk_form();
} else if uri.source.as_bytes()[range.0] != b'/' {
let mut new = BytesMut::with_capacity(range.1 - range.0 + 1);
new.put_u8(b'/');
new.put_slice(&uri.source.as_bytes()[range.0..range.1]);
// safety: the bytes are '/' + previous utf8 str
unsafe { ByteStr::from_utf8_unchecked(new.freeze()) }
} else if range.0 == 0 && range.1 == uri.source.len() {
uri.source.clone()
} else {
uri.source.slice(range.0, range.1)
};
Uri {
source: clone,
scheme_end: None,
authority_end: None,
query_start: uri.query_start,
fragment_start: None,
}
}
struct Range(usize, usize);
impl Range {
fn len(&self) -> usize {
self.1 - self.0
}
}
/// An error parsing a `Uri`.
#[derive(Clone, Debug)]
pub struct UriError(ErrorKind);
#[derive(Clone, Debug)]
enum ErrorKind {
Empty,
Malformed,
MissingAuthority,
}
impl fmt::Display for UriError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad(self.description())
}
}
impl StdError for UriError {
fn description(&self) -> &str {
match self.0 {
ErrorKind::Empty => "empty Uri string",
ErrorKind::Malformed => "invalid character in Uri authority",
ErrorKind::MissingAuthority => "absolute Uri missing authority segment",
}
}
}
macro_rules! test_parse {
(
$test_name:ident,
$str:expr,
$($method:ident = $value:expr,)*
) => (
#[test]
fn $test_name() {
let uri = Uri::from_str($str).unwrap();
println!("{:?} = {:#?}", $str, uri);
$(
assert_eq!(uri.$method(), $value);
)+
}
);
}
test_parse! {
test_uri_parse_origin_form,
"/some/path/here?and=then&hello#and-bye",
scheme = None,
authority = None,
path = "/some/path/here",
query = Some("and=then&hello"),
fragment = Some("and-bye"),
}
test_parse! {
test_uri_parse_absolute_form,
"http://127.0.0.1:61761/chunks",
scheme = Some("http"),
authority = Some("127.0.0.1:61761"),
path = "/chunks",
query = None,
fragment = None,
port = Some(61761),
}
test_parse! {
test_uri_parse_absolute_form_without_path,
"https://127.0.0.1:61761",
scheme = Some("https"),
authority = Some("127.0.0.1:61761"),
path = "/",
query = None,
fragment = None,
port = Some(61761),
}
test_parse! {
test_uri_parse_asterisk_form,
"*",
scheme = None,
authority = None,
path = "*",
query = None,
fragment = None,
}
test_parse! {
test_uri_parse_authority_no_port,
"localhost",
scheme = None,
authority = Some("localhost"),
path = "",
query = None,
fragment = None,
port = None,
}
test_parse! {
test_uri_parse_authority_form,
"localhost:3000",
scheme = None,
authority = Some("localhost:3000"),
path = "",
query = None,
fragment = None,
port = Some(3000),
}
test_parse! {
test_uri_parse_absolute_with_default_port_http,
"http://127.0.0.1:80",
scheme = Some("http"),
authority = Some("127.0.0.1:80"),
path = "/",
query = None,
fragment = None,
port = Some(80),
}
test_parse! {
test_uri_parse_absolute_with_default_port_https,
"https://127.0.0.1:443",
scheme = Some("https"),
authority = Some("127.0.0.1:443"),
path = "/",
query = None,
fragment = None,
port = Some(443),
}
test_parse! {
test_uri_parse_fragment_questionmark,
"http://127.0.0.1/#?",
scheme = Some("http"),
authority = Some("127.0.0.1"),
path = "/",
query = None,
fragment = Some("?"),
port = None,
}
test_parse! {
test_uri_parse_path_with_terminating_questionmark,
"http://127.0.0.1/path?",
scheme = Some("http"),
authority = Some("127.0.0.1"),
path = "/path",
query = Some(""),
fragment = None,
port = None,
}
test_parse! {
test_uri_parse_absolute_form_with_empty_path_and_nonempty_query,
"http://127.0.0.1?foo=bar",
scheme = Some("http"),
authority = Some("127.0.0.1"),
path = "/",
query = Some("foo=bar"),
fragment = None,
port = None,
}
#[test]
fn test_uri_parse_error() {
fn err(s: &str) {
Uri::from_str(s).unwrap_err();
}
err("http://");
err("htt:p//host");
err("hyper.rs/");
err("hyper.rs?key=val");
err("?key=val");
err("localhost/");
err("localhost?key=val");
}
#[test]
fn test_uri_to_origin_form() {
let cases = vec![
("/", "/"),
("/foo?bar", "/foo?bar"),
("/foo?bar#nope", "/foo?bar"),
("http://hyper.rs", "/"),
("http://hyper.rs/", "/"),
("http://hyper.rs/path", "/path"),
("http://hyper.rs?query", "/?query"),
("*", "*"),
];
for case in cases {
let uri = Uri::from_str(case.0).unwrap();
assert_eq!(origin_form(&uri), case.1); //, "{:?}", case);
}
}<|fim▁end|>
|
self.fragment_start.map(|start| {
// +1 to remove the '#'
&self.source[start + 1..]
|
<|file_name|>teardown.go<|end_file_name|><|fim▁begin|>// Copyright 2020 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package provider
import (
"context"
"sync"
"time"
jujuclock "github.com/juju/clock"
"github.com/juju/errors"
apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
k8slabels "k8s.io/apimachinery/pkg/labels"
"github.com/juju/juju/caas/kubernetes/provider/utils"
"github.com/juju/juju/core/watcher"
)
func (k *kubernetesClient) deleteClusterScopeResourcesModelTeardown(ctx context.Context, wg *sync.WaitGroup, errChan chan<- error) {
defer wg.Done()
labels := utils.LabelsForModel(k.CurrentModel(), k.IsLegacyLabels())
selector := k8slabels.NewSelector().Add(
labelSetToRequirements(labels)...,
)
// TODO(caas): Fix to only delete cluster wide resources created by this controller.
tasks := []teardownResources{
k.deleteClusterRoleBindingsModelTeardown,
k.deleteClusterRolesModelTeardown,
k.deleteClusterScopeAPIExtensionResourcesModelTeardown,
k.deleteMutatingWebhookConfigurationsModelTeardown,
k.deleteValidatingWebhookConfigurationsModelTeardown,
k.deleteStorageClassesModelTeardown,
}
var subwg sync.WaitGroup
subwg.Add(len(tasks))
defer subwg.Wait()
for _, f := range tasks {
go f(ctx, selector, k.clock, &subwg, errChan)
}
}
type teardownResources func(
context.Context,
k8slabels.Selector,
jujuclock.Clock,
*sync.WaitGroup,
chan<- error,
)
func (k *kubernetesClient) deleteClusterRoleBindingsModelTeardown(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
) {
ensureResourcesDeletedFunc(ctx, selector, clk, wg, errChan,
k.deleteClusterRoleBindings, func(selector k8slabels.Selector) error {
_, err := k.listClusterRoleBindings(selector)
return err
},
)
}
func (k *kubernetesClient) deleteClusterRolesModelTeardown(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
) {
ensureResourcesDeletedFunc(ctx, selector, clk, wg, errChan,
k.deleteClusterRoles, func(selector k8slabels.Selector) error {
_, err := k.listClusterRoles(selector)
return err
},
)
}
func (k *kubernetesClient) deleteClusterScopeAPIExtensionResourcesModelTeardown(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
) {
defer wg.Done()
var subwg sync.WaitGroup
subwg.Add(2)
defer subwg.Wait()
selector = mergeSelectors(selector, lifecycleModelTeardownSelector)
// Delete CRs first then CRDs.
k.deleteClusterScopeCustomResourcesModelTeardown(ctx, selector, clk, &subwg, errChan)
k.deleteCustomResourceDefinitionsModelTeardown(ctx, selector, clk, &subwg, errChan)
}
func (k *kubernetesClient) deleteClusterScopeCustomResourcesModelTeardown(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
) {
getSelector := func(crd apiextensionsv1.CustomResourceDefinition) k8slabels.Selector {
if !isCRDScopeNamespaced(crd.Spec.Scope) {
// We only delete cluster scope CRs here, namespaced CRs are deleted by namespace destroy process.
return selector
}
return k8slabels.NewSelector()
}
ensureResourcesDeletedFunc(ctx, selector, clk, wg, errChan,
func(_ k8slabels.Selector) error {
return k.deleteCustomResources(getSelector)
},
func(_ k8slabels.Selector) error {
_, err := k.listCustomResources(getSelector)
return err
},
)
}
func (k *kubernetesClient) deleteCustomResourceDefinitionsModelTeardown(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
) {
ensureResourcesDeletedFunc(ctx, selector, clk, wg, errChan,
k.deleteCustomResourceDefinitions, func(selector k8slabels.Selector) error {
_, err := k.listCustomResourceDefinitions(selector)
return err
},
)
}
func (k *kubernetesClient) deleteMutatingWebhookConfigurationsModelTeardown(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
) {
ensureResourcesDeletedFunc(ctx, selector, clk, wg, errChan,
k.deleteMutatingWebhookConfigurations, func(selector k8slabels.Selector) error {
_, err := k.listMutatingWebhookConfigurations(selector)
return err
},
)
}
func (k *kubernetesClient) deleteValidatingWebhookConfigurationsModelTeardown(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
) {
ensureResourcesDeletedFunc(ctx, selector, clk, wg, errChan,
k.deleteValidatingWebhookConfigurations, func(selector k8slabels.Selector) error {
_, err := k.listValidatingWebhookConfigurations(selector)
return err
},
)
}
func (k *kubernetesClient) deleteStorageClassesModelTeardown(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
) {
ensureResourcesDeletedFunc(ctx, selector, clk, wg, errChan,
k.deleteStorageClasses, func(selector k8slabels.Selector) error {
_, err := k.listStorageClasses(selector)
return err
},
)
}
type deleterChecker func(k8slabels.Selector) error
func ensureResourcesDeletedFunc(
ctx context.Context,
selector k8slabels.Selector,
clk jujuclock.Clock,
wg *sync.WaitGroup,
errChan chan<- error,
deleter, checker deleterChecker,<|fim▁hole|>
var err error
defer func() {
if err != nil {
select {
case errChan <- err:
default:
}
}
}()
if err = deleter(selector); err != nil {
if errors.IsNotFound(err) {
err = nil
}
return
}
interval := 1 * time.Second
ticker := clk.NewTimer(interval)
defer ticker.Stop()
for {
select {
case <-ctx.Done():
err = errors.Trace(ctx.Err())
return
case <-ticker.Chan():
err = checker(selector)
if errors.IsNotFound(err) {
// Deleted already.
err = nil
return
}
if err != nil {
err = errors.Trace(err)
return
}
}
// Keep checking.
ticker.Reset(interval)
}
}
func (k *kubernetesClient) deleteNamespaceModelTeardown(ctx context.Context, wg *sync.WaitGroup, errChan chan<- error) {
defer wg.Done()
var err error
defer func() {
if err != nil {
select {
case errChan <- err:
default:
}
}
}()
var w watcher.NotifyWatcher
if w, err = k.WatchNamespace(); err != nil {
err = errors.Annotatef(err, "watching namespace %q", k.namespace)
return
}
defer w.Kill()
if err = k.deleteNamespace(); err != nil {
err = errors.Annotatef(err, "deleting model namespace %q", k.namespace)
return
}
for {
select {
case <-ctx.Done():
err = errors.Annotatef(ctx.Err(), "tearing down namespace %q", k.namespace)
return
case <-w.Changes():
// Ensure the namespace has been deleted - a NotFound error is expected.
_, err = k.GetNamespace(k.namespace)
if errors.IsNotFound(err) {
// Namespace has been deleted.
err = nil
return
}
if err != nil {
err = errors.Trace(err)
return
}
logger.Debugf("namespace %q is still terminating", k.namespace)
}
}
}<|fim▁end|>
|
) {
defer wg.Done()
|
<|file_name|>status_service.d.ts<|end_file_name|><|fim▁begin|>/// <reference types="request" />
import SkypeAccount from './skype_account';
import { CookieJar } from "request";
import Status from "./status/status";<|fim▁hole|> private eventEmitter;
constructor(cookieJar: CookieJar, eventEmitter: EventEmitter);
setStatus(skypeAccount: SkypeAccount, status: Status): void;
}
export default StatusService;<|fim▁end|>
|
import { EventEmitter } from "./utils";
export declare class StatusService {
private requestWithJar;
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, url<|fim▁hole|> url(r'^add/', dr.make_waybill),
url(r'^del/', dr.remove_waybill),
url(r'^find/', dr.search_waybill),
url(r'^take/', dr.take_place),
url(r'^trips/', dr.trips),
url(r'^cancel_trip/', dr.cancel_trip),
url(r'^passangers/', dr.passangers),
url(r'^travel/', dr.travel),
)<|fim▁end|>
|
from iyoume.waybill import views as dr
urlpatterns = patterns('',
|
<|file_name|>DedicatedArticlesBreadcrumbs_Test_Query.graphql.ts<|end_file_name|><|fim▁begin|>/* tslint:disable */
/* eslint-disable */
// @ts-nocheck
import { ConcreteRequest } from "relay-runtime";
import { FragmentRefs } from "relay-runtime";
export type DedicatedArticlesBreadcrumbs_Test_QueryVariables = {};
export type DedicatedArticlesBreadcrumbs_Test_QueryResponse = {
readonly fairOrganizer: {
readonly " $fragmentRefs": FragmentRefs<"DedicatedArticlesBreadcrumbs_fairOrganizer">;
} | null;
};
export type DedicatedArticlesBreadcrumbs_Test_Query = {
readonly response: DedicatedArticlesBreadcrumbs_Test_QueryResponse;
readonly variables: DedicatedArticlesBreadcrumbs_Test_QueryVariables;
};
/*
query DedicatedArticlesBreadcrumbs_Test_Query {
fairOrganizer(id: "example") {
...DedicatedArticlesBreadcrumbs_fairOrganizer
id
}
}
fragment DedicatedArticlesBreadcrumbs_fairOrganizer on FairOrganizer {
slug
name
profile {
image {
resized(width: 30, height: 30, version: "square") {
src
srcSet
}
}
id
}
}
*/<|fim▁hole|> {
"kind": "Literal",
"name": "id",
"value": "example"
}
],
v1 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "id",
"storageKey": null
},
v2 = {
"enumValues": null,
"nullable": false,
"plural": false,
"type": "ID"
},
v3 = {
"enumValues": null,
"nullable": false,
"plural": false,
"type": "String"
};
return {
"fragment": {
"argumentDefinitions": [],
"kind": "Fragment",
"metadata": null,
"name": "DedicatedArticlesBreadcrumbs_Test_Query",
"selections": [
{
"alias": null,
"args": (v0/*: any*/),
"concreteType": "FairOrganizer",
"kind": "LinkedField",
"name": "fairOrganizer",
"plural": false,
"selections": [
{
"args": null,
"kind": "FragmentSpread",
"name": "DedicatedArticlesBreadcrumbs_fairOrganizer"
}
],
"storageKey": "fairOrganizer(id:\"example\")"
}
],
"type": "Query",
"abstractKey": null
},
"kind": "Request",
"operation": {
"argumentDefinitions": [],
"kind": "Operation",
"name": "DedicatedArticlesBreadcrumbs_Test_Query",
"selections": [
{
"alias": null,
"args": (v0/*: any*/),
"concreteType": "FairOrganizer",
"kind": "LinkedField",
"name": "fairOrganizer",
"plural": false,
"selections": [
{
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "slug",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "name",
"storageKey": null
},
{
"alias": null,
"args": null,
"concreteType": "Profile",
"kind": "LinkedField",
"name": "profile",
"plural": false,
"selections": [
{
"alias": null,
"args": null,
"concreteType": "Image",
"kind": "LinkedField",
"name": "image",
"plural": false,
"selections": [
{
"alias": null,
"args": [
{
"kind": "Literal",
"name": "height",
"value": 30
},
{
"kind": "Literal",
"name": "version",
"value": "square"
},
{
"kind": "Literal",
"name": "width",
"value": 30
}
],
"concreteType": "ResizedImageUrl",
"kind": "LinkedField",
"name": "resized",
"plural": false,
"selections": [
{
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "src",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "srcSet",
"storageKey": null
}
],
"storageKey": "resized(height:30,version:\"square\",width:30)"
}
],
"storageKey": null
},
(v1/*: any*/)
],
"storageKey": null
},
(v1/*: any*/)
],
"storageKey": "fairOrganizer(id:\"example\")"
}
]
},
"params": {
"cacheID": "2df6ba381ddfe63972a0899de2e26f73",
"id": null,
"metadata": {
"relayTestingSelectionTypeInfo": {
"fairOrganizer": {
"enumValues": null,
"nullable": true,
"plural": false,
"type": "FairOrganizer"
},
"fairOrganizer.id": (v2/*: any*/),
"fairOrganizer.name": {
"enumValues": null,
"nullable": true,
"plural": false,
"type": "String"
},
"fairOrganizer.profile": {
"enumValues": null,
"nullable": true,
"plural": false,
"type": "Profile"
},
"fairOrganizer.profile.id": (v2/*: any*/),
"fairOrganizer.profile.image": {
"enumValues": null,
"nullable": true,
"plural": false,
"type": "Image"
},
"fairOrganizer.profile.image.resized": {
"enumValues": null,
"nullable": true,
"plural": false,
"type": "ResizedImageUrl"
},
"fairOrganizer.profile.image.resized.src": (v3/*: any*/),
"fairOrganizer.profile.image.resized.srcSet": (v3/*: any*/),
"fairOrganizer.slug": (v2/*: any*/)
}
},
"name": "DedicatedArticlesBreadcrumbs_Test_Query",
"operationKind": "query",
"text": "query DedicatedArticlesBreadcrumbs_Test_Query {\n fairOrganizer(id: \"example\") {\n ...DedicatedArticlesBreadcrumbs_fairOrganizer\n id\n }\n}\n\nfragment DedicatedArticlesBreadcrumbs_fairOrganizer on FairOrganizer {\n slug\n name\n profile {\n image {\n resized(width: 30, height: 30, version: \"square\") {\n src\n srcSet\n }\n }\n id\n }\n}\n"
}
};
})();
(node as any).hash = 'afa30fa8497e508d19f3bc05ee3a7a75';
export default node;<|fim▁end|>
|
const node: ConcreteRequest = (function(){
var v0 = [
|