File: LocalServicesWebRunner.java

/*******************************************************************************
 * Copyright (c) 2010, 2012 Tasktop Technologies
 *
 * All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Tasktop Technologies - initial API and implementation
******************************************************************************/
package com.tasktop.c2c.server.developer.support;
import org.eclipse.jetty.ajp.Ajp13SocketConnector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.webapp.WebAppContext;
/**
* @author Clint Morgan (Tasktop Technologies Inc.)
*
*/
public class LocalServicesWebRunner {
public static void main(String[] args) throws Exception {
Server server = new Server(8080);
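		// Each webapp below is served straight from its sibling project's source
		// tree, with parent-loader priority so it shares the runner's classpath.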
WebAppContext serviceContext = new WebAppContext();
serviceContext.setResourceBase("../com.tasktop.c2c.server.services.web/src/main/webapp");
serviceContext.setContextPath("/services");
serviceContext.setParentLoaderPriority(true);
WebAppContext taskContext = new WebAppContext();
taskContext.setResourceBase("../com.tasktop.c2c.server.tasks.web/src/main/webapp");
taskContext.setContextPath("/tasks");
taskContext.setParentLoaderPriority(true);
WebAppContext wikiContext = new WebAppContext();
wikiContext.setResourceBase("../com.tasktop.c2c.server.wiki.web/src/main/webapp");
wikiContext.setContextPath("/wiki");
wikiContext.setParentLoaderPriority(true);
WebAppContext hudsonConfigContext = new WebAppContext();
hudsonConfigContext.setResourceBase("../com.tasktop.c2c.server.hudson.web/src/main/webapp");
hudsonConfigContext.setContextPath("/hudson-config");
hudsonConfigContext.setParentLoaderPriority(true);
ContextHandlerCollection handlers = new ContextHandlerCollection();
handlers.setHandlers(new Handler[] { serviceContext, taskContext, wikiContext, hudsonConfigContext });
server.setHandler(handlers);
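		// Also expose an AJP connector (port 8009) so a fronting web server
		// such as Apache httpd can proxy requests to this Jetty instance.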
Ajp13SocketConnector ajpCon = new Ajp13SocketConnector();
ajpCon.setPort(8009);
server.addConnector(ajpCon);
server.start();
server.join();
}
}

File: setup_util.py

from builtins import str
from builtins import range
from builtins import object
import json
import os
import solnlib.utils as utils
from splunktaucclib.global_config import GlobalConfig, GlobalConfigSchema
'''
Usage Examples:
setup_util = Setup_Util(uri, session_key)
setup_util.get_log_level()
setup_util.get_proxy_settings()
setup_util.get_credential_account("my_account_name")
setup_util.get_customized_setting("my_customized_field_name")
'''
'''
setting object structure.
It is stored in self.__cached_global_settings
Note, this structure is only maintained in this util.
setup_util transforms global settings in os environment or from ucc into this structure.
{
"proxy_settings": {
"proxy_enabled": False/True,
"proxy_url": "example.com",
"proxy_port": "1234",
"proxy_username": "",
"proxy_password": "",
"proxy_type": "http",
"proxy_rdns": False/True
},
"log_settings": {
"loglevel": "DEBUG"
},
"credential_settings": [{
"name": "account_id",
"username": "example_account",
"password": "example_password"
    }, { # supported by UCC; no usage of this form seen in AoB
"api_key": "admin",
"api_uuid": "admin",
"endpoint": "some url",
"name": "account1"
}],
"customized_settings": {
"text_name": "content",
"pass_name": "password",
"checkbox": 0/1
}
}
'''
GLOBAL_SETTING_KEY = "global_settings"
AOB_TEST_FLAG = 'AOB_TEST'
PROXY_SETTINGS = "proxy_settings"
LOG_SETTINGS = "log_settings"
CREDENTIAL_SETTINGS = "credential_settings"
CUSTOMIZED_SETTINGS = "customized_settings"
UCC_PROXY = "proxy"
UCC_LOGGING = "logging"
UCC_CUSTOMIZED = "additional_parameters"
UCC_CREDENTIAL = "account"
CONFIGS = [CREDENTIAL_SETTINGS]
SETTINGS = [PROXY_SETTINGS, LOG_SETTINGS, CUSTOMIZED_SETTINGS]
PROXY_ENABLE_KEY = 'proxy_enabled'
PROXY_RDNS_KEY = 'proxy_rdns'
LOG_LEVEL_KEY = 'loglevel'
LOG_LEVEL_KEY_ENV = 'log_level'
TYPE_CHECKBOX = "checkbox"
ALL_SETTING_TYPES = ['text', 'password', 'checkbox', 'dropdownlist', 'multi_dropdownlist', 'radiogroup']
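# globalConfig.json is generated into appserver/static/js/build/ at the add-on
# root, reached here by walking five dirname() hops up from this module.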
def get_schema_path():
dirname = os.path.dirname
    basedir = dirname(dirname(dirname(dirname(dirname(__file__)))))
return os.path.join(basedir, 'appserver', 'static', 'js', 'build', 'globalConfig.json')
class Setup_Util(object):
def __init__(self, uri, session_key, logger=None):
self.__uri = uri
self.__session_key = session_key
self.__logger = logger
self.scheme, self.host, self.port = utils.extract_http_scheme_host_port(
self.__uri)
self.__cached_global_settings = {}
self.__global_config = None
def init_global_config(self):
if self.__global_config is not None:
return
schema_file = get_schema_path()
if not os.path.isfile(schema_file):
self.log_error("Global config JSON file not found!")
self.__global_config = None
else:
            with open(schema_file) as f:
                json_schema = f.read()
self.__global_config = GlobalConfig(self.__uri, self.__session_key,
GlobalConfigSchema(json.loads(json_schema)))
def log_error(self, msg):
if self.__logger:
self.__logger.error(msg)
def log_info(self, msg):
if self.__logger:
self.__logger.info(msg)
def log_debug(self, msg):
if self.__logger:
self.__logger.debug(msg)
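    # In AoB test mode (AOB_TEST=true) settings are read from the process
    # environment; otherwise they are loaded through the UCC global config.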
def _parse_conf(self, key):
if os.environ.get(AOB_TEST_FLAG, 'false') == 'true':
global_settings = self._parse_conf_from_env(json.loads(os.environ.get(GLOBAL_SETTING_KEY, '{}')))
return global_settings.get(key)
else:
return self._parse_conf_from_global_config(key)
def _parse_conf_from_env(self, global_settings):
'''
this is run in test env
'''
if not self.__cached_global_settings:
            # Format the settings; in the test env they come from the
            # global_setting meta.
self.__cached_global_settings = {}
for s_k, s_v in list(global_settings.items()):
if s_k == PROXY_SETTINGS:
proxy_enabled = s_v.get(PROXY_ENABLE_KEY)
proxy_rdns = s_v.get(PROXY_RDNS_KEY)
                    if not isinstance(proxy_enabled, bool):
                        s_v[PROXY_ENABLE_KEY] = utils.is_true(proxy_enabled)
                    if not isinstance(proxy_rdns, bool):
                        s_v[PROXY_RDNS_KEY] = utils.is_true(proxy_rdns)
self.__cached_global_settings[PROXY_SETTINGS] = s_v
elif s_k == LOG_SETTINGS:
self.__cached_global_settings[LOG_SETTINGS] = {
LOG_LEVEL_KEY: s_v.get(LOG_LEVEL_KEY_ENV)
}
elif s_k == CREDENTIAL_SETTINGS:
# add account id to accounts
for i in range(0, len(s_v)):
s_v[i]['name'] = 'account' + str(i)
self.__cached_global_settings[CREDENTIAL_SETTINGS] = s_v
else: # should be customized settings
self.__cached_global_settings[CUSTOMIZED_SETTINGS] = {}
for s in s_v:
field_type = s.get('type')
if not field_type:
self.log_error(
'unknown type for customized var:{}'.format(s))
continue
                        self.__cached_global_settings[CUSTOMIZED_SETTINGS][s.get('name', '')] = self._transform(
                            s.get("value", ""), field_type)
return self.__cached_global_settings
def _parse_conf_from_global_config(self, key):
if self.__cached_global_settings and key in self.__cached_global_settings:
return self.__cached_global_settings.get(key)
self.init_global_config()
if self.__global_config is None:
return None
if key in CONFIGS:
accounts = self.__global_config.configs.load().get(UCC_CREDENTIAL, [])
if accounts:
for account in accounts:
if 'disabled' in account:
del account['disabled']
self.__cached_global_settings[CREDENTIAL_SETTINGS] = accounts
elif key in SETTINGS:
settings = self.__global_config.settings.load()
            self.__cached_global_settings.update(
                {PROXY_SETTINGS: None, LOG_SETTINGS: None, CUSTOMIZED_SETTINGS: None})
customized_setting = {}
for setting in settings.get('settings', []):
# filter out disabled setting page and 'disabled' field
if setting.get('disabled', False):
continue
if setting['name'] == UCC_LOGGING:
self.__cached_global_settings[LOG_SETTINGS] = {
LOG_LEVEL_KEY: setting.get(LOG_LEVEL_KEY)
}
elif setting['name'] == UCC_PROXY:
if 'disabled' in setting:
del setting['disabled']
setting[PROXY_ENABLE_KEY] = utils.is_true(setting.get(PROXY_ENABLE_KEY, '0'))
setting[PROXY_RDNS_KEY] = utils.is_true(setting.get(PROXY_RDNS_KEY, '0'))
self.__cached_global_settings[PROXY_SETTINGS] = setting
else: # should be customized settings
if 'disabled' in setting:
del setting['disabled']
customized_setting.update(setting)
self.__cached_global_settings[CUSTOMIZED_SETTINGS] = customized_setting
return self.__cached_global_settings.get(key)
def get_log_level(self):
log_level = "INFO"
log_settings = self._parse_conf(LOG_SETTINGS)
if log_settings is None:
self.log_info("Log level is not set, use default INFO")
else:
log_level = log_settings.get(LOG_LEVEL_KEY, None)
if not log_level:
self.log_info("Log level is not set, use default INFO")
log_level = "INFO"
return log_level
def get_proxy_settings(self):
proxy_settings = self._parse_conf(PROXY_SETTINGS)
if proxy_settings is None:
self.log_info("Proxy is not set!")
return {}
proxy_enabled = proxy_settings.get(PROXY_ENABLE_KEY)
        if not proxy_enabled:
            self.log_info("Proxy is not enabled!")
            return {}
        proxy_settings = {
"proxy_url": proxy_settings.get("proxy_url", ""),
"proxy_port": proxy_settings.get("proxy_port", None),
"proxy_username": proxy_settings.get("proxy_username", ""),
"proxy_password": proxy_settings.get("proxy_password", ""),
"proxy_type": proxy_settings.get("proxy_type", ""),
"proxy_rdns": proxy_settings.get("proxy_rdns")
}
self._validate_proxy_settings(proxy_settings)
return proxy_settings
def get_credential_by_id(self, account_id):
credential_settings = self._parse_conf(CREDENTIAL_SETTINGS)
for account in credential_settings:
if account.get('name', None) == account_id:
return account
self.log_error("Credential account with account id {} can not be found".format(account_id))
return None
def get_credential_by_username(self, username):
credential_settings = self._parse_conf(CREDENTIAL_SETTINGS)
for account in credential_settings:
if account.get('username', None) == username:
return account
self.log_error("Credential account with username {} can not be found".format(username))
return None
def get_customized_setting(self, key):
customized_settings = self._parse_conf(CUSTOMIZED_SETTINGS)
if customized_settings is None:
self.log_info("Customized setting is not set")
return None
if key not in customized_settings:
self.log_info("Customized key can not be found")
return None
customized_setting = customized_settings.get(key, None)
if customized_setting is None:
self.log_error("Cannot find customized setting with key %s" % key)
return customized_setting
def _validate_proxy_settings(self, proxy_settings):
if proxy_settings:
if proxy_settings.get('proxy_url') == "":
raise Exception("Proxy host must not be empty!")
proxy_port = proxy_settings.get('proxy_port')
            if proxy_port is None or not str(proxy_port).isdigit():
raise Exception("Proxy port must be a number!")
def _transform(self, value, field_type):
'''
        This method is only used when parsing customized global params from the env.
        Only the checkbox type needs transformation; other types are extracted automatically when json.loads is applied.
:param value:
:param field_type: can be checkbox, text, password, dropdownlist, multi_dropdownlist, radiogroup
:return:
'''
if field_type == TYPE_CHECKBOX:
return utils.is_true(value)
elif field_type in ALL_SETTING_TYPES:
return value
else:
raise Exception("Type of this customized setting is corrupted. Value: {}, type: {}"
.format(value, field_type))
'''
    # the following methods are used by AoB internally
    # users should not call them directly
    # they return structures similar to those of the UCC libs
the output of config is like
{
"account": [
{
"username": "admin",
"credential": "a",
"name": "ddddd",
"disabled": false
}
]
}
the output of settings is like
{
"settings": [
{
"additional_parameters": {
"checkbox": "1",
"text": "msn",
"disabled": false
}
},
{
"proxy": {
"proxy_type": "http",
"proxy_port": "9999",
"proxy_url": "localhost",
"proxy_rdns": "1",
"disabled": false,
"proxy_password": "a",
"proxy_username": "admin",
"proxy_enabled": "1"
}
},
{
"logging": {
"loglevel": "ERROR",
"disabled": false
}
}
]
}
'''
def get_ucc_log_setting(self):
return {UCC_LOGGING: self._parse_conf(LOG_SETTINGS)}
def get_ucc_proxy_setting(self):
p = dict(self.get_proxy_settings())
        p[PROXY_ENABLE_KEY] = bool(p)
return {
UCC_PROXY: p
}
def get_ucc_customized_setting(self):
customized_settings = self._parse_conf(CUSTOMIZED_SETTINGS)
if customized_settings:
return {
UCC_CUSTOMIZED: customized_settings
}
else:
return {}
# account belongs to the configs
def get_ucc_account_config(self):
return {
UCC_CREDENTIAL: self._parse_conf(CREDENTIAL_SETTINGS)
        }

File: index.js

import { AppRegistry } from "react-native";
import App from "./App";

AppRegistry.registerComponent("KeepTheBallGame", () => App);

File: creader.rs

// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
//! Validates all used crates and extern libraries and loads their metadata
use back::svh::Svh;
use driver::session::Session;
use driver::{driver, config};
use metadata::cstore;
use metadata::cstore::{CStore, CrateSource};
use metadata::decoder;
use metadata::loader;
use metadata::loader::CratePaths;
use plugin::load::PluginMetadata;
use std::rc::Rc;
use std::collections::HashMap;
use std::collections::hashmap::{Occupied, Vacant};
use syntax::ast;
use syntax::abi;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::codemap::{Span};
use syntax::diagnostic::SpanHandler;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::visit;
use util::fs;
struct Env<'a> {
sess: &'a Session,
next_crate_num: ast::CrateNum,
}
// Traverses an AST, reading all the information about use'd crates and extern
// libraries necessary for later resolving, typechecking, linking, etc.
pub fn read_crates(sess: &Session,
krate: &ast::Crate) {
let mut e = Env {
sess: sess,
next_crate_num: sess.cstore.next_crate_num(),
};
visit_crate(&e, krate);
visit::walk_crate(&mut e, krate);
dump_crates(&sess.cstore);
warn_if_multiple_versions(sess.diagnostic(), &sess.cstore)
}
impl<'a, 'v> visit::Visitor<'v> for Env<'a> {
fn visit_view_item(&mut self, a: &ast::ViewItem) {
visit_view_item(self, a);
visit::walk_view_item(self, a);
}
fn visit_item(&mut self, a: &ast::Item) {
visit_item(self, a);
visit::walk_item(self, a);
}
}
fn dump_crates(cstore: &CStore) {
debug!("resolved crates:");
cstore.iter_crate_data_origins(|_, data, opt_source| {
debug!(" name: {}", data.name());
debug!(" cnum: {}", data.cnum);
debug!(" hash: {}", data.hash());
opt_source.map(|cs| {
let CrateSource { dylib, rlib, cnum: _ } = cs;
dylib.map(|dl| debug!(" dylib: {}", dl.display()));
rlib.map(|rl| debug!(" rlib: {}", rl.display()));
});
})
}
fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) {
let mut map = HashMap::new();
cstore.iter_crate_data(|cnum, data| {
match map.entry(data.name()) {
Vacant(entry) => { entry.set(vec![cnum]); },
Occupied(mut entry) => { entry.get_mut().push(cnum); },
}
});
for (name, dupes) in map.into_iter() {
if dupes.len() == 1 { continue }
diag.handler().warn(
format!("using multiple versions of crate `{}`", name).as_slice());
for dupe in dupes.into_iter() {
let data = cstore.get_crate_data(dupe);
diag.span_note(data.span, "used here");
loader::note_crate_name(diag, data.name().as_slice());
}
}
}
fn visit_crate(e: &Env, c: &ast::Crate) {
for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) {
match a.value_str() {
Some(ref linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
}
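// A `use`d crate should be linked unless it carries a `#[phase(...)]`
// attribute whose phase list omits `link` (e.g. `#[phase(plugin)]`).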
fn should_link(i: &ast::ViewItem) -> bool {
i.attrs.iter().all(|attr| {
attr.name().get() != "phase" ||
attr.meta_item_list().map_or(false, |phases| {
attr::contains_name(phases.as_slice(), "link")
})
})
}
fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
if !should_link(i) {
return;
}
match extract_crate_info(e, i) {
Some(info) => {
let (cnum, _, _) = resolve_crate(e,
&None,
info.ident.as_slice(),
info.name.as_slice(),
None,
i.span);
e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
None => ()
}
}
struct CrateInfo {
ident: String,
name: String,
id: ast::NodeId,
should_link: bool,
}
fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
match i.node {
ast::ViewItemExternCrate(ident, ref path_opt, id) => {
let ident = token::get_ident(ident);
debug!("resolving extern crate stmt. ident: {} path_opt: {}",
ident, path_opt);
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.get().to_string();
validate_crate_name(Some(e.sess), name.as_slice(),
Some(i.span));
name
}
None => ident.get().to_string(),
};
Some(CrateInfo {
ident: ident.get().to_string(),
name: name,
id: id,
should_link: should_link(i),
})
}
_ => None
}
}
pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
let err = |s: &str| {
match (sp, sess) {
(_, None) => fail!("{}", s),
(Some(sp), Some(sess)) => sess.span_err(sp, s),
(None, Some(sess)) => sess.err(s),
}
};
if s.len() == 0 {
err("crate name must not be empty");
}
for c in s.chars() {
if c.is_alphanumeric() { continue }
if c == '_' || c == '-' { continue }
err(format!("invalid character `{}` in crate name: `{}`", c, s).as_slice());
}
match sess {
Some(sess) => sess.abort_if_errors(),
None => {}
}
}
fn visit_item(e: &Env, i: &ast::Item) {
match i.node {
ast::ItemForeignMod(ref fm) => {
if fm.abi == abi::Rust || fm.abi == abi::RustIntrinsic {
return;
}
// First, add all of the custom link_args attributes
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link_args")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.value_str() {
Some(linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
// Next, process all of the #[link(..)]-style arguments
let link_args = i.attrs.iter()
            .filter_map(|at| if at.name().equiv(&("link")) {
                Some(at)
            } else {
                None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.meta_item_list() {
Some(items) => {
let kind = items.iter().find(|k| {
k.name().equiv(&("kind"))
}).and_then(|a| a.value_str());
let kind = match kind {
Some(k) => {
if k.equiv(&("static")) {
cstore::NativeStatic
} else if (e.sess.targ_cfg.os == abi::OsMacos ||
e.sess.targ_cfg.os == abi::OsiOS) &&
k.equiv(&("framework")) {
cstore::NativeFramework
} else if k.equiv(&("framework")) {
e.sess.span_err(m.span,
"native frameworks are only available \
on OSX targets");
cstore::NativeUnknown
} else {
e.sess.span_err(m.span,
format!("unknown kind: `{}`",
k).as_slice());
cstore::NativeUnknown
}
}
None => cstore::NativeUnknown
};
let n = items.iter().find(|n| {
n.name().equiv(&("name"))
}).and_then(|a| a.value_str());
let n = match n {
Some(n) => n,
None => {
e.sess.span_err(m.span,
"#[link(...)] specified without \
`name = \"foo\"`");
InternedString::new("foo")
}
};
if n.get().is_empty() {
e.sess.span_err(m.span,
"#[link(name = \"\")] given with \
empty name");
} else {
e.sess
.cstore
.add_used_library(n.get().to_string(), kind);
}
}
None => {}
}
}
}
_ => { }
}
}
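// Look up a crate that is already registered in the crate store, matching by
// name and, when given, by hash, so its metadata is not read a second time.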
fn existing_match(e: &Env, name: &str,
hash: Option<&Svh>) -> Option<ast::CrateNum> {
let mut ret = None;
e.sess.cstore.iter_crate_data(|cnum, data| {
if data.name.as_slice() != name { return }
match hash {
Some(hash) if *hash == data.hash() => { ret = Some(cnum); return }
Some(..) => return,
None => {}
}
// When the hash is None we're dealing with a top-level dependency in
// which case we may have a specification on the command line for this
// library. Even though an upstream library may have loaded something of
// the same name, we have to make sure it was loaded from the exact same
// location as well.
//
// We're also sure to compare *paths*, not actual byte slices. The
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = e.sess.cstore.get_used_crate_source(cnum).unwrap();
match e.sess.opts.externs.find_equiv(&name) {
Some(locs) => {
let found = locs.iter().any(|l| {
let l = fs::realpath(&Path::new(l.as_slice())).ok();
l == source.dylib || l == source.rlib
});
if found {
ret = Some(cnum);
}
}
None => ret = Some(cnum),
}
});
return ret;
}
fn register_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
span: Span,
lib: loader::Library)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
// Claim this crate number and cache it
let cnum = e.next_crate_num;
e.next_crate_num += 1;
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
Some(CratePaths {
ident: ident.to_string(),
dylib: lib.dylib.clone(),
rlib: lib.rlib.clone(),
})
} else {
None
};
// Maintain a reference to the top most crate.
let root = if root.is_some() { root } else { &crate_paths };
let cnum_map = resolve_crate_deps(e, root, lib.metadata.as_slice(), span);
let loader::Library{ dylib, rlib, metadata } = lib;
let cmeta = Rc::new( cstore::crate_metadata {
name: name.to_string(),
data: metadata,
cnum_map: cnum_map,
cnum: cnum,
span: span,
});
let source = cstore::CrateSource {
dylib: dylib,
rlib: rlib,
cnum: cnum,
};
e.sess.cstore.set_crate_data(cnum, cmeta.clone());
e.sess.cstore.add_used_crate_source(source.clone());
(cnum, cmeta, source)
}
fn resolve_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
hash: Option<&Svh>,
span: Span)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
match existing_match(e, name, hash) {
None => {
let mut load_ctxt = loader::Context {
sess: e.sess,
span: span,
ident: ident,
crate_name: name,
hash: hash.map(|a| &*a),
filesearch: e.sess.target_filesearch(),
os: e.sess.targ_cfg.os,
triple: e.sess.targ_cfg.target_strs.target_triple.as_slice(),
root: root,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = load_ctxt.load_library_crate();
register_crate(e, root, ident, name, span, library)
}
Some(cnum) => (cnum,
e.sess.cstore.get_crate_data(cnum),
e.sess.cstore.get_used_crate_source(cnum).unwrap())
}
}
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: &mut Env,
root: &Option<CratePaths>,
cdata: &[u8], span : Span)
-> cstore::cnum_map {
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
decoder::get_crate_deps(cdata).iter().map(|dep| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, _, _) = resolve_crate(e, root,
dep.name.as_slice(),
dep.name.as_slice(),
Some(&dep.hash),
span);
(dep.cnum, local_cnum)
}).collect()
}
pub struct PluginMetadataReader<'a> {
env: Env<'a>,
}
impl<'a> PluginMetadataReader<'a> {
pub fn new(sess: &'a Session) -> PluginMetadataReader<'a> {
PluginMetadataReader {
env: Env {
sess: sess,
next_crate_num: sess.cstore.next_crate_num(),
}
}
}
pub fn read_plugin_metadata(&mut self, krate: &ast::ViewItem) -> PluginMetadata {
let info = extract_crate_info(&self.env, krate).unwrap();
let target_triple = self.env.sess.targ_cfg.target_strs.target_triple.as_slice();
let is_cross = target_triple != driver::host_triple();
let mut should_link = info.should_link && !is_cross;
let os = config::get_os(driver::host_triple()).unwrap();
let mut load_ctxt = loader::Context {
sess: self.env.sess,
span: krate.span,
ident: info.ident.as_slice(),
crate_name: info.name.as_slice(),
hash: None,
filesearch: self.env.sess.host_filesearch(),
triple: driver::host_triple(),
os: os,
root: &None,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = match load_ctxt.maybe_load_library_crate() {
Some(l) => l,
None if is_cross => {
// try loading from target crates (only valid if there are
// no syntax extensions)
load_ctxt.triple = target_triple;
load_ctxt.os = self.env.sess.targ_cfg.os;
load_ctxt.filesearch = self.env.sess.target_filesearch();
let lib = load_ctxt.load_library_crate();
if decoder::get_plugin_registrar_fn(lib.metadata.as_slice()).is_some() {
let message = format!("crate `{}` contains a plugin_registrar fn but \
only a version for triple `{}` could be found (need {})",
info.ident, target_triple, driver::host_triple());
self.env.sess.span_err(krate.span, message.as_slice());
// need to abort now because the syntax expansion
// code will shortly attempt to load and execute
// code from the found library.
self.env.sess.abort_if_errors();
}
should_link = info.should_link;
lib
}
None => { load_ctxt.report_load_errs(); unreachable!() },
};
let macros = decoder::get_exported_macros(library.metadata.as_slice());
let registrar = decoder::get_plugin_registrar_fn(library.metadata.as_slice()).map(|id| {
decoder::get_symbol(library.metadata.as_slice(), id)
});
if library.dylib.is_none() && registrar.is_some() {
let message = format!("plugin crate `{}` only found in rlib format, \
but must be available in dylib format",
info.ident);
self.env.sess.span_err(krate.span, message.as_slice());
// No need to abort because the loading code will just ignore this
// empty dylib.
}
let pc = PluginMetadata {
lib: library.dylib.clone(),
macros: macros,
registrar_symbol: registrar,
};
if should_link && existing_match(&self.env, info.name.as_slice(),
None).is_none() {
// register crate now to avoid double-reading metadata
register_crate(&mut self.env, &None, info.ident.as_slice(),
info.name.as_slice(), krate.span, library);
}
pc
}
}

File: webrender_helpers.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// TODO(gw): This contains helper traits and implementations for converting Servo display lists
// into WebRender display lists. In the future, this step should be completely removed.
// This might be achieved by sharing types between WR and Servo display lists, or
// completely converting layout to directly generate WebRender display lists, for example.
use app_units::Au;
use azure::azure_hl::Color;
use euclid::{Point2D, Rect, Size2D};
use gfx::display_list::{BorderRadii, BoxShadowClipMode, ClippingRegion};
use gfx::display_list::{DisplayItem, DisplayList, DisplayListTraversal};
use gfx::display_list::{GradientStop, StackingContext, StackingContextType};
use gfx_traits::{FragmentType, ScrollPolicy, StackingContextId};
use style::computed_values::{image_rendering, mix_blend_mode};
use style::computed_values::filter::{self, Filter};
use style::values::computed::BorderStyle;
use webrender_traits::{self, AuxiliaryListsBuilder, DisplayListId, PipelineId};
trait WebRenderStackingContextConverter {
fn convert_to_webrender<'a>(&self,
traversal: &mut DisplayListTraversal<'a>,
api: &mut webrender_traits::RenderApi,
pipeline_id: webrender_traits::PipelineId,
epoch: webrender_traits::Epoch,
scroll_layer_id: Option<webrender_traits::ScrollLayerId>,
scroll_policy: ScrollPolicy,
frame_builder: &mut WebRenderFrameBuilder)
-> webrender_traits::StackingContextId;
fn convert_children_to_webrender<'a>(&self,
traversal: &mut DisplayListTraversal<'a>,
api: &mut webrender_traits::RenderApi,
pipeline_id: webrender_traits::PipelineId,
epoch: webrender_traits::Epoch,
scroll_layer_id: Option<webrender_traits::ScrollLayerId>,
scroll_policy: ScrollPolicy,
builder: &mut webrender_traits::DisplayListBuilder,
frame_builder: &mut WebRenderFrameBuilder,
force_positioned_stacking_level: bool);
}
pub trait WebRenderDisplayListConverter {
fn convert_to_webrender(&self,
api: &mut webrender_traits::RenderApi,
pipeline_id: webrender_traits::PipelineId,
epoch: webrender_traits::Epoch,
scroll_layer_id: Option<webrender_traits::ScrollLayerId>,
frame_builder: &mut WebRenderFrameBuilder)
-> webrender_traits::StackingContextId;
}
trait WebRenderDisplayItemConverter {
fn convert_to_webrender(&self,
builder: &mut webrender_traits::DisplayListBuilder,
frame_builder: &mut WebRenderFrameBuilder);
}
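// The To* traits below are simple one-to-one conversions from Servo/gfx types
// to their webrender_traits equivalents.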
trait ToBorderStyle {
fn to_border_style(&self) -> webrender_traits::BorderStyle;
}
impl ToBorderStyle for BorderStyle {
fn to_border_style(&self) -> webrender_traits::BorderStyle {
match *self {
BorderStyle::none => webrender_traits::BorderStyle::None,
BorderStyle::solid => webrender_traits::BorderStyle::Solid,
BorderStyle::double => webrender_traits::BorderStyle::Double,
BorderStyle::dotted => webrender_traits::BorderStyle::Dotted,
BorderStyle::dashed => webrender_traits::BorderStyle::Dashed,
BorderStyle::hidden => webrender_traits::BorderStyle::Hidden,
BorderStyle::groove => webrender_traits::BorderStyle::Groove,
BorderStyle::ridge => webrender_traits::BorderStyle::Ridge,
BorderStyle::inset => webrender_traits::BorderStyle::Inset,
BorderStyle::outset => webrender_traits::BorderStyle::Outset,
}
}
}
trait ToBoxShadowClipMode {
fn to_clip_mode(&self) -> webrender_traits::BoxShadowClipMode;
}
impl ToBoxShadowClipMode for BoxShadowClipMode {
fn to_clip_mode(&self) -> webrender_traits::BoxShadowClipMode {
match *self {
BoxShadowClipMode::None => webrender_traits::BoxShadowClipMode::None,
BoxShadowClipMode::Inset => webrender_traits::BoxShadowClipMode::Inset,
BoxShadowClipMode::Outset => webrender_traits::BoxShadowClipMode::Outset,
}
}
}
trait ToSizeF {
fn to_sizef(&self) -> Size2D<f32>;
}
trait ToPointF {
fn to_pointf(&self) -> Point2D<f32>;
}
impl ToPointF for Point2D<Au> {
fn to_pointf(&self) -> Point2D<f32> {
Point2D::new(self.x.to_f32_px(), self.y.to_f32_px())
}
}
impl ToSizeF for Size2D<Au> {
fn to_sizef(&self) -> Size2D<f32> {
Size2D::new(self.width.to_f32_px(), self.height.to_f32_px())
}
}
trait ToRectF {
fn to_rectf(&self) -> Rect<f32>;
}
impl ToRectF for Rect<Au> {
fn to_rectf(&self) -> Rect<f32> {
let x = self.origin.x.to_f32_px();
let y = self.origin.y.to_f32_px();
let w = self.size.width.to_f32_px();
let h = self.size.height.to_f32_px();
Rect::new(Point2D::new(x, y), Size2D::new(w, h))
}
}
trait ToColorF {
fn to_colorf(&self) -> webrender_traits::ColorF;
}
impl ToColorF for Color {
fn to_colorf(&self) -> webrender_traits::ColorF {
webrender_traits::ColorF::new(self.r, self.g, self.b, self.a)
}
}
trait ToGradientStop {
fn to_gradient_stop(&self) -> webrender_traits::GradientStop;
}
impl ToGradientStop for GradientStop {
fn to_gradient_stop(&self) -> webrender_traits::GradientStop {
webrender_traits::GradientStop {
offset: self.offset,
color: self.color.to_colorf(),
}
}
}
trait ToClipRegion {
fn to_clip_region(&self, frame_builder: &mut WebRenderFrameBuilder)
-> webrender_traits::ClipRegion;
}
impl ToClipRegion for ClippingRegion {
fn to_clip_region(&self, frame_builder: &mut WebRenderFrameBuilder)
-> webrender_traits::ClipRegion {
webrender_traits::ClipRegion::new(&self.main.to_rectf(),
self.complex.iter().map(|complex_clipping_region| {
webrender_traits::ComplexClipRegion::new(
complex_clipping_region.rect.to_rectf(),
complex_clipping_region.radii.to_border_radius(),
)
}).collect(),
&mut frame_builder.auxiliary_lists_builder)
}
}
trait ToBorderRadius {
fn to_border_radius(&self) -> webrender_traits::BorderRadius;
}
impl ToBorderRadius for BorderRadii<Au> {
fn to_border_radius(&self) -> webrender_traits::BorderRadius {
webrender_traits::BorderRadius {
top_left: self.top_left.to_sizef(),
top_right: self.top_right.to_sizef(),
bottom_left: self.bottom_left.to_sizef(),
bottom_right: self.bottom_right.to_sizef(),
}
}
}
trait ToBlendMode {
fn to_blend_mode(&self) -> webrender_traits::MixBlendMode;
}
impl ToBlendMode for mix_blend_mode::T {
fn to_blend_mode(&self) -> webrender_traits::MixBlendMode {
match *self {
mix_blend_mode::T::normal => webrender_traits::MixBlendMode::Normal,
mix_blend_mode::T::multiply => webrender_traits::MixBlendMode::Multiply,
mix_blend_mode::T::screen => webrender_traits::MixBlendMode::Screen,
mix_blend_mode::T::overlay => webrender_traits::MixBlendMode::Overlay,
mix_blend_mode::T::darken => webrender_traits::MixBlendMode::Darken,
mix_blend_mode::T::lighten => webrender_traits::MixBlendMode::Lighten,
mix_blend_mode::T::color_dodge => webrender_traits::MixBlendMode::ColorDodge,
mix_blend_mode::T::color_burn => webrender_traits::MixBlendMode::ColorBurn,
mix_blend_mode::T::hard_light => webrender_traits::MixBlendMode::HardLight,
mix_blend_mode::T::soft_light => webrender_traits::MixBlendMode::SoftLight,
mix_blend_mode::T::difference => webrender_traits::MixBlendMode::Difference,
mix_blend_mode::T::exclusion => webrender_traits::MixBlendMode::Exclusion,
mix_blend_mode::T::hue => webrender_traits::MixBlendMode::Hue,
mix_blend_mode::T::saturation => webrender_traits::MixBlendMode::Saturation,
mix_blend_mode::T::color => webrender_traits::MixBlendMode::Color,
mix_blend_mode::T::luminosity => webrender_traits::MixBlendMode::Luminosity,
}
}
}
trait ToImageRendering {
fn to_image_rendering(&self) -> webrender_traits::ImageRendering;
}
impl ToImageRendering for image_rendering::T {
fn to_image_rendering(&self) -> webrender_traits::ImageRendering {
match *self {
image_rendering::T::CrispEdges => webrender_traits::ImageRendering::CrispEdges,
image_rendering::T::Auto => webrender_traits::ImageRendering::Auto,
image_rendering::T::Pixelated => webrender_traits::ImageRendering::Pixelated,
}
}
}
trait ToFilterOps {
fn to_filter_ops(&self) -> Vec<webrender_traits::FilterOp>;
}
impl ToFilterOps for filter::T {
fn to_filter_ops(&self) -> Vec<webrender_traits::FilterOp> {
let mut result = Vec::with_capacity(self.filters.len());
for filter in self.filters.iter() {
match *filter {
Filter::Blur(radius) => result.push(webrender_traits::FilterOp::Blur(radius)),
Filter::Brightness(amount) => result.push(webrender_traits::FilterOp::Brightness(amount)),
Filter::Contrast(amount) => result.push(webrender_traits::FilterOp::Contrast(amount)),
Filter::Grayscale(amount) => result.push(webrender_traits::FilterOp::Grayscale(amount)),
Filter::HueRotate(angle) => result.push(webrender_traits::FilterOp::HueRotate(angle.0)),
Filter::Invert(amount) => result.push(webrender_traits::FilterOp::Invert(amount)),
Filter::Opacity(amount) => result.push(webrender_traits::FilterOp::Opacity(amount)),
Filter::Saturate(amount) => result.push(webrender_traits::FilterOp::Saturate(amount)),
Filter::Sepia(amount) => result.push(webrender_traits::FilterOp::Sepia(amount)),
}
}
result
}
}
impl WebRenderStackingContextConverter for StackingContext {
fn convert_children_to_webrender<'a>(&self,
traversal: &mut DisplayListTraversal<'a>,
api: &mut webrender_traits::RenderApi,
pipeline_id: webrender_traits::PipelineId,
epoch: webrender_traits::Epoch,
scroll_layer_id: Option<webrender_traits::ScrollLayerId>,
scroll_policy: ScrollPolicy,
builder: &mut webrender_traits::DisplayListBuilder,
frame_builder: &mut WebRenderFrameBuilder,
_force_positioned_stacking_level: bool) {
for child in self.children() {
while let Some(item) = traversal.advance(self) {
item.convert_to_webrender(builder, frame_builder);
}
if child.context_type == StackingContextType::Real {
let scroll_layer_id_for_children = if self.scrolls_overflow_area {
scroll_layer_id
} else {
None
};
let stacking_context_id = child.convert_to_webrender(traversal,
api,
pipeline_id,
epoch,
scroll_layer_id_for_children,
scroll_policy,
frame_builder);
builder.push_stacking_context(stacking_context_id);
} else {
child.convert_children_to_webrender(traversal,
api,
pipeline_id,
epoch,
scroll_layer_id,
scroll_policy,
builder,
frame_builder,
true);
}
}
while let Some(item) = traversal.advance(self) {
item.convert_to_webrender(builder, frame_builder);
}
}
fn convert_to_webrender<'a>(&self,
traversal: &mut DisplayListTraversal<'a>,
api: &mut webrender_traits::RenderApi,
pipeline_id: webrender_traits::PipelineId,
epoch: webrender_traits::Epoch,
mut scroll_layer_id: Option<webrender_traits::ScrollLayerId>,
mut scroll_policy: ScrollPolicy,
frame_builder: &mut WebRenderFrameBuilder)
-> webrender_traits::StackingContextId {
if let Some(ref layer_info) = self.layer_info {
scroll_policy = layer_info.scroll_policy
}
let webrender_scroll_policy = match scroll_policy {
ScrollPolicy::Scrollable => webrender_traits::ScrollPolicy::Scrollable,
ScrollPolicy::FixedPosition => webrender_traits::ScrollPolicy::Fixed,
};
let webrender_stacking_context_id = self.id.convert_to_webrender();
let mut sc =
webrender_traits::StackingContext::new(webrender_stacking_context_id,
scroll_layer_id,
webrender_scroll_policy,
self.bounds.to_rectf(),
self.overflow.to_rectf(),
self.z_index,
&self.transform,
&self.perspective,
self.establishes_3d_context,
self.blend_mode.to_blend_mode(),
self.filters.to_filter_ops(),
&mut frame_builder.auxiliary_lists_builder);
let mut builder = webrender_traits::DisplayListBuilder::new();
if self.scrolls_overflow_area {
scroll_layer_id = Some(frame_builder.next_scroll_layer_id());
}
self.convert_children_to_webrender(traversal,
                                           api,
                                           pipeline_id,
                                           epoch,
                                           scroll_layer_id,
scroll_policy,
&mut builder,
frame_builder,
false);
frame_builder.add_display_list(api, builder.finalize(), &mut sc);
frame_builder.add_stacking_context(api, pipeline_id, sc)
}
}
impl WebRenderDisplayListConverter for DisplayList {
fn convert_to_webrender(&self,
api: &mut webrender_traits::RenderApi,
pipeline_id: webrender_traits::PipelineId,
epoch: webrender_traits::Epoch,
scroll_layer_id: Option<webrender_traits::ScrollLayerId>,
frame_builder: &mut WebRenderFrameBuilder)
-> webrender_traits::StackingContextId {
let mut traversal = DisplayListTraversal {
display_list: self,
current_item_index: 0,
last_item_index: self.list.len() - 1,
};
self.root_stacking_context.convert_to_webrender(&mut traversal,
api,
pipeline_id,
epoch,
scroll_layer_id,
ScrollPolicy::Scrollable,
frame_builder)
}
}
impl WebRenderDisplayItemConverter for DisplayItem {
fn convert_to_webrender(&self,
builder: &mut webrender_traits::DisplayListBuilder,
frame_builder: &mut WebRenderFrameBuilder) {
match *self {
DisplayItem::SolidColorClass(ref item) => {
let color = item.color.to_colorf();
if color.a > 0.0 {
builder.push_rect(item.base.bounds.to_rectf(),
item.base.clip.to_clip_region(frame_builder),
color);
}
}
DisplayItem::TextClass(ref item) => {
let mut origin = item.baseline_origin.clone();
let mut glyphs = vec!();
for slice in item.text_run.natural_word_slices_in_visual_order(&item.range) {
for glyph in slice.glyphs.iter_glyphs_for_byte_range(&slice.range) {
let glyph_advance = if glyph.char_is_space() {
glyph.advance() + item.text_run.extra_word_spacing
} else {
glyph.advance()
};
if !slice.glyphs.is_whitespace() {
let glyph_offset = glyph.offset().unwrap_or(Point2D::zero());
let glyph = webrender_traits::GlyphInstance {
index: glyph.id(),
x: (origin.x + glyph_offset.x).to_f32_px(),
y: (origin.y + glyph_offset.y).to_f32_px(),
};
glyphs.push(glyph);
}
origin.x = origin.x + glyph_advance;
};
}
if glyphs.len() > 0 {
builder.push_text(item.base.bounds.to_rectf(),
item.base.clip.to_clip_region(frame_builder),
glyphs,
item.text_run.font_key.expect("Font not added to webrender!"),
item.text_color.to_colorf(),
item.text_run.actual_pt_size,
item.blur_radius,
&mut frame_builder.auxiliary_lists_builder);
}
}
DisplayItem::ImageClass(ref item) => {
if let Some(id) = item.webrender_image.key {
if item.stretch_size.width > Au(0) &&
item.stretch_size.height > Au(0) {
builder.push_image(item.base.bounds.to_rectf(),
item.base.clip.to_clip_region(frame_builder),
item.stretch_size.to_sizef(),
item.image_rendering.to_image_rendering(),
id);
}
}
}
DisplayItem::WebGLClass(ref item) => {
builder.push_webgl_canvas(item.base.bounds.to_rectf(),
item.base.clip.to_clip_region(frame_builder),
item.context_id);
}
DisplayItem::BorderClass(ref item) => {
let rect = item.base.bounds.to_rectf();
let left = webrender_traits::BorderSide {
width: item.border_widths.left.to_f32_px(),
color: item.color.left.to_colorf(),
style: item.style.left.to_border_style(),
};
let top = webrender_traits::BorderSide {
width: item.border_widths.top.to_f32_px(),
color: item.color.top.to_colorf(),
style: item.style.top.to_border_style(),
};
let right = webrender_traits::BorderSide {
width: item.border_widths.right.to_f32_px(),
color: item.color.right.to_colorf(),
style: item.style.right.to_border_style(),
};
let bottom = webrender_traits::BorderSide {
width: item.border_widths.bottom.to_f32_px(),
color: item.color.bottom.to_colorf(),
style: item.style.bottom.to_border_style(),
};
let radius = item.radius.to_border_radius();
builder.push_border(rect,
item.base.clip.to_clip_region(frame_builder),
left,
top,
right,
bottom,
radius);
}
DisplayItem::GradientClass(ref item) => {
let rect = item.base.bounds.to_rectf();
let start_point = item.start_point.to_pointf();
let end_point = item.end_point.to_pointf();
let mut stops = Vec::new();
for stop in &item.stops {
stops.push(stop.to_gradient_stop());
}
builder.push_gradient(rect,
item.base.clip.to_clip_region(frame_builder),
start_point,
end_point,
stops,
&mut frame_builder.auxiliary_lists_builder);
}
DisplayItem::LineClass(..) => {
println!("TODO DisplayItem::LineClass");
}
DisplayItem::LayeredItemClass(..) => {
panic!("Unexpected in webrender!");
}
DisplayItem::BoxShadowClass(ref item) => {
let rect = item.base.bounds.to_rectf();
let box_bounds = item.box_bounds.to_rectf();
builder.push_box_shadow(rect,
item.base.clip.to_clip_region(frame_builder),
box_bounds,
item.offset.to_pointf(),
item.color.to_colorf(),
item.blur_radius.to_f32_px(),
item.spread_radius.to_f32_px(),
item.border_radius.to_f32_px(),
item.clip_mode.to_clip_mode());
}
DisplayItem::IframeClass(ref item) => {
let rect = item.base.bounds.to_rectf();
let pipeline_id = item.iframe.to_webrender();
builder.push_iframe(rect,
item.base.clip.to_clip_region(frame_builder),
pipeline_id);
}
}
}
}
pub struct WebRenderFrameBuilder {
pub stacking_contexts: Vec<(webrender_traits::StackingContextId,
webrender_traits::StackingContext)>,
pub display_lists: Vec<(DisplayListId, webrender_traits::BuiltDisplayList)>,
pub auxiliary_lists_builder: AuxiliaryListsBuilder,
pub root_pipeline_id: PipelineId,
pub next_scroll_layer_id: usize,
}
impl WebRenderFrameBuilder {
pub fn new(root_pipeline_id: PipelineId) -> WebRenderFrameBuilder {
WebRenderFrameBuilder {
stacking_contexts: vec![],
display_lists: vec![],
auxiliary_lists_builder: AuxiliaryListsBuilder::new(),
root_pipeline_id: root_pipeline_id,
next_scroll_layer_id: 0,
}
}
pub fn add_stacking_context(&mut self,
api: &mut webrender_traits::RenderApi,
pipeline_id: PipelineId,
stacking_context: webrender_traits::StackingContext)
-> webrender_traits::StackingContextId {
assert!(pipeline_id == self.root_pipeline_id);
let id = api.next_stacking_context_id();
self.stacking_contexts.push((id, stacking_context));
id
}
pub fn add_display_list(&mut self,
api: &mut webrender_traits::RenderApi,
display_list: webrender_traits::BuiltDisplayList,
stacking_context: &mut webrender_traits::StackingContext)
-> DisplayListId {
let id = api.next_display_list_id();
stacking_context.has_stacking_contexts = stacking_context.has_stacking_contexts ||
display_list.descriptor().has_stacking_contexts;
stacking_context.display_lists.push(id);
self.display_lists.push((id, display_list));
id
}
pub fn next_scroll_layer_id(&mut self) -> webrender_traits::ScrollLayerId {
let scroll_layer_id = self.next_scroll_layer_id;
self.next_scroll_layer_id += 1;
webrender_traits::ScrollLayerId::new(self.root_pipeline_id, scroll_layer_id)
}
}
trait WebRenderStackingContextIdConverter {
fn convert_to_webrender(&self) -> webrender_traits::ServoStackingContextId;
}
impl WebRenderStackingContextIdConverter for StackingContextId {
fn convert_to_webrender(&self) -> webrender_traits::ServoStackingContextId {
webrender_traits::ServoStackingContextId(self.fragment_type().convert_to_webrender(),
self.id())
}
}
trait WebRenderFragmentTypeConverter {
fn convert_to_webrender(&self) -> webrender_traits::FragmentType;
}
impl WebRenderFragmentTypeConverter for FragmentType {
fn convert_to_webrender(&self) -> webrender_traits::FragmentType {
match *self {
FragmentType::FragmentBody => webrender_traits::FragmentType::FragmentBody,
FragmentType::BeforePseudoContent => {
webrender_traits::FragmentType::BeforePseudoContent
}
FragmentType::AfterPseudoContent => webrender_traits::FragmentType::AfterPseudoContent,
}
}
}

File: get1F88.py

# -*- coding: utf-8 -*-
from ptools import *
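
# Fetch PDB entry 1F88 (bovine rhodopsin) and write it to a local file.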
pdb1f88 = getPDB("1F88")
WritePDB(pdb1f88, "1F88.pdb")

File: quickInput.ts

/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'vs/css!./quickInput';
import { Component } from 'vs/workbench/common/component';
import { IQuickInputService, IQuickPickItem, IPickOptions, IInputOptions, IQuickNavigateConfiguration, IQuickPick, IQuickInput, IQuickInputButton, IInputBox, IQuickPickItemButtonEvent, QuickPickInput, IQuickPickSeparator, IKeyMods } from 'vs/platform/quickinput/common/quickInput';
import { IPartService } from 'vs/workbench/services/part/common/partService';
import * as dom from 'vs/base/browser/dom';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { contrastBorder, widgetShadow } from 'vs/platform/theme/common/colorRegistry';
import { SIDE_BAR_BACKGROUND, SIDE_BAR_FOREGROUND } from 'vs/workbench/common/theme';
import { IQuickOpenService } from 'vs/platform/quickOpen/common/quickOpen';
import { CancellationToken } from 'vs/base/common/cancellation';
import { QuickInputList } from './quickInputList';
import { QuickInputBox } from './quickInputBox';
import { KeyCode } from 'vs/base/common/keyCodes';
import { StandardKeyboardEvent } from 'vs/base/browser/keyboardEvent';
import { localize } from 'vs/nls';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { CLOSE_ON_FOCUS_LOST_CONFIG } from 'vs/workbench/browser/quickopen';
import { CountBadge } from 'vs/base/browser/ui/countBadge/countBadge';
import { attachBadgeStyler, attachProgressBarStyler, attachButtonStyler } from 'vs/platform/theme/common/styler';
import { IEnvironmentService } from 'vs/platform/environment/common/environment';
import { ProgressBar } from 'vs/base/browser/ui/progressbar/progressbar';
import { Emitter, Event } from 'vs/base/common/event';
import { Button } from 'vs/base/browser/ui/button/button';
import { dispose, IDisposable } from 'vs/base/common/lifecycle';
import Severity from 'vs/base/common/severity';
import { IEditorGroupsService } from 'vs/workbench/services/group/common/editorGroupsService';
import { IContextKeyService, RawContextKey, IContextKey } from 'vs/platform/contextkey/common/contextkey';
import { ICommandAndKeybindingRule, KeybindingWeight } from 'vs/platform/keybinding/common/keybindingsRegistry';
import { inQuickOpenContext } from 'vs/workbench/browser/parts/quickopen/quickopen';
import { ActionBar, ActionItem } from 'vs/base/browser/ui/actionbar/actionbar';
import { Action } from 'vs/base/common/actions';
import { URI } from 'vs/base/common/uri';
import { IKeybindingService } from 'vs/platform/keybinding/common/keybinding';
import { equals } from 'vs/base/common/arrays';
import { TimeoutTimer } from 'vs/base/common/async';
import { getIconClass } from 'vs/workbench/browser/parts/quickinput/quickInputUtils';
import { AccessibilitySupport } from 'vs/base/common/platform';
import * as browser from 'vs/base/browser/browser';
import { IEditorOptions } from 'vs/editor/common/config/editorOptions';
import { IStorageService } from 'vs/platform/storage/common/storage';
const $ = dom.$;
type Writeable<T> = { -readonly [P in keyof T]: T[P] };
const backButton = {
iconPath: {
dark: URI.parse(require.toUrl('vs/workbench/browser/parts/quickinput/media/dark/arrow-left.svg')),
light: URI.parse(require.toUrl('vs/workbench/browser/parts/quickinput/media/light/arrow-left.svg'))
},
tooltip: localize('quickInput.back', "Back"),
handle: -1 // TODO
};
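// The shared widget surface that every quick input controller renders into;
// only one controller is visible at a time.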
interface QuickInputUI {
container: HTMLElement;
leftActionBar: ActionBar;
title: HTMLElement;
rightActionBar: ActionBar;
checkAll: HTMLInputElement;
inputBox: QuickInputBox;
visibleCount: CountBadge;
count: CountBadge;
message: HTMLElement;
progressBar: ProgressBar;
list: QuickInputList;
onDidAccept: Event<void>;
onDidTriggerButton: Event<IQuickInputButton>;
ignoreFocusOut: boolean;
keyMods: Writeable<IKeyMods>;
isScreenReaderOptimized(): boolean;
show(controller: QuickInput): void;
setVisibilities(visibilities: Visibilities): void;
setComboboxAccessibility(enabled: boolean): void;
setEnabled(enabled: boolean): void;
setContextKey(contextKey?: string): void;
hide(): void;
}
type Visibilities = {
title?: boolean;
checkAll?: boolean;
inputBox?: boolean;
visibleCount?: boolean;
count?: boolean;
message?: boolean;
list?: boolean;
ok?: boolean;
};
class QuickInput implements IQuickInput {
private _title: string;
private _steps: number;
private _totalSteps: number;
protected visible = false;
private _enabled = true;
private _contextKey: string;
private _busy = false;
private _ignoreFocusOut = false;
private _buttons: IQuickInputButton[] = [];
private buttonsUpdated = false;
private onDidTriggerButtonEmitter = new Emitter<IQuickInputButton>();
private onDidHideEmitter = new Emitter<void>();
protected visibleDisposables: IDisposable[] = [];
protected disposables: IDisposable[] = [
this.onDidTriggerButtonEmitter,
this.onDidHideEmitter,
];
private busyDelay: TimeoutTimer;
constructor(protected ui: QuickInputUI) {
}
get title() {
return this._title;
}
set title(title: string) {
this._title = title;
this.update();
}
get step() {
return this._steps;
}
set step(step: number) {
this._steps = step;
this.update();
}
get totalSteps() {
return this._totalSteps;
}
set totalSteps(totalSteps: number) {
this._totalSteps = totalSteps;
this.update();
}
get enabled() {
return this._enabled;
}
set enabled(enabled: boolean) {
this._enabled = enabled;
this.update();
}
get contextKey() {
return this._contextKey;
}
set contextKey(contextKey: string) {
this._contextKey = contextKey;
this.update();
}
get busy() {
return this._busy;
}
set busy(busy: boolean) {
this._busy = busy;
this.update();
}
get ignoreFocusOut() {
return this._ignoreFocusOut;
}
set ignoreFocusOut(ignoreFocusOut: boolean) {
this._ignoreFocusOut = ignoreFocusOut;
this.update();
}
get buttons() {
return this._buttons;
}
set buttons(buttons: IQuickInputButton[]) {
this._buttons = buttons;
this.buttonsUpdated = true;
this.update();
}
onDidTriggerButton = this.onDidTriggerButtonEmitter.event;
show(): void {
if (this.visible) {
return;
}
this.visibleDisposables.push(
this.ui.onDidTriggerButton(button => {
if (this.buttons.indexOf(button) !== -1) {
this.onDidTriggerButtonEmitter.fire(button);
}
}),
);
this.ui.show(this);
this.visible = true;
this.update();
}
hide(): void {
if (!this.visible) {
return;
}
this.ui.hide();
}
didHide(): void {
this.visible = false;
this.visibleDisposables = dispose(this.visibleDisposables);
this.onDidHideEmitter.fire();
}
onDidHide = this.onDidHideEmitter.event;
protected update() {
if (!this.visible) {
return;
}
const title = this.getTitle();
if (this.ui.title.textContent !== title) {
this.ui.title.textContent = title;
}
if (this.busy && !this.busyDelay) {
this.busyDelay = new TimeoutTimer();
this.busyDelay.setIfNotSet(() => {
if (this.visible) {
this.ui.progressBar.infinite();
}
}, 800);
}
if (!this.busy && this.busyDelay) {
this.ui.progressBar.stop();
this.busyDelay.cancel();
this.busyDelay = null;
}
if (this.buttonsUpdated) {
this.buttonsUpdated = false;
this.ui.leftActionBar.clear();
const leftButtons = this.buttons.filter(button => button === backButton);
this.ui.leftActionBar.push(leftButtons.map((button, index) => {
const action = new Action(`id-${index}`, '', button.iconClass || getIconClass(button.iconPath), true, () => this.onDidTriggerButtonEmitter.fire(button));
action.tooltip = button.tooltip;
return action;
}), { icon: true, label: false });
this.ui.rightActionBar.clear();
const rightButtons = this.buttons.filter(button => button !== backButton);
this.ui.rightActionBar.push(rightButtons.map((button, index) => {
const action = new Action(`id-${index}`, '', button.iconClass || getIconClass(button.iconPath), true, () => this.onDidTriggerButtonEmitter.fire(button));
action.tooltip = button.tooltip;
return action;
}), { icon: true, label: false });
}
this.ui.ignoreFocusOut = this.ignoreFocusOut;
this.ui.setEnabled(this.enabled);
this.ui.setContextKey(this.contextKey);
}
private getTitle() {
if (this.title && this.step) {
return `${this.title} (${this.getSteps()})`;
}
if (this.title) {
return this.title;
}
if (this.step) {
return this.getSteps();
}
return '';
}
private getSteps() {
if (this.step && this.totalSteps) {
return localize('quickInput.steps', "{0}/{1}", this.step, this.totalSteps);
}
if (this.step) {
return String(this.step);
}
return '';
}
public dispose(): void {
this.hide();
this.disposables = dispose(this.disposables);
}
}
class QuickPick<T extends IQuickPickItem> extends QuickInput implements IQuickPick<T> {
private static INPUT_BOX_ARIA_LABEL = localize('quickInputBox.ariaLabel', "Type to narrow down results.");
private _value = '';
	private _placeholder: string;
private onDidChangeValueEmitter = new Emitter<string>();
private onDidAcceptEmitter = new Emitter<string>();
private _items: Array<T | IQuickPickSeparator> = [];
private itemsUpdated = false;
private _canSelectMany = false;
private _matchOnDescription = false;
private _matchOnDetail = false;
private _activeItems: T[] = [];
private activeItemsUpdated = false;
private activeItemsToConfirm: T[] = [];
private onDidChangeActiveEmitter = new Emitter<T[]>();
private _selectedItems: T[] = [];
private selectedItemsUpdated = false;
private selectedItemsToConfirm: T[] = [];
private onDidChangeSelectionEmitter = new Emitter<T[]>();
private onDidTriggerItemButtonEmitter = new Emitter<IQuickPickItemButtonEvent<T>>();
quickNavigate: IQuickNavigateConfiguration;
constructor(ui: QuickInputUI) {
super(ui);
this.disposables.push(
this.onDidChangeValueEmitter,
this.onDidAcceptEmitter,
this.onDidChangeActiveEmitter,
this.onDidChangeSelectionEmitter,
this.onDidTriggerItemButtonEmitter,
);
}
get value() {
return this._value;
}
set value(value: string) {
this._value = value || '';
this.update();
}
get placeholder() {
return this._placeholder;
}
set placeholder(placeholder: string) {
this._placeholder = placeholder;
this.update();
}
onDidChangeValue = this.onDidChangeValueEmitter.event;
onDidAccept = this.onDidAcceptEmitter.event;
get items() {
return this._items;
}
set items(items: Array<T | IQuickPickSeparator>) {
this._items = items;
this.itemsUpdated = true;
this.update();
}
get canSelectMany() {
return this._canSelectMany;
}
set canSelectMany(canSelectMany: boolean) {
this._canSelectMany = canSelectMany;
this.update();
}
get matchOnDescription() {
return this._matchOnDescription;
}
set matchOnDescription(matchOnDescription: boolean) {
this._matchOnDescription = matchOnDescription;
this.update();
}
get matchOnDetail() {
return this._matchOnDetail;
}
set matchOnDetail(matchOnDetail: boolean) {
this._matchOnDetail = matchOnDetail;
this.update();
}
	get activeItems() {
		return this._activeItems;
	}
set activeItems(activeItems: T[]) {
this._activeItems = activeItems;
this.activeItemsUpdated = true;
this.update();
}
onDidChangeActive = this.onDidChangeActiveEmitter.event;
get selectedItems() {
return this._selectedItems;
}
set selectedItems(selectedItems: T[]) {
this._selectedItems = selectedItems;
this.selectedItemsUpdated = true;
this.update();
}
get keyMods() {
return this.ui.keyMods;
}
onDidChangeSelection = this.onDidChangeSelectionEmitter.event;
onDidTriggerItemButton = this.onDidTriggerItemButtonEmitter.event;
show() {
if (!this.visible) {
this.visibleDisposables.push(
this.ui.inputBox.onDidChange(value => {
if (value === this.value) {
return;
}
this._value = value;
this.ui.list.filter(this.ui.inputBox.value);
if (!this.ui.isScreenReaderOptimized() && !this.canSelectMany) {
this.ui.list.focus('First');
}
this.onDidChangeValueEmitter.fire(value);
}),
this.ui.inputBox.onKeyDown(event => {
switch (event.keyCode) {
case KeyCode.DownArrow:
this.ui.list.focus('Next');
if (this.canSelectMany) {
this.ui.list.domFocus();
}
break;
case KeyCode.UpArrow:
if (this.ui.list.getFocusedElements().length) {
this.ui.list.focus('Previous');
} else {
this.ui.list.focus('Last');
}
if (this.canSelectMany) {
this.ui.list.domFocus();
}
break;
case KeyCode.PageDown:
if (this.ui.list.getFocusedElements().length) {
this.ui.list.focus('NextPage');
} else {
this.ui.list.focus('First');
}
if (this.canSelectMany) {
this.ui.list.domFocus();
}
break;
case KeyCode.PageUp:
if (this.ui.list.getFocusedElements().length) {
this.ui.list.focus('PreviousPage');
} else {
this.ui.list.focus('Last');
}
if (this.canSelectMany) {
this.ui.list.domFocus();
}
break;
}
}),
this.ui.onDidAccept(() => {
if (!this.canSelectMany && this.activeItems[0]) {
this._selectedItems = [this.activeItems[0]];
this.onDidChangeSelectionEmitter.fire(this.selectedItems);
}
this.onDidAcceptEmitter.fire();
}),
this.ui.list.onDidChangeFocus(focusedItems => {
if (this.activeItemsUpdated) {
return; // Expect another event.
}
if (this.activeItemsToConfirm !== this._activeItems && equals(focusedItems, this._activeItems, (a, b) => a === b)) {
return;
}
this._activeItems = focusedItems as T[];
this.onDidChangeActiveEmitter.fire(focusedItems as T[]);
}),
this.ui.list.onDidChangeSelection(selectedItems => {
if (this.canSelectMany) {
if (selectedItems.length) {
this.ui.list.setSelectedElements([]);
}
return;
}
if (this.selectedItemsToConfirm !== this._selectedItems && equals(selectedItems, this._selectedItems, (a, b) => a === b)) {
return;
}
this._selectedItems = selectedItems as T[];
this.onDidChangeSelectionEmitter.fire(selectedItems as T[]);
if (selectedItems.length) {
this.onDidAcceptEmitter.fire();
}
}),
this.ui.list.onChangedCheckedElements(checkedItems => {
if (!this.canSelectMany) {
return;
}
if (this.selectedItemsToConfirm !== this._selectedItems && equals(checkedItems, this._selectedItems, (a, b) => a === b)) {
return;
}
this._selectedItems = checkedItems as T[];
this.onDidChangeSelectionEmitter.fire(checkedItems as T[]);
}),
this.ui.list.onButtonTriggered(event => this.onDidTriggerItemButtonEmitter.fire(event as IQuickPickItemButtonEvent<T>)),
this.registerQuickNavigation()
);
}
super.show(); // TODO: Why have show() bubble up while update() trickles down? (Could move setComboboxAccessibility() here.)
}
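	// Quick-navigation mode: while the modifier of a triggering quick-open
	// keybinding is still held, releasing that modifier (or pressing Enter)
	// accepts the currently active item. See registerQuickNavigation() below.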
private registerQuickNavigation() {
return dom.addDisposableListener(this.ui.container, dom.EventType.KEY_UP, e => {
if (this.canSelectMany || !this.quickNavigate) {
return;
}
const keyboardEvent: StandardKeyboardEvent = new StandardKeyboardEvent(e);
const keyCode = keyboardEvent.keyCode;
// Select element when keys are pressed that signal it
const quickNavKeys = this.quickNavigate.keybindings;
const wasTriggerKeyPressed = keyCode === KeyCode.Enter || quickNavKeys.some(k => {
const [firstPart, chordPart] = k.getParts();
if (chordPart) {
return false;
}
if (firstPart.shiftKey && keyCode === KeyCode.Shift) {
if (keyboardEvent.ctrlKey || keyboardEvent.altKey || keyboardEvent.metaKey) {
return false; // this is an optimistic check for the shift key being used to navigate back in quick open
}
return true;
}
if (firstPart.altKey && keyCode === KeyCode.Alt) {
return true;
}
if (firstPart.ctrlKey && keyCode === KeyCode.Ctrl) {
return true;
}
if (firstPart.metaKey && keyCode === KeyCode.Meta) {
return true;
}
return false;
});
if (wasTriggerKeyPressed && this.activeItems[0]) {
this._selectedItems = [this.activeItems[0]];
this.onDidChangeSelectionEmitter.fire(this.selectedItems);
this.onDidAcceptEmitter.fire();
}
});
}
protected update() {
super.update();
if (!this.visible) {
return;
}
if (this.ui.inputBox.value !== this.value) {
this.ui.inputBox.value = this.value;
}
if (this.ui.inputBox.placeholder !== (this.placeholder || '')) {
this.ui.inputBox.placeholder = (this.placeholder || '');
}
if (this.itemsUpdated) {
this.itemsUpdated = false;
this.ui.list.setElements(this.items);
this.ui.list.filter(this.ui.inputBox.value);
this.ui.checkAll.checked = this.ui.list.getAllVisibleChecked();
this.ui.visibleCount.setCount(this.ui.list.getVisibleCount());
this.ui.count.setCount(this.ui.list.getCheckedCount());
if (!this.ui.isScreenReaderOptimized() && !this.canSelectMany) {
this.ui.list.focus('First');
}
}
if (this.ui.container.classList.contains('show-checkboxes') !== !!this.canSelectMany) {
if (this.canSelectMany) {
this.ui.list.clearFocus();
} else if (!this.ui.isScreenReaderOptimized()) {
this.ui.list.focus('First');
}
}
if (this.activeItemsUpdated) {
this.activeItemsUpdated = false;
this.activeItemsToConfirm = this._activeItems;
this.ui.list.setFocusedElements(this.activeItems);
if (this.activeItemsToConfirm === this._activeItems) {
this.activeItemsToConfirm = null;
}
}
if (this.selectedItemsUpdated) {
this.selectedItemsUpdated = false;
this.selectedItemsToConfirm = this._selectedItems;
if (this.canSelectMany) {
this.ui.list.setCheckedElements(this.selectedItems);
} else {
this.ui.list.setSelectedElements(this.selectedItems);
}
if (this.selectedItemsToConfirm === this._selectedItems) {
this.selectedItemsToConfirm = null;
}
}
this.ui.list.matchOnDescription = this.matchOnDescription;
this.ui.list.matchOnDetail = this.matchOnDetail;
this.ui.setComboboxAccessibility(true);
this.ui.inputBox.setAttribute('aria-label', QuickPick.INPUT_BOX_ARIA_LABEL);
this.ui.setVisibilities(this.canSelectMany ? { title: !!this.title || !!this.step, checkAll: true, inputBox: true, visibleCount: true, count: true, ok: true, list: true } : { title: !!this.title || !!this.step, inputBox: true, visibleCount: true, list: true });
}
}
class InputBox extends QuickInput implements IInputBox {
private static noPromptMessage = localize('inputModeEntry', "Press 'Enter' to confirm your input or 'Escape' to cancel");
private _value = '';
private _valueSelection: Readonly<[number, number]>;
private valueSelectionUpdated = true;
private _placeholder: string;
private _password = false;
private _prompt: string;
private noValidationMessage = InputBox.noPromptMessage;
private _validationMessage: string;
private onDidValueChangeEmitter = new Emitter<string>();
private onDidAcceptEmitter = new Emitter<string>();
constructor(ui: QuickInputUI) {
super(ui);
this.disposables.push(
this.onDidValueChangeEmitter,
this.onDidAcceptEmitter,
);
}
get value() {
return this._value;
}
set value(value: string) {
this._value = value || '';
this.update();
}
set valueSelection(valueSelection: Readonly<[number, number]>) {
this._valueSelection = valueSelection;
this.valueSelectionUpdated = true;
this.update();
}
get placeholder() {
return this._placeholder;
}
set placeholder(placeholder: string) {
this._placeholder = placeholder;
this.update();
}
get password() {
return this._password;
}
set password(password: boolean) {
this._password = password;
this.update();
}
get prompt() {
return this._prompt;
}
set prompt(prompt: string) {
this._prompt = prompt;
this.noValidationMessage = prompt
? localize('inputModeEntryDescription', "{0} (Press 'Enter' to confirm or 'Escape' to cancel)", prompt)
: InputBox.noPromptMessage;
this.update();
}
get validationMessage() {
return this._validationMessage;
}
set validationMessage(validationMessage: string) {
this._validationMessage = validationMessage;
this.update();
}
onDidChangeValue = this.onDidValueChangeEmitter.event;
onDidAccept = this.onDidAcceptEmitter.event;
show() {
if (!this.visible) {
this.visibleDisposables.push(
this.ui.inputBox.onDidChange(value => {
if (value === this.value) {
return;
}
this._value = value;
this.onDidValueChangeEmitter.fire(value);
}),
this.ui.onDidAccept(() => this.onDidAcceptEmitter.fire()),
);
this.valueSelectionUpdated = true;
}
super.show();
}
protected update() {
super.update();
if (!this.visible) {
return;
}
if (this.ui.inputBox.value !== this.value) {
this.ui.inputBox.value = this.value;
}
if (this.valueSelectionUpdated) {
this.valueSelectionUpdated = false;
this.ui.inputBox.select(this._valueSelection && { start: this._valueSelection[0], end: this._valueSelection[1] });
}
if (this.ui.inputBox.placeholder !== (this.placeholder || '')) {
this.ui.inputBox.placeholder = (this.placeholder || '');
}
if (this.ui.inputBox.password !== this.password) {
this.ui.inputBox.password = this.password;
}
if (!this.validationMessage && this.ui.message.textContent !== this.noValidationMessage) {
this.ui.message.textContent = this.noValidationMessage;
this.ui.inputBox.showDecoration(Severity.Ignore);
}
if (this.validationMessage && this.ui.message.textContent !== this.validationMessage) {
this.ui.message.textContent = this.validationMessage;
this.ui.inputBox.showDecoration(Severity.Error);
}
this.ui.setVisibilities({ title: !!this.title || !!this.step, inputBox: true, message: true });
}
}
export class QuickInputService extends Component implements IQuickInputService {
public _serviceBrand: any;
private static readonly ID = 'workbench.component.quickinput';
private static readonly MAX_WIDTH = 600; // Max total width of quick open widget
private idPrefix = 'quickInput_'; // Constant since there is still only one.
private layoutDimensions: dom.Dimension;
private titleBar: HTMLElement;
private filterContainer: HTMLElement;
private visibleCountContainer: HTMLElement;
private countContainer: HTMLElement;
private okContainer: HTMLElement;
private ok: Button;
private ui: QuickInputUI;
private comboboxAccessibility = false;
private enabled = true;
private inQuickOpenWidgets: Record<string, boolean> = {};
private inQuickOpenContext: IContextKey<boolean>;
private contexts: { [id: string]: IContextKey<boolean>; } = Object.create(null);
private onDidAcceptEmitter = this._register(new Emitter<void>());
private onDidTriggerButtonEmitter = this._register(new Emitter<IQuickInputButton>());
private keyMods: Writeable<IKeyMods> = { ctrlCmd: false, alt: false };
private controller: QuickInput;
constructor(
@IEnvironmentService private environmentService: IEnvironmentService,
@IConfigurationService private configurationService: IConfigurationService,
@IInstantiationService private instantiationService: IInstantiationService,
@IPartService private partService: IPartService,
@IQuickOpenService private quickOpenService: IQuickOpenService,
@IEditorGroupsService private editorGroupService: IEditorGroupsService,
@IKeybindingService private keybindingService: IKeybindingService,
@IContextKeyService private contextKeyService: IContextKeyService,
@IThemeService themeService: IThemeService,
@IStorageService storageService: IStorageService
) {
super(QuickInputService.ID, themeService, storageService);
this.inQuickOpenContext = new RawContextKey<boolean>('inQuickOpen', false).bindTo(contextKeyService);
this._register(this.quickOpenService.onShow(() => this.inQuickOpen('quickOpen', true)));
this._register(this.quickOpenService.onHide(() => this.inQuickOpen('quickOpen', false)));
this.registerKeyModsListeners();
}
private inQuickOpen(widget: 'quickInput' | 'quickOpen', open: boolean) {
if (open) {
this.inQuickOpenWidgets[widget] = true;
} else {
delete this.inQuickOpenWidgets[widget];
}
if (Object.keys(this.inQuickOpenWidgets).length) {
if (!this.inQuickOpenContext.get()) {
this.inQuickOpenContext.set(true);
}
} else {
if (this.inQuickOpenContext.get()) {
this.inQuickOpenContext.reset();
}
}
}
private setContextKey(id?: string) {
let key: IContextKey<boolean>;
if (id) {
key = this.contexts[id];
if (!key) {
key = new RawContextKey<boolean>(id, false)
.bindTo(this.contextKeyService);
this.contexts[id] = key;
}
}
if (key && key.get()) {
return; // already active context
}
this.resetContextKeys();
if (key) {
key.set(true);
}
}
private resetContextKeys() {
for (const key in this.contexts) {
if (this.contexts[key].get()) {
this.contexts[key].reset();
}
}
}
private registerKeyModsListeners() {
const workbench = this.partService.getWorkbenchElement();
this._register(dom.addDisposableListener(workbench, dom.EventType.KEY_DOWN, (e: KeyboardEvent) => {
const event = new StandardKeyboardEvent(e);
switch (event.keyCode) {
case KeyCode.Ctrl:
case KeyCode.Meta:
this.keyMods.ctrlCmd = true;
break;
case KeyCode.Alt:
this.keyMods.alt = true;
break;
}
}));
this._register(dom.addDisposableListener(workbench, dom.EventType.KEY_UP, (e: KeyboardEvent) => {
const event = new StandardKeyboardEvent(e);
switch (event.keyCode) {
case KeyCode.Ctrl:
case KeyCode.Meta:
this.keyMods.ctrlCmd = false;
break;
case KeyCode.Alt:
this.keyMods.alt = false;
break;
}
}));
}
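	// Tracks the live Ctrl/Cmd and Alt state so that accept handlers can
	// inspect this.keyMods (e.g. a caller that treats a pick differently
	// when a modifier was held at accept time).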
private create() {
if (this.ui) {
return;
}
const workbench = this.partService.getWorkbenchElement();
const container = dom.append(workbench, $('.quick-input-widget.show-file-icons'));
container.tabIndex = -1;
container.style.display = 'none';
this.titleBar = dom.append(container, $('.quick-input-titlebar'));
const leftActionBar = this._register(new ActionBar(this.titleBar));
leftActionBar.domNode.classList.add('quick-input-left-action-bar');
const title = dom.append(this.titleBar, $('.quick-input-title'));
const rightActionBar = this._register(new ActionBar(this.titleBar));
rightActionBar.domNode.classList.add('quick-input-right-action-bar');
const headerContainer = dom.append(container, $('.quick-input-header'));
const checkAll = <HTMLInputElement>dom.append(headerContainer, $('input.quick-input-check-all'));
checkAll.type = 'checkbox';
this._register(dom.addStandardDisposableListener(checkAll, dom.EventType.CHANGE, e => {
const checked = checkAll.checked;
list.setAllVisibleChecked(checked);
}));
this._register(dom.addDisposableListener(checkAll, dom.EventType.CLICK, e => {
if (e.x || e.y) { // Avoid 'click' triggered by 'space'...
inputBox.setFocus();
}
}));
this.filterContainer = dom.append(headerContainer, $('.quick-input-filter'));
const inputBox = this._register(new QuickInputBox(this.filterContainer));
inputBox.setAttribute('aria-describedby', `${this.idPrefix}message`);
this.visibleCountContainer = dom.append(this.filterContainer, $('.quick-input-visible-count'));
this.visibleCountContainer.setAttribute('aria-live', 'polite');
this.visibleCountContainer.setAttribute('aria-atomic', 'true');
const visibleCount = new CountBadge(this.visibleCountContainer, { countFormat: localize({ key: 'quickInput.visibleCount', comment: ['This tells the user how many items are shown in a list of items to select from. The items can be anything. Currently not visible, but read by screen readers.'] }, "{0} Results") });
this.countContainer = dom.append(this.filterContainer, $('.quick-input-count'));
this.countContainer.setAttribute('aria-live', 'polite');
const count = new CountBadge(this.countContainer, { countFormat: localize({ key: 'quickInput.countSelected', comment: ['This tells the user how many items are selected in a list of items to select from. The items can be anything.'] }, "{0} Selected") });
this._register(attachBadgeStyler(count, this.themeService));
this.okContainer = dom.append(headerContainer, $('.quick-input-action'));
this.ok = new Button(this.okContainer);
attachButtonStyler(this.ok, this.themeService);
this.ok.label = localize('ok', "OK");
this._register(this.ok.onDidClick(e => {
this.onDidAcceptEmitter.fire();
}));
const message = dom.append(container, $(`#${this.idPrefix}message.quick-input-message`));
const progressBar = new ProgressBar(container);
dom.addClass(progressBar.getContainer(), 'quick-input-progress');
this._register(attachProgressBarStyler(progressBar, this.themeService));
const list = this._register(this.instantiationService.createInstance(QuickInputList, container, this.idPrefix + 'list'));
this._register(list.onChangedAllVisibleChecked(checked => {
checkAll.checked = checked;
}));
this._register(list.onChangedVisibleCount(c => {
visibleCount.setCount(c);
}));
this._register(list.onChangedCheckedCount(c => {
count.setCount(c);
}));
this._register(list.onLeave(() => {
// Defer to avoid the input field reacting to the triggering key.
setTimeout(() => {
inputBox.setFocus();
if (this.controller instanceof QuickPick && this.controller.canSelectMany) {
list.clearFocus();
}
}, 0);
}));
this._register(list.onDidChangeFocus(() => {
if (this.comboboxAccessibility) {
this.ui.inputBox.setAttribute('aria-activedescendant', this.ui.list.getActiveDescendant());
}
}));
const focusTracker = dom.trackFocus(container);
this._register(focusTracker);
this._register(focusTracker.onDidBlur(() => {
if (!this.ui.ignoreFocusOut && !this.environmentService.args['sticky-quickopen'] && this.configurationService.getValue(CLOSE_ON_FOCUS_LOST_CONFIG)) {
this.hide(true);
}
}));
this._register(dom.addDisposableListener(container, dom.EventType.KEY_DOWN, (e: KeyboardEvent) => {
const event = new StandardKeyboardEvent(e);
switch (event.keyCode) {
case KeyCode.Enter:
dom.EventHelper.stop(e, true);
this.onDidAcceptEmitter.fire();
break;
case KeyCode.Escape:
dom.EventHelper.stop(e, true);
this.hide();
break;
case KeyCode.Tab:
if (!event.altKey && !event.ctrlKey && !event.metaKey) {
const selectors = ['.action-label.icon'];
if (container.classList.contains('show-checkboxes')) {
selectors.push('input');
} else {
selectors.push('input[type=text]');
}
if (this.ui.list.isDisplayed()) {
selectors.push('.monaco-list');
}
const stops = container.querySelectorAll<HTMLElement>(selectors.join(', '));
if (event.shiftKey && event.target === stops[0]) {
dom.EventHelper.stop(e, true);
stops[stops.length - 1].focus();
} else if (!event.shiftKey && event.target === stops[stops.length - 1]) {
dom.EventHelper.stop(e, true);
stops[0].focus();
}
}
break;
}
}));
this._register(this.quickOpenService.onShow(() => this.hide(true)));
this.ui = {
container,
leftActionBar,
title,
rightActionBar,
checkAll,
inputBox,
visibleCount,
count,
message,
progressBar,
list,
onDidAccept: this.onDidAcceptEmitter.event,
onDidTriggerButton: this.onDidTriggerButtonEmitter.event,
ignoreFocusOut: false,
keyMods: this.keyMods,
isScreenReaderOptimized: () => this.isScreenReaderOptimized(),
show: controller => this.show(controller),
hide: () => this.hide(),
setVisibilities: visibilities => this.setVisibilities(visibilities),
setComboboxAccessibility: enabled => this.setComboboxAccessibility(enabled),
setEnabled: enabled => this.setEnabled(enabled),
setContextKey: contextKey => this.setContextKey(contextKey),
};
this.updateStyles();
}
pick<T extends IQuickPickItem, O extends IPickOptions<T>>(picks: Promise<QuickPickInput<T>[]> | QuickPickInput<T>[], options: O = <O>{}, token: CancellationToken = CancellationToken.None): Promise<O extends { canPickMany: true } ? T[] : T> {
return new Promise<O extends { canPickMany: true } ? T[] : T>((doResolve, reject) => {
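			// First-resolution wrapper: reports the key modifiers held at accept
			// time via onKeyMods, then rebinds itself to the plain doResolve so
			// that later calls (e.g. from onDidHide) resolve without side effects.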
let resolve = (result: any) => {
resolve = doResolve;
if (options.onKeyMods) {
options.onKeyMods(input.keyMods);
}
doResolve(result);
};
if (token.isCancellationRequested) {
resolve(undefined);
return;
}
const input = this.createQuickPick<T>();
let activeItem: T;
const disposables = [
input,
input.onDidAccept(() => {
if (input.canSelectMany) {
resolve(<any>input.selectedItems.slice());
input.hide();
} else {
const result = input.activeItems[0];
if (result) {
resolve(<any>result);
input.hide();
}
}
}),
input.onDidChangeActive(items => {
const focused = items[0];
if (focused && options.onDidFocus) {
options.onDidFocus(focused);
}
}),
input.onDidChangeSelection(items => {
if (!input.canSelectMany) {
const result = items[0];
if (result) {
resolve(<any>result);
input.hide();
}
}
}),
input.onDidTriggerItemButton(event => options.onDidTriggerItemButton && options.onDidTriggerItemButton({
...event,
removeItem: () => {
const index = input.items.indexOf(event.item);
if (index !== -1) {
const items = input.items.slice();
items.splice(index, 1);
input.items = items;
}
}
})),
input.onDidChangeValue(value => {
if (activeItem && !value && (input.activeItems.length !== 1 || input.activeItems[0] !== activeItem)) {
input.activeItems = [activeItem];
}
}),
token.onCancellationRequested(() => {
input.hide();
}),
input.onDidHide(() => {
dispose(disposables);
resolve(undefined);
}),
];
input.canSelectMany = options.canPickMany;
input.placeholder = options.placeHolder;
input.ignoreFocusOut = options.ignoreFocusLost;
input.matchOnDescription = options.matchOnDescription;
input.matchOnDetail = options.matchOnDetail;
input.quickNavigate = options.quickNavigate;
input.contextKey = options.contextKey;
input.busy = true;
Promise.all([picks, options.activeItem])
.then(([items, _activeItem]) => {
activeItem = _activeItem;
input.busy = false;
input.items = items;
if (input.canSelectMany) {
input.selectedItems = items.filter(item => item.type !== 'separator' && item.picked) as T[];
}
if (activeItem) {
input.activeItems = [activeItem];
}
});
input.show();
Promise.resolve(picks).then(void 0, err => {
reject(err);
input.hide();
});
});
}
input(options: IInputOptions = {}, token: CancellationToken = CancellationToken.None): Promise<string> {
return new Promise<string>((resolve, reject) => {
if (token.isCancellationRequested) {
resolve(undefined);
return;
}
const input = this.createInputBox();
const validateInput = options.validateInput || (() => <Promise<undefined>>Promise.resolve(undefined));
const onDidValueChange = Event.debounce(input.onDidChangeValue, (last, cur) => cur, 100);
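			// Validation is debounced by 100ms, and a result is applied only while
			// the input still matches the value it was computed for, so stale
			// async validation results are discarded.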
let validationValue = options.value || '';
let validation = Promise.resolve(validateInput(validationValue));
const disposables = [
input,
onDidValueChange(value => {
if (value !== validationValue) {
validation = Promise.resolve(validateInput(value));
validationValue = value;
}
validation.then(result => {
if (value === validationValue) {
input.validationMessage = result;
}
});
}),
input.onDidAccept(() => {
const value = input.value;
if (value !== validationValue) {
validation = Promise.resolve(validateInput(value));
validationValue = value;
}
validation.then(result => {
if (!result) {
resolve(value);
input.hide();
} else if (value === validationValue) {
input.validationMessage = result;
}
});
}),
token.onCancellationRequested(() => {
input.hide();
}),
input.onDidHide(() => {
dispose(disposables);
resolve(undefined);
}),
];
input.value = options.value;
input.valueSelection = options.valueSelection;
input.prompt = options.prompt;
input.placeholder = options.placeHolder;
input.password = options.password;
input.ignoreFocusOut = options.ignoreFocusLost;
input.show();
});
}
backButton = backButton;
createQuickPick<T extends IQuickPickItem>(): IQuickPick<T> {
this.create();
return new QuickPick<T>(this.ui);
}
createInputBox(): IInputBox {
this.create();
return new InputBox(this.ui);
}
private show(controller: QuickInput) {
this.create();
this.quickOpenService.close();
const oldController = this.controller;
this.controller = controller;
if (oldController) {
oldController.didHide();
}
this.setEnabled(true);
this.ui.leftActionBar.clear();
this.ui.title.textContent = '';
this.ui.rightActionBar.clear();
this.ui.checkAll.checked = false;
// this.ui.inputBox.value = ''; Avoid triggering an event.
this.ui.inputBox.placeholder = '';
this.ui.inputBox.password = false;
this.ui.inputBox.showDecoration(Severity.Ignore);
this.ui.visibleCount.setCount(0);
this.ui.count.setCount(0);
this.ui.message.textContent = '';
this.ui.progressBar.stop();
this.ui.list.setElements([]);
this.ui.list.matchOnDescription = false;
this.ui.list.matchOnDetail = false;
this.ui.ignoreFocusOut = false;
this.setComboboxAccessibility(false);
this.ui.inputBox.removeAttribute('aria-label');
const keybinding = this.keybindingService.lookupKeybinding(BackAction.ID);
backButton.tooltip = keybinding ? localize('quickInput.backWithKeybinding', "Back ({0})", keybinding.getLabel()) : localize('quickInput.back', "Back");
this.inQuickOpen('quickInput', true);
this.resetContextKeys();
this.ui.container.style.display = '';
this.updateLayout();
this.ui.inputBox.setFocus();
}
private setVisibilities(visibilities: Visibilities) {
this.ui.title.style.display = visibilities.title ? '' : 'none';
this.ui.checkAll.style.display = visibilities.checkAll ? '' : 'none';
this.filterContainer.style.display = visibilities.inputBox ? '' : 'none';
this.visibleCountContainer.style.display = visibilities.visibleCount ? '' : 'none';
this.countContainer.style.display = visibilities.count ? '' : 'none';
this.okContainer.style.display = visibilities.ok ? '' : 'none';
this.ui.message.style.display = visibilities.message ? '' : 'none';
this.ui.list.display(visibilities.list);
this.ui.container.classList[visibilities.checkAll ? 'add' : 'remove']('show-checkboxes');
this.updateLayout(); // TODO
}
private setComboboxAccessibility(enabled: boolean) {
if (enabled !== this.comboboxAccessibility) {
this.comboboxAccessibility = enabled;
if (this.comboboxAccessibility) {
this.ui.inputBox.setAttribute('role', 'combobox');
this.ui.inputBox.setAttribute('aria-haspopup', 'true');
this.ui.inputBox.setAttribute('aria-autocomplete', 'list');
this.ui.inputBox.setAttribute('aria-activedescendant', this.ui.list.getActiveDescendant());
} else {
this.ui.inputBox.removeAttribute('role');
this.ui.inputBox.removeAttribute('aria-haspopup');
this.ui.inputBox.removeAttribute('aria-autocomplete');
this.ui.inputBox.removeAttribute('aria-activedescendant');
}
}
}
private isScreenReaderOptimized() {
const detected = browser.getAccessibilitySupport() === AccessibilitySupport.Enabled;
const config = this.configurationService.getValue<IEditorOptions>('editor').accessibilitySupport;
return config === 'on' || (config === 'auto' && detected);
}
private setEnabled(enabled: boolean) {
if (enabled !== this.enabled) {
this.enabled = enabled;
for (const item of this.ui.leftActionBar.items) {
(item as ActionItem).getAction().enabled = enabled;
}
for (const item of this.ui.rightActionBar.items) {
(item as ActionItem).getAction().enabled = enabled;
}
this.ui.checkAll.disabled = !enabled;
			// this.ui.inputBox.enabled = enabled; Avoid losing focus.
this.ok.enabled = enabled;
this.ui.list.enabled = enabled;
}
}
private hide(focusLost?: boolean) {
const controller = this.controller;
if (controller) {
this.controller = null;
this.inQuickOpen('quickInput', false);
this.resetContextKeys();
this.ui.container.style.display = 'none';
if (!focusLost) {
this.editorGroupService.activeGroup.focus();
}
controller.didHide();
}
}
focus() {
if (this.isDisplayed()) {
this.ui.inputBox.setFocus();
}
}
toggle() {
if (this.isDisplayed() && this.controller instanceof QuickPick && this.controller.canSelectMany) {
this.ui.list.toggleCheckbox();
}
}
navigate(next: boolean, quickNavigate?: IQuickNavigateConfiguration) {
if (this.isDisplayed() && this.ui.list.isDisplayed()) {
this.ui.list.focus(next ? 'Next' : 'Previous');
if (quickNavigate && this.controller instanceof QuickPick) {
this.controller.quickNavigate = quickNavigate;
}
}
}
accept() {
this.onDidAcceptEmitter.fire();
return Promise.resolve(undefined);
}
back() {
this.onDidTriggerButtonEmitter.fire(this.backButton);
return Promise.resolve(undefined);
}
cancel() {
this.hide();
return Promise.resolve(undefined);
}
layout(dimension: dom.Dimension): void {
this.layoutDimensions = dimension;
this.updateLayout();
}
private updateLayout() {
if (this.layoutDimensions && this.ui) {
const titlebarOffset = this.partService.getTitleBarOffset();
this.ui.container.style.top = `${titlebarOffset}px`;
const style = this.ui.container.style;
const width = Math.min(this.layoutDimensions.width * 0.62 /* golden cut */, QuickInputService.MAX_WIDTH);
style.width = width + 'px';
style.marginLeft = '-' + (width / 2) + 'px';
this.ui.inputBox.layout();
this.ui.list.layout();
}
}
protected updateStyles() {
const theme = this.themeService.getTheme();
if (this.ui) {
// TODO
const titleColor = { dark: 'rgba(255, 255, 255, 0.105)', light: 'rgba(0,0,0,.06)', hc: 'black' }[theme.type];
this.titleBar.style.backgroundColor = titleColor ? titleColor.toString() : null;
this.ui.inputBox.style(theme);
const sideBarBackground = theme.getColor(SIDE_BAR_BACKGROUND);
this.ui.container.style.backgroundColor = sideBarBackground ? sideBarBackground.toString() : null;
const sideBarForeground = theme.getColor(SIDE_BAR_FOREGROUND);
this.ui.container.style.color = sideBarForeground ? sideBarForeground.toString() : null;
const contrastBorderColor = theme.getColor(contrastBorder);
this.ui.container.style.border = contrastBorderColor ? `1px solid ${contrastBorderColor}` : null;
const widgetShadowColor = theme.getColor(widgetShadow);
this.ui.container.style.boxShadow = widgetShadowColor ? `0 5px 8px ${widgetShadowColor}` : null;
}
}
private isDisplayed() {
return this.ui && this.ui.container.style.display !== 'none';
}
}
export const QuickPickManyToggle: ICommandAndKeybindingRule = {
id: 'workbench.action.quickPickManyToggle',
weight: KeybindingWeight.WorkbenchContrib,
when: inQuickOpenContext,
primary: undefined,
handler: accessor => {
const quickInputService = accessor.get(IQuickInputService);
quickInputService.toggle();
}
};
export class BackAction extends Action {
public static readonly ID = 'workbench.action.quickInputBack';
public static readonly LABEL = localize('back', "Back");
constructor(id: string, label: string, @IQuickInputService private quickInputService: IQuickInputService) {
super(id, label);
}
public run(): Promise<any> {
this.quickInputService.back();
return Promise.resolve(null);
}
}<|fim▁end|> | return this._activeItems; |
<|file_name|>unicode-regexp-unanchored-advance.js<|end_file_name|><|fim▁begin|>// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --harmony-unicode-regexps
var s = "a".repeat(1E7) + "\u1234";<|fim▁hole|><|fim▁end|> | assertEquals(["\u1234", "\u1234"], /(\u1234)/u.exec(s)); |
<|file_name|>dynamicControls.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-console */
import * as WebSocket from 'ws';
import * as faker from 'faker';
import {
delay,
GameClient,
IButtonData,
IControlData,
IParticipant,
setWebSocket,
} from '../lib';
if (process.argv.length < 4) {
    console.log('Usage: gameClient.exe <token> <versionId>');
process.exit();
}
// We need to tell the interactive client what type of websocket we are using.
setWebSocket(WebSocket);
// As we're on the Streamer's side we need a "GameClient" instance
const client = new GameClient();
// Log when we're connected to interactive
client.on('open', () => console.log('Connected to interactive'));
// These can be un-commented to see the raw JSON messages under the hood
// client.on('message', (err: any) => console.log('<<<', err));
// client.on('send', (err: any) => console.log('>>>', err));
// client.on('error', (err: any) => console.log(err));
/**
* This makes button objects, it will make the amount of buttons we tell it to
* we'll use it to create controls dynamically!
*/
function makeControls(amount: number): IControlData[] {
const controls: IButtonData[] = [];
const size = 10;
for (let i = 0; i < amount; i++) {
controls.push({
controlID: `${i}`,
kind: 'button',
text: faker.name.firstName(),
cost: 1,
position: [
{
size: 'large',
width: size,
height: size / 2,
x: i * size,
y: 1,
},
{
size: 'small',
width: size,
height: size / 2,
x: i * size,
y: 1,
},
{<|fim▁hole|> width: size,
height: size,
x: i * size,
y: 1,
},
],
},
);
}
return controls;
}
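// A minimal sketch of what makeControls(1) produces (the button text comes
// from faker, so it varies): a button with controlID '0', cost 1, a 10x5
// placement at x=0 on the large and small grids, and a 10x10 'medium' one.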
const delayTime = 2000;
/* Loop creates 5 controls and adds them to the default scene.
* It then waits delayTime milliseconds and then deletes them,
* before calling itself again.
*/
function loop() {
const scene = client.state.getScene('default');
scene.createControls(makeControls(5))
.then(() => delay(delayTime))
.then(() => scene.deleteAllControls())
.then(() => delay(delayTime))
.then(() => loop());
}
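// Because each iteration is rescheduled through the promise chain, this
// self-call does not deepen the stack the way direct recursion would.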
// Now we open the connection, passing in our authentication details and a versionId.
client.open({
authToken: process.argv[2],
versionId: parseInt(process.argv[3], 10),
})
.then(() => {
/* Pull in the scenes stored on the server
* then call ready so our controls show up.
* then call loop() to begin our loop.
*/
return client.synchronizeState();
})
.then(() => client.ready(true))
.then(() => loop());
client.state.on('participantJoin', (participant: IParticipant) => {
console.log(`${participant.username}(${participant.sessionID}) Joined`);
});
client.state.on('participantLeave', (participant: string) => {
console.log(`${participant} Left`);
});
/* tslint:enable:no-console */<|fim▁end|> | size: 'medium', |
<|file_name|>start_mininet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from mininet.cli import CLI
from mininet.net import Mininet
from mininet.node import RemoteController, OVSSwitch
def start_of13_switches(controller, switches):
for s in switches:
s.start([controller])
s.sendCmd('ovs-vsctl set bridge %s protocols=OpenFlow13' % s)
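# Note: sendCmd() is fire-and-forget; if the protocol change must be applied
# before traffic flows, s.cmd() (which waits for the command to complete) is
# the safer call here.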
if '__main__' == __name__:
net = Mininet(controller=RemoteController, autoStaticArp=True, switch=OVSSwitch)
c1 = net.addController('c1', ip='127.0.0.1', port=6653)
s1 = net.addSwitch('s1')
s2 = net.addSwitch('s2')
s3 = net.addSwitch('s3')
s4 = net.addSwitch('s4')
h1 = net.addHost('h1')
h2 = net.addHost('h2')
s1.linkTo(s2)
s2.linkTo(s3)
s3.linkTo(s4)
s4.linkTo(s1)
s1.linkTo(h1)
s3.linkTo(h2)
net.build()
c1.start()
start_of13_switches(c1, [s1, s2, s3, s4])
CLI(net)
net.stop()<|fim▁end|> | # Unless required by applicable law or agreed to in writing, software # |
<|file_name|>mixins.py<|end_file_name|><|fim▁begin|># $Id$
"""Mixins that are useful for classes using vtk_kit.
@author: Charl P. Botha <http://cpbotha.net/>
"""
from external.vtkPipeline.ConfigVtkObj import ConfigVtkObj
from external.vtkPipeline.vtkMethodParser import VtkMethodParser
from module_base import ModuleBase
from module_mixins import IntrospectModuleMixin # temporary
import module_utils # temporary, most of this should be in utils.
import re
import types
import utils
#########################################################################
class PickleVTKObjectsModuleMixin(object):
"""This mixin will pickle the state of all vtk objects whose binding
attribute names have been added to self._vtkObjectNames, e.g. if you have
a self._imageMath, '_imageMath' should be in the list.
Your module has to derive from module_base as well so that it has a
self._config!
Remember to call the __init__ of this class with the list of attribute
strings representing vtk objects that you want pickled. All the objects
have to exist and be initially configured by then.
Remember to call close() when your child class close()s.
"""
def __init__(self, vtkObjectNames):
# you have to add the NAMES of the objects that you want pickled
# to this list.
self._vtkObjectNames = vtkObjectNames
self.statePattern = re.compile ("To[A-Z0-9]")
# make sure that the state of the vtkObjectNames objects is
# encapsulated in the initial _config
self.logic_to_config()
def close(self):
# make sure we get rid of these bindings as well
del self._vtkObjectNames
def logic_to_config(self):
parser = VtkMethodParser()
for vtkObjName in self._vtkObjectNames:
# pickled data: a list with toggle_methods, state_methods and
# get_set_methods as returned by the vtkMethodParser. Each of
# these is a list of tuples with the name of the method (as
# returned by the vtkMethodParser) and the value; in the case
# of the stateMethods, we use the whole stateGroup instead of
# just a single name
vtkObjPD = [[], [], []]
vtkObj = getattr(self, vtkObjName)
parser.parse_methods(vtkObj)
# parser now has toggle_methods(), state_methods() and
# get_set_methods();
# toggle_methods: ['BlaatOn', 'AbortExecuteOn']
# state_methods: [['SetBlaatToOne', 'SetBlaatToTwo'],
# ['SetMaatToThree', 'SetMaatToFive']]
# get_set_methods: ['NumberOfThreads', 'Progress']
for method in parser.toggle_methods():
# if you query ReleaseDataFlag on a filter with 0 outputs,
# VTK yields an error
if vtkObj.GetNumberOfOutputPorts() == 0 and \
method == 'ReleaseDataFlagOn':
continue
# we need to snip the 'On' off
val = eval("vtkObj.Get%s()" % (method[:-2],))
vtkObjPD[0].append((method, val))
for stateGroup in parser.state_methods():
# we search up to the To
end = self.statePattern.search (stateGroup[0]).start ()
# so we turn SetBlaatToOne to GetBlaat
get_m = 'G'+stateGroup[0][1:end]
# we're going to have to be more clever when we set_config...
# use a similar trick to get_state in vtkMethodParser
val = eval('vtkObj.%s()' % (get_m,))
vtkObjPD[1].append((stateGroup, val))
for method in parser.get_set_methods():
val = eval('vtkObj.Get%s()' % (method,))
vtkObjPD[2].append((method, val))
# finally set the pickle data in the correct position
setattr(self._config, vtkObjName, vtkObjPD)
def config_to_logic(self):
# go through at least the attributes in self._vtkObjectNames
for vtkObjName in self._vtkObjectNames:
try:
vtkObjPD = getattr(self._config, vtkObjName)
vtkObj = getattr(self, vtkObjName)
except AttributeError:
print "PickleVTKObjectsModuleMixin: %s not available " \
"in self._config OR in self. Skipping." % (vtkObjName,)
else:
for method, val in vtkObjPD[0]:
if val:
eval('vtkObj.%s()' % (method,))
else:
# snip off the On
eval('vtkObj.%sOff()' % (method[:-2],))
for stateGroup, val in vtkObjPD[1]:
# keep on calling the methods in stategroup until
# the getter returns a value == val.
end = self.statePattern.search(stateGroup[0]).start()
getMethod = 'G'+stateGroup[0][1:end]
for i in range(len(stateGroup)):
m = stateGroup[i]
eval('vtkObj.%s()' % (m,))
tempVal = eval('vtkObj.%s()' % (getMethod,))
if tempVal == val:
# success! break out of the for loop
break
for method, val in vtkObjPD[2]:
try:
eval('vtkObj.Set%s(val)' % (method,))
except TypeError:
if type(val) in [types.TupleType, types.ListType]:
# sometimes VTK wants the separate elements
# and not the tuple / list
eval("vtkObj.Set%s(*val)"%(method,))
else:
# re-raise the exception if it wasn't a
# tuple/list
raise
#########################################################################
# note that the pickle mixin comes first, as its config_to_logic/logic_to_config
# should be chosen over that of noConfig
class SimpleVTKClassModuleBase(PickleVTKObjectsModuleMixin,
IntrospectModuleMixin,
ModuleBase):
"""Use this base to make a DeVIDE module that wraps a single VTK
object. The state of the VTK object will be saved when the network
is.
You only have to override the __init__ method and call the __init__<|fim▁hole|> shown if the user requests module help). If you don't want this,
call the ctor with replaceDoc=False.
inputFunctions is a list of the complete methods that have to be called
on the encapsulated VTK class, e.g. ['SetInput1(inputStream)',
'SetInput1(inputStream)']. The same goes for outputFunctions, except that
there's no inputStream involved. Use None in both cases if you want
the default to be used (SetInput(), GetOutput()).
"""
def __init__(self, module_manager, vtkObjectBinding, progressText,
inputDescriptions, outputDescriptions,
replaceDoc=True,
inputFunctions=None, outputFunctions=None):
self._viewFrame = None
self._configVtkObj = None
# first these two mixins
ModuleBase.__init__(self, module_manager)
self._theFilter = vtkObjectBinding
if replaceDoc:
myMessage = "<em>"\
"This is a special DeVIDE module that very simply " \
"wraps a single VTK class. In general, the " \
"complete state of the class will be saved along " \
"with the rest of the network. The documentation " \
"below is that of the wrapped VTK class:</em>"
self.__doc__ = '%s\n\n%s' % (myMessage, self._theFilter.__doc__)
# now that we have the object, init the pickle mixin so
# that the state of this object will be saved
PickleVTKObjectsModuleMixin.__init__(self, ['_theFilter'])
# make progress hooks for the object
module_utils.setup_vtk_object_progress(self, self._theFilter,
progressText)
self._inputDescriptions = inputDescriptions
self._outputDescriptions = outputDescriptions
self._inputFunctions = inputFunctions
self._outputFunctions = outputFunctions
def _createViewFrame(self):
parentWindow = self._module_manager.get_module_view_parent_window()
import resources.python.defaultModuleViewFrame
reload(resources.python.defaultModuleViewFrame)
dMVF = resources.python.defaultModuleViewFrame.defaultModuleViewFrame
viewFrame = module_utils.instantiate_module_view_frame(
self, self._module_manager, dMVF)
# ConfigVtkObj parent not important, we're passing frame + panel
# this should populate the sizer with a new sizer7
# params: noParent, noRenwin, vtk_obj, frame, panel
self._configVtkObj = ConfigVtkObj(None, None,
self._theFilter,
viewFrame, viewFrame.viewFramePanel)
module_utils.create_standard_object_introspection(
self, viewFrame, viewFrame.viewFramePanel,
{'Module (self)' : self}, None)
# we don't want the Execute button to be default... else stuff gets
# executed with every enter in the command window (at least in Doze)
module_utils.create_eoca_buttons(self, viewFrame,
viewFrame.viewFramePanel,
False)
self._viewFrame = viewFrame
return viewFrame
def close(self):
# we play it safe... (the graph_editor/module_manager should have
# disconnected us by now)
for input_idx in range(len(self.get_input_descriptions())):
self.set_input(input_idx, None)
PickleVTKObjectsModuleMixin.close(self)
IntrospectModuleMixin.close(self)
if self._viewFrame is not None:
self._configVtkObj.close()
self._viewFrame.Destroy()
ModuleBase.close(self)
# get rid of our binding to the vtkObject
del self._theFilter
def get_output_descriptions(self):
return self._outputDescriptions
def get_output(self, idx):
# this will only every be invoked if your get_output_descriptions has
# 1 or more elements
if self._outputFunctions:
return eval('self._theFilter.%s' % (self._outputFunctions[idx],))
else:
return self._theFilter.GetOutput()
def get_input_descriptions(self):
return self._inputDescriptions
def set_input(self, idx, inputStream):
# this will only be called for a certain idx if you've specified that
# many elements in your get_input_descriptions
if self._inputFunctions:
exec('self._theFilter.%s' %
(self._inputFunctions[idx]))
else:
if idx == 0:
self._theFilter.SetInput(inputStream)
else:
self._theFilter.SetInput(idx, inputStream)
def execute_module(self):
# it could be a writer, in that case, call the Write method.
if hasattr(self._theFilter, 'Write') and \
callable(self._theFilter.Write):
self._theFilter.Write()
else:
self._theFilter.Update()
def streaming_execute_module(self):
"""All VTK classes should be streamable.
"""
# it could be a writer, in that case, call the Write method.
if hasattr(self._theFilter, 'Write') and \
callable(self._theFilter.Write):
self._theFilter.Write()
else:
self._theFilter.Update()
def view(self):
if self._viewFrame is None:
# we have an initial config populated with stuff and in sync
# with theFilter. The viewFrame will also be in sync with the
# filter
self._viewFrame = self._createViewFrame()
self._viewFrame.Show(True)
self._viewFrame.Raise()
def config_to_view(self):
# the pickleVTKObjectsModuleMixin does logic <-> config
# so when the user clicks "sync", logic_to_config is called
# which transfers picklable state from the LOGIC to the CONFIG
# then we do double the work and call update_gui, which transfers
# the same state from the LOGIC straight up to the VIEW
self._configVtkObj.update_gui()
def view_to_config(self):
# same thing here: user clicks "apply", view_to_config is called which
# zaps UI changes straight to the LOGIC. Then we have to call
# logic_to_config explicitly which brings the info back up to the
# config... i.e. view -> logic -> config
# after that, config_to_logic is called which transfers all state AGAIN
# from the config to the logic
self._configVtkObj.apply_changes()
self.logic_to_config()
#########################################################################<|fim▁end|> | of this class with the desired parameters.
The __doc__ string of your module class will be replaced with the
__doc__ string of the encapsulated VTK class (and will thus be |
<|file_name|>ev_mention.py<|end_file_name|><|fim▁begin|>async def ev_mention(ev, message):<|fim▁hole|> 'event': 'mention',
'count': 0
}
collection = 'EventStats'
database = ev.bot.cfg.db.database
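    # Read-increment-write: look up the persisted counter for this event,
    # seeding it with def_stat_data the first time 'mention' is seen, then
    # store count + 1 back into EventStats.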
check = ev.db[database][collection].find_one({"event": 'mention'})
if not check:
ev.db[database][collection].insert_one(def_stat_data)
ev_count = 0
else:
ev_count = check['count']
ev_count += 1
update_target = {"event": 'mention'}
update_data = {"$set": {'count': ev_count}}
ev.db[database][collection].update_one(update_target, update_data)<|fim▁end|> | def_stat_data = { |
<|file_name|>constants.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
<|fim▁hole|>export const $DELEGATE = '$delegate';
export const $EXCEPTION_HANDLER = '$exceptionHandler';
export const $HTTP_BACKEND = '$httpBackend';
export const $INJECTOR = '$injector';
export const $INTERVAL = '$interval';
export const $PARSE = '$parse';
export const $PROVIDE = '$provide';
export const $ROOT_ELEMENT = '$rootElement';
export const $ROOT_SCOPE = '$rootScope';
export const $SCOPE = '$scope';
export const $TEMPLATE_CACHE = '$templateCache';
export const $TEMPLATE_REQUEST = '$templateRequest';
export const $$TESTABILITY = '$$testability';
export const COMPILER_KEY = '$$angularCompiler';
export const DOWNGRADED_MODULE_COUNT_KEY = '$$angularDowngradedModuleCount';
export const GROUP_PROJECTABLE_NODES_KEY = '$$angularGroupProjectableNodes';
export const INJECTOR_KEY = '$$angularInjector';
export const LAZY_MODULE_REF = '$$angularLazyModuleRef';
export const NG_ZONE_KEY = '$$angularNgZone';
export const UPGRADE_APP_TYPE_KEY = '$$angularUpgradeAppType';
export const REQUIRE_INJECTOR = '?^^' + INJECTOR_KEY;
export const REQUIRE_NG_MODEL = '?ngModel';
export const UPGRADE_MODULE_NAME = '$$UpgradeModule';<|fim▁end|> | export const $COMPILE = '$compile';
export const $CONTROLLER = '$controller'; |
<|file_name|>42f4a0f4afd9_create_table.py<|end_file_name|><|fim▁begin|>"""create_table
Revision ID: 42f4a0f4afd9
Revises: None
Create Date: 2014-11-15 16:53:22.716676
"""
# revision identifiers, used by Alembic.
revision = '42f4a0f4afd9'
down_revision = None
from alembic import op
import sqlalchemy as sa
#from guokr.platform.sqlalchemy.types import JSONType
def upgrade():
op.create_table(u'board',
sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
sa.Column('name', sa.Unicode(256), nullable=False),
sa.Column('date_created', sa.DateTime(timezone=True),
nullable=False, index=True,
server_default=sa.func.current_timestamp()))
op.create_table(u'round',
sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
sa.Column('board_id', sa.Integer(),
sa.ForeignKey('board.id'), nullable=True),
sa.Column('num', sa.Integer(), nullable=False),
sa.Column('date_created', sa.DateTime(timezone=True),
nullable=False, index=True,
server_default=sa.func.current_timestamp()))
op.create_table(u'match',
sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
sa.Column('round_id', sa.Integer(),
sa.ForeignKey('round.id'), nullable=True),
sa.Column('place', sa.Unicode(1024), nullable=False),
sa.Column('introduction', sa.UnicodeText(), nullable=False),
sa.Column('date_started', sa.DateTime(timezone=True),
nullable=False, index=True),
sa.Column('date_created', sa.DateTime(timezone=True),
nullable=False, index=True,
server_default=sa.func.current_timestamp()))
op.create_table(u'team',
sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
sa.Column('name', sa.Unicode(256), nullable=False),
sa.Column('introduction', sa.UnicodeText(), nullable=False),
sa.Column('date_created', sa.DateTime(timezone=True),
nullable=False, index=True,
server_default=sa.func.current_timestamp()))
op.create_table(u'match_player',
sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
sa.Column('match_id', sa.Integer(),
sa.ForeignKey('match.id'), nullable=True),
sa.Column('team_id', sa.Integer(),
sa.ForeignKey('team.id'), nullable=True),
sa.Column('score', sa.Integer(), server_default='0', nullable=True),
sa.Column('is_home', sa.Boolean(), nullable=False,
server_default=sa.sql.false()),
# sa.Column('info', JSONType()),<|fim▁hole|>
def downgrade():
    # Drop dependent (child) tables before the tables they reference so the
    # foreign-key constraints don't block the drops.
    op.drop_table(u'match_player')
    op.drop_table(u'team')
    op.drop_table(u'match')
    op.drop_table(u'round')
op.drop_table(u'board')<|fim▁end|> | sa.Column('date_created', sa.DateTime(timezone=True),
nullable=False, index=True,
server_default=sa.func.current_timestamp())) |
<|file_name|>CatsReferralForSessionManagementVo.java<|end_file_name|><|fim▁begin|>// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.RefMan.vo;
/**
* Linked to RefMan.CatsReferral business object (ID: 1004100035).
*/
public class CatsReferralForSessionManagementVo extends ims.RefMan.vo.CatsReferralRefVo implements ims.vo.ImsCloneable, Comparable
{
private static final long serialVersionUID = 1L;
public CatsReferralForSessionManagementVo()
{
}
public CatsReferralForSessionManagementVo(Integer id, int version)
{
super(id, version);
}
public CatsReferralForSessionManagementVo(ims.RefMan.vo.beans.CatsReferralForSessionManagementVoBean bean)
{
this.id = bean.getId();
this.version = bean.getVersion();
this.referraldetails = bean.getReferralDetails() == null ? null : bean.getReferralDetails().buildVo();
this.currentstatus = bean.getCurrentStatus() == null ? null : bean.getCurrentStatus().buildVo();
}
public void populate(ims.vo.ValueObjectBeanMap map, ims.RefMan.vo.beans.CatsReferralForSessionManagementVoBean bean)
{
this.id = bean.getId();
this.version = bean.getVersion();
this.referraldetails = bean.getReferralDetails() == null ? null : bean.getReferralDetails().buildVo(map);
this.currentstatus = bean.getCurrentStatus() == null ? null : bean.getCurrentStatus().buildVo(map);
}
public ims.vo.ValueObjectBean getBean()
{
return this.getBean(new ims.vo.ValueObjectBeanMap());
}
public ims.vo.ValueObjectBean getBean(ims.vo.ValueObjectBeanMap map)
{
ims.RefMan.vo.beans.CatsReferralForSessionManagementVoBean bean = null;
if(map != null)
bean = (ims.RefMan.vo.beans.CatsReferralForSessionManagementVoBean)map.getValueObjectBean(this);
if (bean == null)
{
bean = new ims.RefMan.vo.beans.CatsReferralForSessionManagementVoBean();
map.addValueObjectBean(this, bean);
bean.populate(map, this);
}
return bean;
}
public Object getFieldValueByFieldName(String fieldName)
{
if(fieldName == null)
throw new ims.framework.exceptions.CodingRuntimeException("Invalid field name");
fieldName = fieldName.toUpperCase();
if(fieldName.equals("REFERRALDETAILS"))
return getReferralDetails();
if(fieldName.equals("CURRENTSTATUS"))
return getCurrentStatus();
return super.getFieldValueByFieldName(fieldName);
}
public boolean getReferralDetailsIsNotNull()
{
return this.referraldetails != null;
}
public ims.RefMan.vo.ReferralLetterForSessionManagementVo getReferralDetails()
{
return this.referraldetails;
}
public void setReferralDetails(ims.RefMan.vo.ReferralLetterForSessionManagementVo value)
{
this.isValidated = false;
this.referraldetails = value;
}
public boolean getCurrentStatusIsNotNull()
{
return this.currentstatus != null;
}
public ims.RefMan.vo.CatsReferralStatusLiteVo getCurrentStatus()
{
return this.currentstatus;
}
public void setCurrentStatus(ims.RefMan.vo.CatsReferralStatusLiteVo value)
{
this.isValidated = false;
this.currentstatus = value;
}
public boolean isValidated()
{
if(this.isBusy)
return true;
this.isBusy = true;
if(!this.isValidated)<|fim▁hole|> this.isBusy = false;
return false;
}
this.isBusy = false;
return true;
}
public String[] validate()
{
return validate(null);
}
public String[] validate(String[] existingErrors)
{
if(this.isBusy)
return null;
this.isBusy = true;
java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
if(existingErrors != null)
{
for(int x = 0; x < existingErrors.length; x++)
{
listOfErrors.add(existingErrors[x]);
}
}
int errorCount = listOfErrors.size();
if(errorCount == 0)
{
this.isBusy = false;
this.isValidated = true;
return null;
}
String[] result = new String[errorCount];
for(int x = 0; x < errorCount; x++)
result[x] = (String)listOfErrors.get(x);
this.isBusy = false;
this.isValidated = false;
return result;
}
public void clearIDAndVersion()
{
this.id = null;
this.version = 0;
}
public Object clone()
{
if(this.isBusy)
return this;
this.isBusy = true;
CatsReferralForSessionManagementVo clone = new CatsReferralForSessionManagementVo(this.id, this.version);
if(this.referraldetails == null)
clone.referraldetails = null;
else
clone.referraldetails = (ims.RefMan.vo.ReferralLetterForSessionManagementVo)this.referraldetails.clone();
if(this.currentstatus == null)
clone.currentstatus = null;
else
clone.currentstatus = (ims.RefMan.vo.CatsReferralStatusLiteVo)this.currentstatus.clone();
clone.isValidated = this.isValidated;
this.isBusy = false;
return clone;
}
public int compareTo(Object obj)
{
return compareTo(obj, true);
}
public int compareTo(Object obj, boolean caseInsensitive)
{
if (obj == null)
{
return -1;
}
if(caseInsensitive); // this is to avoid eclipse warning only.
if (!(CatsReferralForSessionManagementVo.class.isAssignableFrom(obj.getClass())))
{
			throw new ClassCastException("A CatsReferralForSessionManagementVo object cannot be compared to an Object of type " + obj.getClass().getName());
}
if (this.id == null)
return 1;
if (((CatsReferralForSessionManagementVo)obj).getBoId() == null)
return -1;
return this.id.compareTo(((CatsReferralForSessionManagementVo)obj).getBoId());
}
public synchronized static int generateValueObjectUniqueID()
{
return ims.vo.ValueObject.generateUniqueID();
}
public int countFieldsWithValue()
{
int count = 0;
if(this.referraldetails != null)
count++;
if(this.currentstatus != null)
count++;
return count;
}
public int countValueObjectFields()
{
return 2;
}
protected ims.RefMan.vo.ReferralLetterForSessionManagementVo referraldetails;
protected ims.RefMan.vo.CatsReferralStatusLiteVo currentstatus;
private boolean isValidated = false;
private boolean isBusy = false;
}<|fim▁end|> | { |
<|file_name|>core_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras core layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
@keras_parameterized.run_all_keras_modes
class DropoutLayersTest(keras_parameterized.TestCase):
def test_dropout(self):
testing_utils.layer_test(
keras.layers.Dropout, kwargs={'rate': 0.5}, input_shape=(3, 2))
testing_utils.layer_test(
keras.layers.Dropout,
kwargs={'rate': 0.5,
'noise_shape': [3, 1]},
input_shape=(3, 2))
def test_dropout_supports_masking(self):
dropout = keras.layers.Dropout(0.5)
self.assertEqual(True, dropout.supports_masking)
def test_spatial_dropout_1d(self):
testing_utils.layer_test(
keras.layers.SpatialDropout1D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4))
def test_spatial_dropout_2d(self):
testing_utils.layer_test(
keras.layers.SpatialDropout2D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4, 5))
testing_utils.layer_test(
keras.layers.SpatialDropout2D,
kwargs={'rate': 0.5, 'data_format': 'channels_first'},
input_shape=(2, 3, 4, 5))
def test_spatial_dropout_3d(self):
testing_utils.layer_test(
keras.layers.SpatialDropout3D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4, 4, 5))
testing_utils.layer_test(
keras.layers.SpatialDropout3D,
kwargs={'rate': 0.5, 'data_format': 'channels_first'},
input_shape=(2, 3, 4, 4, 5))
@keras_parameterized.run_all_keras_modes
class LambdaLayerTest(keras_parameterized.TestCase):
def test_lambda(self):
testing_utils.layer_test(
keras.layers.Lambda,
kwargs={'function': lambda x: x + 1},
input_shape=(3, 2))
testing_utils.layer_test(
keras.layers.Lambda,
kwargs={
'function': lambda x, a, b: x * a + b,
'arguments': {
'a': 0.6,
'b': 0.4
}
},
input_shape=(3, 2))
# test serialization with function
def f(x):
return x + 1
ld = keras.layers.Lambda(f)
config = ld.get_config()
ld = keras.layers.deserialize({
'class_name': 'Lambda',
'config': config
})
# test with lambda
ld = keras.layers.Lambda(
lambda x: keras.backend.concatenate([math_ops.square(x), x]))
config = ld.get_config()
ld = keras.layers.Lambda.from_config(config)
def test_lambda_multiple_inputs(self):
ld = keras.layers.Lambda(lambda x: x[0], output_shape=lambda x: x[0])
x1 = np.ones([3, 2], np.float32)
x2 = np.ones([3, 5], np.float32)
out = ld([x1, x2])
self.assertAllEqual(out.shape, [3, 2])
def test_lambda_output_shape(self):
l = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1))
l(keras.backend.variable(np.ones((1, 1))))
self.assertEqual((1, 1), l.get_config()['output_shape'])
def test_lambda_output_shape_function(self):
def get_output_shape(input_shape):
return 1 * input_shape
l = keras.layers.Lambda(lambda x: x + 1, output_shape=get_output_shape)
l(keras.backend.variable(np.ones((1, 1))))
self.assertEqual('lambda', l.get_config()['output_shape_type'])
def test_lambda_output_shape_autocalculate_multiple_inputs(self):
def lambda_fn(x):
return math_ops.matmul(x[0], x[1])
l = keras.layers.Lambda(lambda_fn)
output_shape = l.compute_output_shape([(10, 10), (10, 20)])
self.assertAllEqual((10, 20), output_shape)
def test_lambda_output_shape_list_multiple_outputs(self):
def lambda_fn(x):
return x
l = keras.layers.Lambda(lambda_fn, output_shape=[(10,), (20,)])
output_shape = l.compute_output_shape([(10, 10), (10, 20)])
self.assertAllEqual([(10, 10), (10, 20)], output_shape)
def test_lambda_output_shape_tuple_with_none(self):
def lambda_fn(x):
return x
l = keras.layers.Lambda(lambda_fn, output_shape=(None, 10))
output_shape = l.compute_output_shape((5, 10, 20))
self.assertAllEqual([5, None, 10], output_shape.as_list())
def test_lambda_output_shape_function_multiple_outputs(self):
def lambda_fn(x):
return x
def output_shape_fn(input_shape):
return input_shape
l = keras.layers.Lambda(lambda_fn, output_shape=output_shape_fn)
output_shape = l.compute_output_shape([(10, 10), (10, 20)])
self.assertAllEqual([(10, 10), (10, 20)], output_shape)
def test_lambda_config_serialization(self):
# Test serialization with output_shape and output_shape_type
layer = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1))
layer(keras.backend.variable(np.ones((1, 1))))
config = layer.get_config()
layer = keras.layers.deserialize({
'class_name': 'Lambda',
'config': config
})
layer = keras.layers.Lambda.from_config(config)
@keras_parameterized.run_all_keras_modes
class CoreLayersTest(keras_parameterized.TestCase):
def test_masking(self):
testing_utils.layer_test(
keras.layers.Masking, kwargs={}, input_shape=(3, 2, 3))
def test_keras_mask(self):
x = np.ones((10, 10))
y = keras.layers.Masking(1.)(x)
self.assertTrue(hasattr(y, '_keras_mask'))
self.assertTrue(y._keras_mask is not None)
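    # every input element equals the mask value (1.), so the computed mask
    # is all False and compares equal to np.zeros((10,))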
self.assertAllClose(self.evaluate(y._keras_mask), np.zeros((10,)))
def test_activation(self):
# with string argument
testing_utils.layer_test(
keras.layers.Activation,
kwargs={'activation': 'relu'},
input_shape=(3, 2))
# with function argument
testing_utils.layer_test(
keras.layers.Activation,
kwargs={'activation': keras.backend.relu},
input_shape=(3, 2))
def test_reshape(self):
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (8, 1)},
input_shape=(3, 2, 4))
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (-1, 1)},
input_shape=(3, 2, 4))
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (1, -1)},
input_shape=(3, 2, 4))
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (-1, 1)},
input_shape=(None, None, 2))
def test_permute(self):
testing_utils.layer_test(
keras.layers.Permute, kwargs={'dims': (2, 1)}, input_shape=(3, 2, 4))
def test_permute_errors_on_invalid_starting_dims_index(self):
with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
testing_utils.layer_test(
keras.layers.Permute,
kwargs={'dims': (0, 1, 2)}, input_shape=(3, 2, 4))
def test_permute_errors_on_invalid_set_of_dims_indices(self):
with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
testing_utils.layer_test(
keras.layers.Permute,
kwargs={'dims': (1, 4, 2)}, input_shape=(3, 2, 4))
def test_flatten(self):
testing_utils.layer_test(
keras.layers.Flatten, kwargs={}, input_shape=(3, 2, 4))
# Test channels_first
inputs = np.random.random((10, 3, 5, 5)).astype('float32')
outputs = testing_utils.layer_test(
keras.layers.Flatten,
kwargs={'data_format': 'channels_first'},
input_data=inputs)
target_outputs = np.reshape(
np.transpose(inputs, (0, 2, 3, 1)), (-1, 5 * 5 * 3))
self.assertAllClose(outputs, target_outputs)
def test_flatten_scalar_channels(self):
testing_utils.layer_test(
keras.layers.Flatten, kwargs={}, input_shape=(3,))
# Test channels_first
inputs = np.random.random((10,)).astype('float32')
outputs = testing_utils.layer_test(
keras.layers.Flatten,
kwargs={'data_format': 'channels_first'},
input_data=inputs)
target_outputs = np.expand_dims(inputs, -1)
self.assertAllClose(outputs, target_outputs)
def test_repeat_vector(self):
testing_utils.layer_test(
keras.layers.RepeatVector, kwargs={'n': 3}, input_shape=(3, 2))
def test_dense(self):
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 2))
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(None, None, 2))
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 5, 2))
def test_dense_dtype(self):
inputs = ops.convert_to_tensor(
np.random.randint(low=0, high=7, size=(2, 2)))
layer = keras.layers.Dense(5, dtype='float32')<|fim▁hole|> outputs = layer(inputs)
self.assertEqual(outputs.dtype, 'float32')
def test_dense_with_policy(self):
inputs = ops.convert_to_tensor(
np.random.randint(low=0, high=7, size=(2, 2)), dtype='float16')
layer = keras.layers.Dense(5, dtype=policy.Policy('infer_float32_vars'))
outputs = layer(inputs)
self.assertEqual(outputs.dtype, 'float16')
self.assertEqual(layer.kernel.dtype, 'float32')
def test_dense_regularization(self):
layer = keras.layers.Dense(
3,
kernel_regularizer=keras.regularizers.l1(0.01),
bias_regularizer='l1',
activity_regularizer='l2',
name='dense_reg')
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(3, len(layer.losses))
def test_dense_constraints(self):
k_constraint = keras.constraints.max_norm(0.01)
b_constraint = keras.constraints.max_norm(0.01)
layer = keras.layers.Dense(
3, kernel_constraint=k_constraint, bias_constraint=b_constraint)
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(layer.kernel.constraint, k_constraint)
self.assertEqual(layer.bias.constraint, b_constraint)
def test_activity_regularization(self):
layer = keras.layers.ActivityRegularization(l1=0.1)
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(1, len(layer.losses))
config = layer.get_config()
self.assertEqual(config.pop('l1'), 0.1)
def test_numpy_inputs(self):
if context.executing_eagerly():
layer = keras.layers.RepeatVector(2)
x = np.ones((10, 10))
self.assertAllEqual(np.ones((10, 2, 10)), layer(x))
layer = keras.layers.Concatenate()
x, y = np.ones((10, 10)), np.ones((10, 10))
self.assertAllEqual(np.ones((10, 20)), layer([x, y]))
if __name__ == '__main__':
test.main()<|fim▁end|> | |
<|file_name|>MovementCommandSubscriber.java<|end_file_name|><|fim▁begin|>package org.iproduct.iptpi.domain.movement;
import static java.lang.Math.PI;
import static java.lang.Math.abs;
import static java.lang.Math.atan;
import static java.lang.Math.cbrt;
import static java.lang.Math.cos;
import static java.lang.Math.hypot;
import static java.lang.Math.min;
import static java.lang.Math.pow;
import static java.lang.Math.signum;
import static java.lang.Math.sin;
import static java.lang.Math.sqrt;
import static java.lang.Math.tan;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.MAIN_AXE_LENGTH;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.MAX_ROBOT_ANGULAR_ACCELERATION;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.MAX_ROBOT_LINEAR_ACCELERATION;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.MAX_ROBOT_LINEAR_VELOCITY;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.ROBOT_STOPPING_DECCELERATION;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.WHEEL_RADIUS;
import static org.iproduct.iptpi.domain.CommandName.STOP;
import static org.iproduct.iptpi.domain.CommandName.VOID;
import org.iproduct.iptpi.domain.Command;
import org.iproduct.iptpi.domain.arduino.LineReadings;
import org.iproduct.iptpi.domain.audio.AudioPlayer;
import org.iproduct.iptpi.domain.position.Position;
import org.iproduct.iptpi.domain.position.PositionsFlux;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import com.pi4j.wiringpi.Gpio;
import reactor.core.publisher.EmitterProcessor;
import reactor.core.publisher.Flux;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuple4;
import reactor.util.function.Tuples;
public class MovementCommandSubscriber implements Subscriber<Command> {
public static final int MAX_SPEED = 1024;
public static final int CLOCK_DIVISOR = 2;
public static final double LANDING_CURVE_PARAMETER = 0.000000005;
public static final MotorsCommand STOP_COMMAND = new MotorsCommand(0, 0, 0, 0, 0);
private Subscription subscription;
private PositionsFlux positions;
private Flux<LineReadings> lineReadings;
// private SchedulerGroup eventLoops = SchedulerGroup.async();
//Create movement command broadcaster
private EmitterProcessor<Command> commandFlux = EmitterProcessor.create();
public MovementCommandSubscriber(PositionsFlux positions, Flux<LineReadings> lineReadings) {
this.positions = positions;
this.lineReadings = lineReadings;
}
@Override
public void onNext(Command command) {
setupGpioForMovement();
switch (command.getName()) {
case MOVE_FORWARD : moveForward(command); break;
case FOLLOW_LINE : followLine(command); break;
case MOVE_RELATIVE : moveRelative(command); break;
case STOP :
System.out.println("STOPPING THE ROBOT");
runMotors(STOP_COMMAND);
break;
default:
break;
}
}
protected void moveRelative(Command command) {
RelativeMovement relMove = (RelativeMovement) command.getData();
// start moving - and think later as it comes :)
int directionL, directionR;
if(relMove.getVelocity() < 0) {
directionL = directionR = -1;
} else {
directionL = directionR = 1;
}
double targetVelocity = abs(relMove.getVelocity());
int velocity = (int)(MAX_SPEED * targetVelocity / MAX_ROBOT_LINEAR_VELOCITY); // 50 mm/s max
MotorsCommand initialCommand = new MotorsCommand(directionL, directionR, velocity, velocity, Long.MAX_VALUE); //distance still unknown
System.out.println(initialCommand);
runMotors(initialCommand);
Position startPos = positions.elementAt(1).block();
double targetDeltaX = relMove.getDeltaX();
double targetDeltaY = relMove.getDeltaY();
double targetX = startPos.getX() + targetDeltaX;
double targetY = startPos.getY() + targetDeltaY;
double distance = hypot(targetDeltaX, targetDeltaY);
System.out.println("$$$$$$$$$$$$$$ TargetX=" + targetX );
System.out.println("$$$$$$$$$$$$$$ TargetY=" + targetY );
System.out.println("$$$$$$$$$$$$$$ Target Distance=" + distance);
double targetHeading, targetDeltaHeading, targetCurvature, h = 0;
if(relMove.getDeltaHeading() == 0 ) {
targetCurvature = targetDeltaHeading = 0;
targetHeading = startPos.getHeading();
} else {
targetDeltaHeading = relMove.getDeltaHeading();
targetHeading = startPos.getHeading() + targetDeltaHeading ;
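			// chord geometry: an arc turning through targetDeltaHeading over a chord
			// of length 'distance' has curvature 2*sin(deltaHeading/2)/distance;
			// h is the distance from the chord midpoint to the circle centre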
targetCurvature = (2 * sin(targetDeltaHeading / 2) ) / distance ;
h = sqrt( 1/(targetCurvature * targetCurvature) - 0.25 * distance * distance );
}
double xC, yC; //circle center coordinates
double r = hypot(distance/2, h);
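		// place the circle centre on the perpendicular bisector of the
		// start->target chord; the sign of the curvature selects which of the
		// two candidate centres lies on the turning side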
if(targetCurvature != 0) {
double q = hypot( targetX - startPos.getX(), targetY - startPos.getY() ),
x3 = (targetX + startPos.getX()) /2,
y3 = (targetY + startPos.getY()) /2;
if(targetCurvature > 0) {
xC = x3 + sqrt(r*r - (q*q/4)) * (startPos.getY() - targetY)/q;
yC = y3 + sqrt(r*r - (q*q/4)) * (targetX - startPos.getX() )/q;
} else {
xC = x3 - sqrt(r*r - (q*q/4)) * (startPos.getY() - targetY)/q;
yC = y3 - sqrt(r*r - (q*q/4)) * (targetX - startPos.getX() )/q;
}
} else {
xC = (targetX + startPos.getX()) /2;
yC = (targetY + startPos.getY()) /2;
}
System.out.println("$$$$$$$$$$$$$$ TargetHeading=" + targetHeading );
System.out.println("$$$$$$$$$$$$$$ TargetCurvature=" + targetCurvature );
double targetAngularVelocity;
if (targetDeltaHeading != 0 && relMove.getAngularVelocity() == 0)
targetAngularVelocity = targetVelocity * targetCurvature;
else
targetAngularVelocity = relMove.getAngularVelocity();
double startH = startPos.getHeading();
System.out.println("START POSITION: " + startPos);
Flux<Position> skip1 = positions.skip(1);
Flux.zip(positions, skip1)
.scan(initialCommand, (last, tupple) -> {
Position prevPos = ((Position)tupple.getT1());
Position currPos = ((Position)tupple.getT2());
float prevX = prevPos.getX();
float prevY = prevPos.getY();
double prevH = prevPos.getHeading();
float currX = currPos.getX();
float currY = currPos.getY();
double currH = currPos.getHeading();
System.out.println(currPos + " - " + prevPos);
			double dt = (currPos.getTimestamp() - prevPos.getTimestamp()) / 1000.0; //delta time in seconds between position readings
if(dt <= 0) return last; // if invalid sequence do nothing
double time = (currPos.getTimestamp() - startPos.getTimestamp()) /1000.0;
// calculating the ideal trajectory position
double tarX, tarY, tarH, remainingPathLength;
if(targetCurvature == 0) {
tarX = startPos.getX() + targetVelocity * time * cos(targetHeading);
tarY = startPos.getY() + targetVelocity * time * sin(targetHeading);
remainingPathLength = hypot(targetX - currX, targetY - currY) ;
tarH = targetHeading;
} else {
double deltaHeading = targetAngularVelocity * time;
double startAng = atan((startPos.getY() - yC) / (startPos.getX() - xC));
double angle = startAng + deltaHeading;
				// atan() returns only the principal branch; flip by PI when the sign of
				// the angle disagrees with the half-plane of the start point
				// (assumption: the original comparison was missing signum() on the right)
				if (signum(angle) != signum(startPos.getY() - yC))
					angle -= PI;
tarX = cos(angle) * r + xC;
tarY = sin(angle) * r + yC;
tarH = startPos.getHeading() + deltaHeading;
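				// remaining arc length = remaining heading change / curvature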
remainingPathLength = (targetDeltaHeading - deltaHeading ) / targetCurvature;
// System.out.println(" -----> tarX=" + tarX + ", tarY=" + tarY + ", tarH=" + tarH + ", deltaHeading=" + deltaHeading + ", startAng=" + startAng + ", angle=" + angle);
// System.out.println(" -----> r=" + r + ", xC=" + xC + ", yC=" + yC );
}
//calculating current trajectory parameters
float dX = currX - prevX;
float dY = currY - prevY;
double currDist = hypot(dX, dY);
double currV = currDist / dt; // current velocity [mm/s]
double currAngV = (currH - prevH) / dt;
//calculating errors
			// rotate the world-frame offset into the target frame: errX is the
			// along-track error, errY the cross-track error (assumption: the
			// original "+ sin"/"+ cos" terms were typos for the rotation products)
			double errX = (tarX - currX) * cos(tarH) + (tarY - currY) * sin(tarH);
			double errY = -(tarX - currX) * sin(tarH) + (tarY - currY) * cos(tarH);
double errH = tarH - currH;
//calculating landing curve
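			// cubic landing curve y = Cx * x^3 blends the robot back onto the ideal
			// path; dlandY is its slope evaluated at the current cross-track error
			// errY, and landH the corresponding approach heading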
double Cx = LANDING_CURVE_PARAMETER;
double dlandY = 3 * Cx * pow(cbrt(abs(errY) / Cx), 2) * signum(errY);
double landH = tarH + atan(dlandY);
double dErrY = -targetAngularVelocity * errX + currV * sin (errH);
double landAngV = targetAngularVelocity + (2 * (1 / cbrt(abs(errY) / Cx)) * dErrY) /
(1 + tan(landH - tarH) * tan(landH - tarH));
//calculating the corrected trajectory control parameters
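			// sqrt(2*a*|err|) is the speed a constant-deceleration profile would have
			// at this error (a time-optimal braking curve); the 0.2 factor softens
			// the angular correction (assumption: empirically tuned)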
double switchAngV = landAngV - currAngV +
sqrt(2 * MAX_ROBOT_ANGULAR_ACCELERATION * abs(landH - currH))
* signum(landH - currH) * 0.2;
double switchAngA = min(abs(switchAngV / dt), MAX_ROBOT_ANGULAR_ACCELERATION) * signum(switchAngV);
double newAngV = currAngV + switchAngA * dt;
//calculating new velocity
double dErrX = targetVelocity - currV * cos(errH) + targetAngularVelocity * errY;
double switchV = dErrX + sqrt( 2 * MAX_ROBOT_LINEAR_ACCELERATION * abs(errX)) * signum(errX);
double switchA = min(abs(switchV / dt), MAX_ROBOT_LINEAR_ACCELERATION) * signum(switchV);
//calculating delta motor speed control values
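			// k is an empirical gain converting the commanded angular acceleration
			// into a left/right PWM differential over this control interval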
double k = 0.1;
double newDeltaLR = k* MAX_SPEED * MAIN_AXE_LENGTH * dt * switchAngA / (2 * WHEEL_RADIUS);
//calculating new motor speed control values
int newVL = (int) (last.getVelocityL() + switchA * dt / WHEEL_RADIUS - newDeltaLR * last.getDirL());
int newVR = (int) (last.getVelocityR() + switchA * dt / WHEEL_RADIUS + newDeltaLR * last.getDirL());
System.out.println("--> errH=" + errH + ", targetHeading=" + targetHeading + ", currH=" + currH + ", dist=" + currDist
);
// System.out.println("!!! landH=" + landH + ", dErrY=" + dErrY
// + ", currAngV=" + currAngV + ", landAngV=" + landAngV + ", switchAngV=" + switchAngV
// + ", switchAngA=" + switchAngA + ", newAngV=" + newAngV );
// System.out.println("!!! remainingPathLength=" + remainingPathLength + ", dErrX=" + dErrX + ", switchV=" + switchV + ", switchA=" + switchA );
// System.out.println("!!! newDeltaV=" + switchA * dt / WHEEL_RADIUS + ", newDelatLR=" + newDeltaLR + ", newVL=" + newVL + ", newVR=" + newVR);
double remainingDeltaHeading = targetHeading - currH;
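			// keep driving while still closing on the target and outside the stopping
			// distance (~v^2 / deceleration; assumption: the factor 2 of the usual
			// v^2/(2a) is folded into ROBOT_STOPPING_DECCELERATION), or while a
			// commanded turn is still unfinished and in the same sense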
if(remainingPathLength < last.getRemainingPath()
&& remainingPathLength > currV * currV / ROBOT_STOPPING_DECCELERATION
|| targetDeltaHeading > 0.01
&& abs(remainingDeltaHeading) > 0.05 && remainingDeltaHeading * targetDeltaHeading > 0 ) { //drive until minimum distance to target
return new MotorsCommand(last.getDirL(), last.getDirR(), newVL, newVR, (float) remainingPathLength);
} else {
System.out.println("FINAL POSITION: " + currPos);
return STOP_COMMAND;
}
}).map((MotorsCommand motorsCommand) -> {
runMotors(motorsCommand);
return motorsCommand;
})
.takeUntil((MotorsCommand motorsCommand) -> motorsCommand.equals(STOP_COMMAND) )
.subscribe( (MotorsCommand motorsCommand) -> {
System.out.println(motorsCommand);
});
}
protected void followLine(Command command) {
{
ForwardMovement forwardMove = (ForwardMovement) command.getData();
// start moving - and think later as it comes :)
int directionL, directionR;
if(forwardMove.getVelocity() < 0) {
directionL = directionR = -1;
} else {
directionL = directionR = 1;
}
double targetVelocity = abs(forwardMove.getVelocity());
int velocity = (int)(MAX_SPEED * targetVelocity / MAX_ROBOT_LINEAR_VELOCITY); // 50 mm/s max
MotorsCommand initialCommand = new MotorsCommand(directionL, directionR, velocity, velocity, Long.MAX_VALUE); //distance still unknown
System.out.println(initialCommand);
runMotors(initialCommand);
Position startPos = positions.elementAt(1).block();
double distance = forwardMove.getDistance();
double targetHeading = startPos.getHeading();
double targetDeltaX = distance * cos(targetHeading);
double targetDeltaY = distance * sin(targetHeading);
double targetX = startPos.getX() + targetDeltaX;
double targetY = startPos.getY() + targetDeltaY;
System.out.println("$$$$$$$$$$$$$$ TargetX=" + targetX );
System.out.println("$$$$$$$$$$$$$$ TargetY=" + targetY );
System.out.println("$$$$$$$$$$$$$$ Target Distance=" + distance);
System.out.println("$$$$$$$$$$$$$$ TargetHeading=" + targetHeading );
double startH = startPos.getHeading();
System.out.println("START POSITION: " + startPos);
Flux<Position> skip1 = positions.skip(1);
Flux<Tuple2<Position, Position>> lastTwoPositionsFlux = Flux.zip(positions, skip1);
Flux<Tuple4<Position, Position, LineReadings, Command>> flux =
Flux.combineLatest(
lastTwoPositionsFlux,
lineReadings,
commandFlux.startWith(new Command(VOID, null)),
(Object[] args) ->
Tuples.of(((Tuple2<Position, Position>)args[0]).getT1(),
((Tuple2<Position, Position>)args[0]).getT2(),
(LineReadings)args[1],
(Command)args[2])
);
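		// each emission pairs the two latest positions with the most recent
		// line-sensor readings and external command; startWith(VOID) lets the
		// combined stream fire before any real command arrives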
flux.scan(initialCommand, (last, tuple4) -> {
System.out.println("########## NEW EVENT !!!!!!!!!!!");
Position prevPos = tuple4.getT1();
Position currPos = tuple4.getT2();
LineReadings lastReadings = tuple4.getT3();
Command lastCommand = tuple4.getT4();
float prevX = prevPos.getX();
float prevY = prevPos.getY();
double prevH = prevPos.getHeading();
float currX = currPos.getX();
float currY = currPos.getY();
double currH = currPos.getHeading();
System.out.println(currPos + " - " + prevPos);
			double dt = (currPos.getTimestamp() - prevPos.getTimestamp()) / 1000.0; //delta time in seconds between position readings
if(dt <= 0) return last; // if invalid sequence do nothing
double time = (currPos.getTimestamp() - startPos.getTimestamp()) /1000.0;
// calculating the ideal trajectory position
double tarX, tarY, tarH, remainingPathLength;
tarX = startPos.getX() + targetVelocity * time * cos(targetHeading);
tarY = startPos.getY() + targetVelocity * time * sin(targetHeading);
remainingPathLength = hypot(targetX - currX, targetY - currY) ;
tarH = targetHeading;
//calculating current trajectory parameters
float dX = currX - prevX;
float dY = currY - prevY;
double currDist = hypot(dX, dY);
double currV = currDist / dt; // current velocity [mm/s]
double currAngV = (currH - prevH) / dt;
//calculating errors
			// rotate into the target frame (same typo fix as in moveRelative)
			double errX = (tarX - currX) * cos(tarH) + (tarY - currY) * sin(tarH);
			double errY = -(tarX - currX) * sin(tarH) + (tarY - currY) * cos(tarH);
double errH = tarH - currH;
//calculating landing curve
double Cx = LANDING_CURVE_PARAMETER;
double dlandY = 3 * Cx * pow(cbrt(abs(errY) / Cx), 2) * signum(errY);
double landH = tarH + atan(dlandY);
double dErrY = currV * sin (errH);
double landAngV = (2 * (1 / cbrt(abs(errY) / Cx)) * dErrY) /
(1 + tan(landH - tarH) * tan(landH - tarH));
//calculating the corrected trajectory control parameters
double switchAngV = landAngV - currAngV +
sqrt(2 * MAX_ROBOT_ANGULAR_ACCELERATION * abs(landH - currH))
* signum(landH - currH) * 0.2;
double switchAngA = min(abs(switchAngV / dt), MAX_ROBOT_ANGULAR_ACCELERATION) * signum(switchAngV);
double newAngV = currAngV + switchAngA * dt;
//calculating new velocity
double dErrX = targetVelocity - currV * cos(errH);
double switchV = dErrX + sqrt( 2 * MAX_ROBOT_LINEAR_ACCELERATION * abs(errX)) * signum(errX);
double switchA = min(abs(switchV / dt), MAX_ROBOT_LINEAR_ACCELERATION) * signum(switchV);
// double newV = currV + switchA * dt;
//calculating delta motor speed control values
double k = 0.1;
double newDeltaLR = k* MAX_SPEED * MAIN_AXE_LENGTH * dt * switchAngA / (2 * WHEEL_RADIUS);
//calculating new motor speed control values
int newVL = (int) (last.getVelocityL() + switchA * dt / WHEEL_RADIUS - newDeltaLR * last.getDirL());
int newVR = (int) (last.getVelocityR() + switchA * dt / WHEEL_RADIUS + newDeltaLR * last.getDirL());
System.out.println("!!! time=" + time + ", dt=" + dt + ", tarX=" + tarX + ", tarY=" + tarY
+ ", startH=" + startH + ", errH=" + errH + ", targetX=" + targetX + ", targetY=" + targetY + ", targetHeading=" + targetHeading
+ ", errX=" + errX + ", errY=" + errY + ", dlandY=" + dlandY + ", currV=" + currV + ", dist=" + currDist
+ ", switchAngV/dt=" + switchAngV / dt );
System.out.println("!!! remainingPathLength=" + remainingPathLength + ", dErrX=" + dErrX + ", switchV=" + switchV + ", switchA=" + switchA );
if(lastCommand.getName() != STOP && remainingPathLength < last.getRemainingPath()
&& remainingPathLength > currV * currV / ROBOT_STOPPING_DECCELERATION ) { //drive until minimum distance to target
return new MotorsCommand(last.getDirL(), last.getDirR(), newVL, newVR, (float) remainingPathLength);
} else {
System.out.println("FINAL POSITION: " + currPos);
return STOP_COMMAND;
}
}).map((MotorsCommand motorsCommand) -> {
runMotors(motorsCommand);
return motorsCommand;
})
.takeUntil((MotorsCommand motorsCommand) -> motorsCommand.equals(STOP_COMMAND) )
.subscribe( (MotorsCommand motorsCommand) -> {
System.out.println(motorsCommand);
});
}
}
protected void moveForward(Command command) {
{
ForwardMovement forwardMove = (ForwardMovement) command.getData();
// start moving - and think later as it comes :)
int directionL, directionR;
if(forwardMove.getVelocity() < 0) {
directionL = directionR = -1;
} else {
directionL = directionR = 1;
}
double targetVelocity = abs(forwardMove.getVelocity());
int velocity = (int)(MAX_SPEED * targetVelocity / MAX_ROBOT_LINEAR_VELOCITY); // 50 mm/s max
MotorsCommand initialCommand = new MotorsCommand(directionL, directionR, velocity, velocity, Long.MAX_VALUE); //distance still unknown
System.out.println(initialCommand);
runMotors(initialCommand);
Position startPos = positions.elementAt(1).block();
double distance = forwardMove.getDistance();
double targetHeading = startPos.getHeading();
double targetDeltaX = distance * cos(targetHeading);
double targetDeltaY = distance * sin(targetHeading);
double targetX = startPos.getX() + targetDeltaX;
double targetY = startPos.getY() + targetDeltaY;
System.out.println("$$$$$$$$$$$$$$ TargetX=" + targetX );
System.out.println("$$$$$$$$$$$$$$ TargetY=" + targetY );
System.out.println("$$$$$$$$$$$$$$ Target Distance=" + distance);
System.out.println("$$$$$$$$$$$$$$ TargetHeading=" + targetHeading );
double startH = startPos.getHeading();
System.out.println("START POSITION: " + startPos);
Flux<Position> skip1 = positions.skip(1);
Flux<Tuple2<Position, Position>> lastTwoPositionsFlux = Flux.zip(positions, skip1);
Flux<Tuple3<Position, Position, Command>> flux =
Flux.combineLatest(
lastTwoPositionsFlux,
commandFlux.startWith(new Command(VOID, null)),
(tuple2, lastCommand) -> Tuples.of(tuple2.getT1(), tuple2.getT2(), lastCommand)
);
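		// combine the two latest positions with the most recent external command
		// so a STOP issued mid-move is observed on the next position update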
flux.scan(initialCommand, (last, tuple3) -> {
System.out.println("########## NEW EVENT !!!!!!!!!!!");
Position prevPos = tuple3.getT1();
Position currPos = tuple3.getT2();
Command lastCommand = tuple3.getT3();
float prevX = prevPos.getX();
float prevY = prevPos.getY();
double prevH = prevPos.getHeading();
float currX = currPos.getX();
float currY = currPos.getY();
double currH = currPos.getHeading();
System.out.println(currPos + " - " + prevPos);
			double dt = (currPos.getTimestamp() - prevPos.getTimestamp()) / 1000.0; //delta time in seconds between position readings
if(dt <= 0) return last; // if invalid sequence do nothing
double time = (currPos.getTimestamp() - startPos.getTimestamp()) /1000.0;
// calculating the ideal trajectory position
double tarX, tarY, tarH, remainingPathLength;
tarX = startPos.getX() + targetVelocity * time * cos(targetHeading);
tarY = startPos.getY() + targetVelocity * time * sin(targetHeading);
remainingPathLength = hypot(targetX - currX, targetY - currY) ;
tarH = targetHeading;
//calculating current trajectory parameters
float dX = currX - prevX;
float dY = currY - prevY;
double currDist = hypot(dX, dY);
double currV = currDist / dt; // current velocity [mm/s]
double currAngV = (currH - prevH) / dt;
//calculating errors
			// rotate into the target frame (same typo fix as in moveRelative)
			double errX = (tarX - currX) * cos(tarH) + (tarY - currY) * sin(tarH);
			double errY = -(tarX - currX) * sin(tarH) + (tarY - currY) * cos(tarH);
double errH = tarH - currH;
//calculating landing curve
double Cx = LANDING_CURVE_PARAMETER;
double dlandY = 3 * Cx * pow(cbrt(abs(errY) / Cx), 2) * signum(errY);
double landH = tarH + atan(dlandY);
double dErrY = currV * sin (errH);
double landAngV = (2 * (1 / cbrt(abs(errY) / Cx)) * dErrY) /
(1 + tan(landH - tarH) * tan(landH - tarH));
//calculating the corrected trajectory control parameters
double switchAngV = landAngV - currAngV +
sqrt(2 * MAX_ROBOT_ANGULAR_ACCELERATION * abs(landH - currH))
* signum(landH - currH) * 0.2;
double switchAngA = min(abs(switchAngV / dt), MAX_ROBOT_ANGULAR_ACCELERATION) * signum(switchAngV);
double newAngV = currAngV + switchAngA * dt;
//calculating new velocity
double dErrX = targetVelocity - currV * cos(errH);
double switchV = dErrX + sqrt( 2 * MAX_ROBOT_LINEAR_ACCELERATION * abs(errX)) * signum(errX);
double switchA = min(abs(switchV / dt), MAX_ROBOT_LINEAR_ACCELERATION) * signum(switchV);
// double newV = currV + switchA * dt;
//calculating delta motor speed control values
double k = 0.1;
double newDeltaLR = k* MAX_SPEED * MAIN_AXE_LENGTH * dt * switchAngA / (2 * WHEEL_RADIUS);
<|fim▁hole|>
//calculating new motor speed control values
int newVL = (int) (last.getVelocityL() + switchA * dt / WHEEL_RADIUS - newDeltaLR * last.getDirL());
int newVR = (int) (last.getVelocityR() + switchA * dt / WHEEL_RADIUS + newDeltaLR * last.getDirL());
System.out.println("!!! time=" + time + ", dt=" + dt + ", tarX=" + tarX + ", tarY=" + tarY
+ ", startH=" + startH + ", errH=" + errH + ", targetX=" + targetX + ", targetY=" + targetY + ", targetHeading=" + targetHeading
+ ", errX=" + errX + ", errY=" + errY + ", dlandY=" + dlandY + ", currV=" + currV + ", dist=" + currDist
+ ", switchAngV/dt=" + switchAngV / dt );
System.out.println("!!! remainingPathLength=" + remainingPathLength + ", dErrX=" + dErrX + ", switchV=" + switchV + ", switchA=" + switchA );
if(lastCommand.getName() != STOP && remainingPathLength < last.getRemainingPath()
&& remainingPathLength > currV * currV / ROBOT_STOPPING_DECCELERATION ) { //drive until minimum distance to target
return new MotorsCommand(last.getDirL(), last.getDirR(), newVL, newVR, (float) remainingPathLength);
} else {
System.out.println("FINAL POSITION: " + currPos);
return STOP_COMMAND;
}
}).map((MotorsCommand motorsCommand) -> {
runMotors(motorsCommand);
return motorsCommand;
})
.takeUntil((MotorsCommand motorsCommand) -> motorsCommand.equals(STOP_COMMAND) )
.subscribe( (MotorsCommand motorsCommand) -> {
System.out.println(motorsCommand);
});
}
}
protected void setupGpioForMovement() {
// Motor direction pins
Gpio.pinMode(5, Gpio.OUTPUT);
Gpio.pinMode(6, Gpio.OUTPUT);
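		// hardware PWM pins: 12 drives the right motor, 13 the left; mark-space
		// mode with range MAX_SPEED gives duty cycle = value / MAX_SPEED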
Gpio.pinMode(12, Gpio.PWM_OUTPUT);
Gpio.pinMode(13, Gpio.PWM_OUTPUT);
Gpio.pwmSetMode(Gpio.PWM_MODE_MS);
Gpio.pwmSetRange(MAX_SPEED);
Gpio.pwmSetClock(CLOCK_DIVISOR);
}
private void runMotors(MotorsCommand mc) {
//setting motor directions
Gpio.digitalWrite(5, mc.getDirR() > 0 ? 1 : 0);
Gpio.digitalWrite(6, mc.getDirL() > 0 ? 1 : 0);
//setting speed
if(mc.getVelocityR() >= 0 && mc.getVelocityR() <= MAX_SPEED)
Gpio.pwmWrite(12, mc.getVelocityR()); // speed up to MAX_SPEED
if(mc.getVelocityL() >= 0 && mc.getVelocityL() <= MAX_SPEED)
Gpio.pwmWrite(13, mc.getVelocityL());
}
@Override
public void onSubscribe(Subscription s) {
subscription = s;
subscription.request(Long.MAX_VALUE);
}
@Override
public void onError(Throwable t) {
// TODO Auto-generated method stub
}
@Override
public void onComplete() {
// TODO Auto-generated method stub
}
}<|fim▁end|> | |
<|file_name|>Scroll.js<|end_file_name|><|fim▁begin|>/**
* @module creatine.transitions
**/
(function() {
"use strict";
/**
* A transition effect to scroll the new scene.
*
* ## Usage example
*
* var game = new tine.Game(null, {
* create: function() {
* var transition = new tine.transitions.Scroll(tine.TOP, null, 1000);
* game.replace(new MyScene(), transition);
* }
* });
*
* @class Scroll
* @constructor
* @param {Constant} [direction=creatine.LEFT] The direction.
* @param {Function} [ease=createjs.Ease.linear] An easing function from
* `createjs.Ease` (provided by TweenJS).
* @param {Number} [time=400] The transition time in milliseconds.
**/
var Scroll = function(direction, ease, time) {
/**
* Direction of the effect.
* @property direction
* @type {Constant}
**/
this.direction = direction || creatine.LEFT;
/**
* An Easing function from createjs.Ease.
* @property ease
* @type {Function}
**/
this.ease = ease || createjs.Ease.linear;
/**
* The transition time in milliseconds.
* @property time
* @type {Number}
**/
this.time = time || 400;
}
var p = Scroll.prototype;
/**
* Initialize the transition (called by the director).
* @method start
* @param {Director} director The Director instance.
* @param {Scene} outScene The active scene.
* @param {Scene} inScene The incoming scene.
* @param {Function} callback The callback function called when the
* transition is done.
* @protected
**/
p.start = function(director, outScene, inScene, callback) {
this.director = director;
this.outScene = outScene;
this.inScene = inScene;
this.callback = callback;
var w = director.stage.canvas.width;
var h = director.stage.canvas.height;
this.targetX = 0;
this.targetY = 0;
inScene.x = 0;
inScene.y = 0;
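        // park the incoming scene one full canvas off-screen on the chosen
        // side, then tween both scenes by a canvas width/height in tandem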
switch (this.direction) {
case creatine.LEFT:
inScene.x = w;
this.targetX = -w;
break;
case creatine.RIGHT:
inScene.x = -w;
this.targetX = w;
break;
case creatine.TOP:
inScene.y = h;
this.targetY = -h;
break;
case creatine.BOTTOM:
inScene.y = -h;
this.targetY = h;
break;
}
var self = this;
createjs.Tween.get(inScene, {override:true})
.to({x:0, y:0}, this.time, this.ease)
.call(function() { self.complete(); })
createjs.Tween.get(outScene, {override:true})
.to({x:this.targetX, y:this.targetY}, this.time, this.ease)
}
/**
* Finalize the transition (called by the director).
* @method complete
* @protected
**/
p.complete = function() {
createjs.Tween.removeTweens(this.inScene);
createjs.Tween.removeTweens(this.outScene);
        this.inScene.x = 0;
        this.inScene.y = 0;
this.outScene.x = 0;
this.outScene.y = 0;<|fim▁hole|>
creatine.transitions.Scroll = Scroll;
}());<|fim▁end|> | this.callback();
} |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi;
use ast::{P, Ident};
use ast;
use ast_util;
use codemap::{Span, respan, DUMMY_SP};
use ext::base::ExtCtxt;
use ext::quote::rt::*;
use fold::Folder;
use owned_slice::OwnedSlice;
use parse::token::special_idents;
use parse::token;
pub struct Field {
ident: ast::Ident,
ex: @ast::Expr
}
// Transitional reexports so qquote can find the paths it is looking for
mod syntax {
pub use ext;
pub use parse;
}
pub trait AstBuilder {
// paths
fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;
fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path;
fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;
fn path_all(&self, sp: Span,
global: bool,
idents: Vec<ast::Ident> ,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>> )
-> ast::Path;
// types
fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy;
fn ty(&self, span: Span, ty: ast::Ty_) -> P<ast::Ty>;
fn ty_path(&self, ast::Path, Option<OwnedSlice<ast::TyParamBound>>) -> P<ast::Ty>;
fn ty_ident(&self, span: Span, idents: ast::Ident) -> P<ast::Ty>;
fn ty_rptr(&self, span: Span,
ty: P<ast::Ty>,
lifetime: Option<ast::Lifetime>,
mutbl: ast::Mutability) -> P<ast::Ty>;
fn ty_uniq(&self, span: Span, ty: P<ast::Ty>) -> P<ast::Ty>;
fn ty_option(&self, ty: P<ast::Ty>) -> P<ast::Ty>;
fn ty_infer(&self, sp: Span) -> P<ast::Ty>;
fn ty_nil(&self) -> P<ast::Ty>;
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField;
fn strip_bounds(&self, bounds: &Generics) -> Generics;
fn typaram(&self,
span: Span,
id: ast::Ident,
sized: ast::Sized,
bounds: OwnedSlice<ast::TyParamBound>,
default: Option<P<ast::Ty>>) -> ast::TyParam;
fn trait_ref(&self, path: ast::Path) -> ast::TraitRef;
fn typarambound(&self, path: ast::Path) -> ast::TyParamBound;
fn lifetime(&self, span: Span, ident: ast::Name) -> ast::Lifetime;
// statements
fn stmt_expr(&self, expr: @ast::Expr) -> @ast::Stmt;
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: @ast::Expr) -> @ast::Stmt;
fn stmt_let_typed(&self,
sp: Span,
mutbl: bool,
ident: ast::Ident,
typ: P<ast::Ty>,
ex: @ast::Expr)
-> @ast::Stmt;
// blocks
fn block(&self, span: Span, stmts: Vec<@ast::Stmt> , expr: Option<@ast::Expr>) -> P<ast::Block>;
fn block_expr(&self, expr: @ast::Expr) -> P<ast::Block>;
fn block_all(&self, span: Span,
view_items: Vec<ast::ViewItem> ,
stmts: Vec<@ast::Stmt> ,
expr: Option<@ast::Expr>) -> P<ast::Block>;
// expressions
fn expr(&self, span: Span, node: ast::Expr_) -> @ast::Expr;
fn expr_path(&self, path: ast::Path) -> @ast::Expr;
fn expr_ident(&self, span: Span, id: ast::Ident) -> @ast::Expr;
fn expr_self(&self, span: Span) -> @ast::Expr;
fn expr_binary(&self, sp: Span, op: ast::BinOp,
lhs: @ast::Expr, rhs: @ast::Expr) -> @ast::Expr;
fn expr_deref(&self, sp: Span, e: @ast::Expr) -> @ast::Expr;
fn expr_unary(&self, sp: Span, op: ast::UnOp, e: @ast::Expr) -> @ast::Expr;
fn expr_managed(&self, sp: Span, e: @ast::Expr) -> @ast::Expr;
fn expr_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr;
fn expr_mut_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr;
fn expr_field_access(&self, span: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr;
fn expr_call(&self, span: Span, expr: @ast::Expr, args: Vec<@ast::Expr> ) -> @ast::Expr;
fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<@ast::Expr> ) -> @ast::Expr;
fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> ,
args: Vec<@ast::Expr> ) -> @ast::Expr;
fn expr_method_call(&self, span: Span,
expr: @ast::Expr, ident: ast::Ident,
args: Vec<@ast::Expr> ) -> @ast::Expr;
fn expr_block(&self, b: P<ast::Block>) -> @ast::Expr;
fn expr_cast(&self, sp: Span, expr: @ast::Expr, ty: P<ast::Ty>) -> @ast::Expr;
fn field_imm(&self, span: Span, name: Ident, e: @ast::Expr) -> ast::Field;
fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field> ) -> @ast::Expr;
fn expr_struct_ident(&self, span: Span, id: ast::Ident, fields: Vec<ast::Field> ) -> @ast::Expr;
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> @ast::Expr;
fn expr_uint(&self, span: Span, i: uint) -> @ast::Expr;
fn expr_int(&self, sp: Span, i: int) -> @ast::Expr;
fn expr_u8(&self, sp: Span, u: u8) -> @ast::Expr;
fn expr_bool(&self, sp: Span, value: bool) -> @ast::Expr;
fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr;
fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr;
fn expr_vec_ng(&self, sp: Span) -> @ast::Expr;
fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr;
fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr;
fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr;
fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr;
fn expr_none(&self, sp: Span) -> @ast::Expr;
fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr;
fn expr_unreachable(&self, span: Span) -> @ast::Expr;
fn expr_ok(&self, span: Span, expr: @ast::Expr) -> @ast::Expr;
fn expr_err(&self, span: Span, expr: @ast::Expr) -> @ast::Expr;
fn expr_try(&self, span: Span, head: @ast::Expr) -> @ast::Expr;
fn pat(&self, span: Span, pat: ast::Pat_) -> @ast::Pat;
fn pat_wild(&self, span: Span) -> @ast::Pat;
fn pat_lit(&self, span: Span, expr: @ast::Expr) -> @ast::Pat;
fn pat_ident(&self, span: Span, ident: ast::Ident) -> @ast::Pat;
fn pat_ident_binding_mode(&self,
span: Span,
ident: ast::Ident,
bm: ast::BindingMode) -> @ast::Pat;
fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<@ast::Pat> ) -> @ast::Pat;
fn pat_struct(&self, span: Span,
path: ast::Path, field_pats: Vec<ast::FieldPat> ) -> @ast::Pat;
fn arm(&self, span: Span, pats: Vec<@ast::Pat> , expr: @ast::Expr) -> ast::Arm;
fn arm_unreachable(&self, span: Span) -> ast::Arm;
fn expr_match(&self, span: Span, arg: @ast::Expr, arms: Vec<ast::Arm> ) -> @ast::Expr;
fn expr_if(&self, span: Span,
cond: @ast::Expr, then: @ast::Expr, els: Option<@ast::Expr>) -> @ast::Expr;
fn lambda_fn_decl(&self, span: Span,
fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> @ast::Expr;
fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> @ast::Expr;
fn lambda0(&self, span: Span, blk: P<ast::Block>) -> @ast::Expr;
fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr;
fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , blk: @ast::Expr) -> @ast::Expr;
fn lambda_expr_0(&self, span: Span, expr: @ast::Expr) -> @ast::Expr;
fn lambda_expr_1(&self, span: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr;
fn lambda_stmts(&self, span: Span, ids: Vec<ast::Ident> , blk: Vec<@ast::Stmt> ) -> @ast::Expr;
fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr;
fn lambda_stmts_1(&self, span: Span, stmts: Vec<@ast::Stmt> , ident: ast::Ident) -> @ast::Expr;
// items
fn item(&self, span: Span,
name: Ident, attrs: Vec<ast::Attribute> , node: ast::Item_) -> @ast::Item;
fn arg(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::Arg;
// FIXME unused self
fn fn_decl(&self, inputs: Vec<ast::Arg> , output: P<ast::Ty>) -> P<ast::FnDecl>;
fn item_fn_poly(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
generics: Generics,
body: P<ast::Block>) -> @ast::Item;
fn item_fn(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
body: P<ast::Block>) -> @ast::Item;
fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant;
fn item_enum_poly(&self,
span: Span,
name: Ident,
enum_definition: ast::EnumDef,
generics: Generics) -> @ast::Item;
fn item_enum(&self, span: Span, name: Ident, enum_def: ast::EnumDef) -> @ast::Item;
fn item_struct_poly(&self,
span: Span,
name: Ident,
struct_def: ast::StructDef,
generics: Generics) -> @ast::Item;
fn item_struct(&self, span: Span, name: Ident, struct_def: ast::StructDef) -> @ast::Item;
fn item_mod(&self, span: Span,
name: Ident, attrs: Vec<ast::Attribute> ,
vi: Vec<ast::ViewItem> , items: Vec<@ast::Item> ) -> @ast::Item;
fn item_ty_poly(&self,
span: Span,
name: Ident,
ty: P<ast::Ty>,
generics: Generics) -> @ast::Item;
fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> @ast::Item;
fn attribute(&self, sp: Span, mi: @ast::MetaItem) -> ast::Attribute;
fn meta_word(&self, sp: Span, w: InternedString) -> @ast::MetaItem;
fn meta_list(&self,
sp: Span,
name: InternedString,
mis: Vec<@ast::MetaItem> )
-> @ast::MetaItem;
fn meta_name_value(&self,
sp: Span,
name: InternedString,
value: ast::Lit_)
-> @ast::MetaItem;
fn view_use(&self, sp: Span,
vis: ast::Visibility, vp: Vec<@ast::ViewPath> ) -> ast::ViewItem;
fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem;
fn view_use_simple_(&self, sp: Span, vis: ast::Visibility,
ident: ast::Ident, path: ast::Path) -> ast::ViewItem;
fn view_use_list(&self, sp: Span, vis: ast::Visibility,
path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem;
fn view_use_glob(&self, sp: Span,
vis: ast::Visibility, path: Vec<ast::Ident> ) -> ast::ViewItem;
}
impl<'a> AstBuilder for ExtCtxt<'a> {
fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
self.path_all(span, false, strs, Vec::new(), Vec::new())
}
fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path {
self.path(span, vec!(id))
}
fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
self.path_all(span, true, strs, Vec::new(), Vec::new())
}
fn path_all(&self,
sp: Span,
global: bool,
mut idents: Vec<ast::Ident> ,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>> )
-> ast::Path {
let last_identifier = idents.pop().unwrap();
let mut segments: Vec<ast::PathSegment> = idents.move_iter()
.map(|ident| {
ast::PathSegment {
identifier: ident,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
}).collect();
segments.push(ast::PathSegment {
identifier: last_identifier,
lifetimes: lifetimes,
types: OwnedSlice::from_vec(types),
});
ast::Path {
span: sp,
global: global,
segments: segments,
}
}
fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy {
ast::MutTy {
ty: ty,
mutbl: mutbl
}
}
fn ty(&self, span: Span, ty: ast::Ty_) -> P<ast::Ty> {
P(ast::Ty {
id: ast::DUMMY_NODE_ID,
span: span,
node: ty
})
}
fn ty_path(&self, path: ast::Path, bounds: Option<OwnedSlice<ast::TyParamBound>>)
-> P<ast::Ty> {
self.ty(path.span,
ast::TyPath(path, bounds, ast::DUMMY_NODE_ID))
}
// Might need to take bounds as an argument in the future, if you ever want
// to generate a bounded existential trait type.
fn ty_ident(&self, span: Span, ident: ast::Ident)
-> P<ast::Ty> {
self.ty_path(self.path_ident(span, ident), None)
}
fn ty_rptr(&self,
span: Span,
ty: P<ast::Ty>,
lifetime: Option<ast::Lifetime>,
mutbl: ast::Mutability)
-> P<ast::Ty> {
self.ty(span,
ast::TyRptr(lifetime, self.ty_mt(ty, mutbl)))
}
fn ty_uniq(&self, span: Span, ty: P<ast::Ty>) -> P<ast::Ty> {
self.ty(span, ast::TyUniq(ty))
}
fn ty_option(&self, ty: P<ast::Ty>) -> P<ast::Ty> {
self.ty_path(
self.path_all(DUMMY_SP,
true,
vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("Option")
),
Vec::new(),
vec!( ty )), None)
}
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField {
ast::TypeField {
ident: name,
mt: ast::MutTy { ty: ty, mutbl: ast::MutImmutable },
span: span,
}
}
fn ty_infer(&self, span: Span) -> P<ast::Ty> {
self.ty(span, ast::TyInfer)
}
fn ty_nil(&self) -> P<ast::Ty> {
P(ast::Ty {
id: ast::DUMMY_NODE_ID,
node: ast::TyNil,
span: DUMMY_SP,
})
}
fn typaram(&self,
span: Span,
id: ast::Ident,
sized: ast::Sized,
bounds: OwnedSlice<ast::TyParamBound>,
default: Option<P<ast::Ty>>) -> ast::TyParam {
ast::TyParam {
ident: id,
id: ast::DUMMY_NODE_ID,
sized: sized,
bounds: bounds,
default: default,
span: span
}
}
// these are strange, and probably shouldn't be used outside of
// pipes. Specifically, the global version possible generates
// incorrect code.
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> {
ty_params.iter().map(|p| self.ty_ident(DUMMY_SP, p.ident)).collect()
}
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> {
ty_params.iter().map(|p| self.ty_path(
self.path_global(DUMMY_SP, vec!(p.ident)), None)).collect()
}
fn strip_bounds(&self, generics: &Generics) -> Generics {
let new_params = generics.ty_params.map(|ty_param| {
ast::TyParam { bounds: OwnedSlice::empty(), ..*ty_param }
});
Generics {
ty_params: new_params,
.. (*generics).clone()
}
}
fn trait_ref(&self, path: ast::Path) -> ast::TraitRef {
ast::TraitRef {
path: path,
ref_id: ast::DUMMY_NODE_ID
}
}
fn typarambound(&self, path: ast::Path) -> ast::TyParamBound {
ast::TraitTyParamBound(self.trait_ref(path))
}
fn lifetime(&self, span: Span, name: ast::Name) -> ast::Lifetime {
ast::Lifetime { id: ast::DUMMY_NODE_ID, span: span, name: name }
}
fn stmt_expr(&self, expr: @ast::Expr) -> @ast::Stmt {
@respan(expr.span, ast::StmtSemi(expr, ast::DUMMY_NODE_ID))
}
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: @ast::Expr) -> @ast::Stmt {
let pat = if mutbl {
self.pat_ident_binding_mode(sp, ident, ast::BindByValue(ast::MutMutable))
} else {
self.pat_ident(sp, ident)
};
let local = @ast::Local {
ty: self.ty_infer(sp),
pat: pat,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
span: sp,
};
let decl = respan(sp, ast::DeclLocal(local));
@respan(sp, ast::StmtDecl(@decl, ast::DUMMY_NODE_ID))
}
fn stmt_let_typed(&self,
sp: Span,
mutbl: bool,
ident: ast::Ident,
typ: P<ast::Ty>,
ex: @ast::Expr)
-> @ast::Stmt {
let pat = if mutbl {
self.pat_ident_binding_mode(sp, ident, ast::BindByValue(ast::MutMutable))
} else {
self.pat_ident(sp, ident)
};
let local = @ast::Local {
ty: typ,
pat: pat,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
span: sp,
};
let decl = respan(sp, ast::DeclLocal(local));
@respan(sp, ast::StmtDecl(@decl, ast::DUMMY_NODE_ID))
}
fn block(&self, span: Span, stmts: Vec<@ast::Stmt> , expr: Option<@Expr>) -> P<ast::Block> {
self.block_all(span, Vec::new(), stmts, expr)
}
fn block_expr(&self, expr: @ast::Expr) -> P<ast::Block> {
self.block_all(expr.span, Vec::new(), Vec::new(), Some(expr))
}
fn block_all(&self,
span: Span,
view_items: Vec<ast::ViewItem> ,
stmts: Vec<@ast::Stmt> ,
expr: Option<@ast::Expr>) -> P<ast::Block> {
P(ast::Block {
view_items: view_items,
stmts: stmts,
expr: expr,
id: ast::DUMMY_NODE_ID,
rules: ast::DefaultBlock,
span: span,
})
}
fn expr(&self, span: Span, node: ast::Expr_) -> @ast::Expr {
@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: node,
span: span,
}
}
fn expr_path(&self, path: ast::Path) -> @ast::Expr {
self.expr(path.span, ast::ExprPath(path))
}
fn expr_ident(&self, span: Span, id: ast::Ident) -> @ast::Expr {
self.expr_path(self.path_ident(span, id))
}
fn expr_self(&self, span: Span) -> @ast::Expr {
self.expr_ident(span, special_idents::self_)
}
fn expr_binary(&self, sp: Span, op: ast::BinOp,
lhs: @ast::Expr, rhs: @ast::Expr) -> @ast::Expr {
self.expr(sp, ast::ExprBinary(op, lhs, rhs))
}
fn expr_deref(&self, sp: Span, e: @ast::Expr) -> @ast::Expr {
self.expr_unary(sp, ast::UnDeref, e)
}
fn expr_unary(&self, sp: Span, op: ast::UnOp, e: @ast::Expr) -> @ast::Expr {
self.expr(sp, ast::ExprUnary(op, e))
}
fn expr_managed(&self, sp: Span, e: @ast::Expr) -> @ast::Expr {
self.expr_unary(sp, ast::UnBox, e)
}
fn expr_field_access(&self, sp: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr {
self.expr(sp, ast::ExprField(expr, ident, Vec::new()))
}
fn expr_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr {
self.expr(sp, ast::ExprAddrOf(ast::MutImmutable, e))
}
fn expr_mut_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr {
self.expr(sp, ast::ExprAddrOf(ast::MutMutable, e))
}
fn expr_call(&self, span: Span, expr: @ast::Expr, args: Vec<@ast::Expr> ) -> @ast::Expr {
self.expr(span, ast::ExprCall(expr, args))
}
fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<@ast::Expr> ) -> @ast::Expr {
self.expr(span, ast::ExprCall(self.expr_ident(span, id), args))
}
fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> ,
args: Vec<@ast::Expr> ) -> @ast::Expr {
let pathexpr = self.expr_path(self.path_global(sp, fn_path));
self.expr_call(sp, pathexpr, args)
}
fn expr_method_call(&self, span: Span,
expr: @ast::Expr,
ident: ast::Ident,
mut args: Vec<@ast::Expr> ) -> @ast::Expr {
args.unshift(expr);
self.expr(span, ast::ExprMethodCall(ident, Vec::new(), args))
}
fn expr_block(&self, b: P<ast::Block>) -> @ast::Expr {
self.expr(b.span, ast::ExprBlock(b))
}
fn field_imm(&self, span: Span, name: Ident, e: @ast::Expr) -> ast::Field {
ast::Field { ident: respan(span, name), expr: e, span: span }
}
fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field> ) -> @ast::Expr {
self.expr(span, ast::ExprStruct(path, fields, None))
}
fn expr_struct_ident(&self, span: Span,
id: ast::Ident, fields: Vec<ast::Field> ) -> @ast::Expr {
self.expr_struct(span, self.path_ident(span, id), fields)
}
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> @ast::Expr {
self.expr(sp, ast::ExprLit(@respan(sp, lit)))
}
fn expr_uint(&self, span: Span, i: uint) -> @ast::Expr {
self.expr_lit(span, ast::LitUint(i as u64, ast::TyU))
}
fn expr_int(&self, sp: Span, i: int) -> @ast::Expr {
self.expr_lit(sp, ast::LitInt(i as i64, ast::TyI))
}
fn expr_u8(&self, sp: Span, u: u8) -> @ast::Expr {
self.expr_lit(sp, ast::LitUint(u as u64, ast::TyU8))
}
fn expr_bool(&self, sp: Span, value: bool) -> @ast::Expr {
self.expr_lit(sp, ast::LitBool(value))
}
fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr {
self.expr(sp, ast::ExprVstore(expr, vst))
}
fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr {
self.expr(sp, ast::ExprVec(exprs))
}
fn expr_vec_ng(&self, sp: Span) -> @ast::Expr {
self.expr_call_global(sp,
vec!(self.ident_of("std"),
self.ident_of("vec"),
self.ident_of("Vec"),
self.ident_of("new")),
Vec::new())
}
fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr {
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice)
}
fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr {
self.expr_lit(sp, ast::LitStr(s, ast::CookedStr))
}
fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr {
self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq)
}
fn expr_cast(&self, sp: Span, expr: @ast::Expr, ty: P<ast::Ty>) -> @ast::Expr {
self.expr(sp, ast::ExprCast(expr, ty))
}
fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr {
let some = vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("Some"));
self.expr_call_global(sp, some, vec!(expr))
}
fn expr_none(&self, sp: Span) -> @ast::Expr {
let none = self.path_global(sp, vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("None")));
self.expr_path(none)
}
fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr {
let loc = self.codemap().lookup_char_pos(span.lo);
self.expr_call_global(
span,
vec!(
self.ident_of("std"),
self.ident_of("rt"),
self.ident_of("begin_unwind")),
vec!(
self.expr_str(span, msg),
self.expr_str(span,
token::intern_and_get_ident(loc.file.name)),
self.expr_uint(span, loc.line)))
}
fn expr_unreachable(&self, span: Span) -> @ast::Expr {
self.expr_fail(span,
InternedString::new(
"internal error: entered unreachable code"))
}
fn expr_ok(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr {
let ok = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Ok"));
self.expr_call_global(sp, ok, vec!(expr))
}
fn expr_err(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr {
let err = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Err"));
self.expr_call_global(sp, err, vec!(expr))
}
fn expr_try(&self, sp: Span, head: @ast::Expr) -> @ast::Expr {
let ok = self.ident_of("Ok");
let ok_path = self.path_ident(sp, ok);
let err = self.ident_of("Err");
let err_path = self.path_ident(sp, err);
let binding_variable = self.ident_of("__try_var");
let binding_pat = self.pat_ident(sp, binding_variable);
let binding_expr = self.expr_ident(sp, binding_variable);
// Ok(__try_var) pattern
let ok_pat = self.pat_enum(sp, ok_path, vec!(binding_pat));
// Err(__try_var) (pattern and expression resp.)
let err_pat = self.pat_enum(sp, err_path, vec!(binding_pat));
let err_inner_expr = self.expr_call_ident(sp, err, vec!(binding_expr));
// return Err(__try_var)
let err_expr = self.expr(sp, ast::ExprRet(Some(err_inner_expr)));
// Ok(__try_var) => __try_var
let ok_arm = self.arm(sp, vec!(ok_pat), binding_expr);
// Err(__try_var) => return Err(__try_var)
let err_arm = self.arm(sp, vec!(err_pat), err_expr);
// match head { Ok() => ..., Err() => ... }
self.expr_match(sp, head, vec!(ok_arm, err_arm))
}
fn pat(&self, span: Span, pat: ast::Pat_) -> @ast::Pat {
@ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span }
}
fn pat_wild(&self, span: Span) -> @ast::Pat {
self.pat(span, ast::PatWild)
}
fn pat_lit(&self, span: Span, expr: @ast::Expr) -> @ast::Pat {
self.pat(span, ast::PatLit(expr))
}
fn pat_ident(&self, span: Span, ident: ast::Ident) -> @ast::Pat {
self.pat_ident_binding_mode(span, ident, ast::BindByValue(ast::MutImmutable))
}
fn pat_ident_binding_mode(&self,
span: Span,
ident: ast::Ident,
bm: ast::BindingMode) -> @ast::Pat {
let path = self.path_ident(span, ident);<|fim▁hole|> self.pat(span, pat)
}
fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<@ast::Pat> ) -> @ast::Pat {
let pat = ast::PatEnum(path, Some(subpats));
self.pat(span, pat)
}
fn pat_struct(&self, span: Span,
path: ast::Path, field_pats: Vec<ast::FieldPat> ) -> @ast::Pat {
let pat = ast::PatStruct(path, field_pats, false);
self.pat(span, pat)
}
fn arm(&self, _span: Span, pats: Vec<@ast::Pat> , expr: @ast::Expr) -> ast::Arm {
ast::Arm {
pats: pats,
guard: None,
body: expr
}
}
fn arm_unreachable(&self, span: Span) -> ast::Arm {
self.arm(span, vec!(self.pat_wild(span)), self.expr_unreachable(span))
}
fn expr_match(&self, span: Span, arg: @ast::Expr, arms: Vec<ast::Arm> ) -> @Expr {
self.expr(span, ast::ExprMatch(arg, arms))
}
fn expr_if(&self, span: Span,
cond: @ast::Expr, then: @ast::Expr, els: Option<@ast::Expr>) -> @ast::Expr {
let els = els.map(|x| self.expr_block(self.block_expr(x)));
self.expr(span, ast::ExprIf(cond, self.block_expr(then), els))
}
fn lambda_fn_decl(&self, span: Span,
fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> @ast::Expr {
self.expr(span, ast::ExprFnBlock(fn_decl, blk))
}
fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> @ast::Expr {
let fn_decl = self.fn_decl(
ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(),
self.ty_infer(span));
self.expr(span, ast::ExprFnBlock(fn_decl, blk))
}
fn lambda0(&self, span: Span, blk: P<ast::Block>) -> @ast::Expr {
self.lambda(span, Vec::new(), blk)
}
fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr {
self.lambda(span, vec!(ident), blk)
}
fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , expr: @ast::Expr) -> @ast::Expr {
self.lambda(span, ids, self.block_expr(expr))
}
fn lambda_expr_0(&self, span: Span, expr: @ast::Expr) -> @ast::Expr {
self.lambda0(span, self.block_expr(expr))
}
fn lambda_expr_1(&self, span: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr {
self.lambda1(span, self.block_expr(expr), ident)
}
fn lambda_stmts(&self,
span: Span,
ids: Vec<ast::Ident>,
stmts: Vec<@ast::Stmt>)
-> @ast::Expr {
self.lambda(span, ids, self.block(span, stmts, None))
}
fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr {
self.lambda0(span, self.block(span, stmts, None))
}
fn lambda_stmts_1(&self, span: Span, stmts: Vec<@ast::Stmt> , ident: ast::Ident) -> @ast::Expr {
self.lambda1(span, self.block(span, stmts, None), ident)
}
fn arg(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Arg {
let arg_pat = self.pat_ident(span, ident);
ast::Arg {
ty: ty,
pat: arg_pat,
id: ast::DUMMY_NODE_ID
}
}
// FIXME unused self
fn fn_decl(&self, inputs: Vec<ast::Arg> , output: P<ast::Ty>) -> P<ast::FnDecl> {
P(ast::FnDecl {
inputs: inputs,
output: output,
cf: ast::Return,
variadic: false
})
}
fn item(&self, span: Span,
name: Ident, attrs: Vec<ast::Attribute> , node: ast::Item_) -> @ast::Item {
// FIXME: Would be nice if our generated code didn't violate
// Rust coding conventions
@ast::Item { ident: name,
attrs: attrs,
id: ast::DUMMY_NODE_ID,
node: node,
vis: ast::Inherited,
span: span }
}
fn item_fn_poly(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
generics: Generics,
body: P<ast::Block>) -> @ast::Item {
self.item(span,
name,
Vec::new(),
ast::ItemFn(self.fn_decl(inputs, output),
ast::NormalFn,
abi::Rust,
generics,
body))
}
fn item_fn(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
body: P<ast::Block>
) -> @ast::Item {
self.item_fn_poly(
span,
name,
inputs,
output,
ast_util::empty_generics(),
body)
}
fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant {
let args = tys.move_iter().map(|ty| {
ast::VariantArg { ty: ty, id: ast::DUMMY_NODE_ID }
}).collect();
respan(span,
ast::Variant_ {
name: name,
attrs: Vec::new(),
kind: ast::TupleVariantKind(args),
id: ast::DUMMY_NODE_ID,
disr_expr: None,
vis: ast::Public
})
}
fn item_enum_poly(&self, span: Span, name: Ident,
enum_definition: ast::EnumDef,
generics: Generics) -> @ast::Item {
self.item(span, name, Vec::new(), ast::ItemEnum(enum_definition, generics))
}
fn item_enum(&self, span: Span, name: Ident,
enum_definition: ast::EnumDef) -> @ast::Item {
self.item_enum_poly(span, name, enum_definition,
ast_util::empty_generics())
}
fn item_struct(&self, span: Span, name: Ident,
struct_def: ast::StructDef) -> @ast::Item {
self.item_struct_poly(
span,
name,
struct_def,
ast_util::empty_generics()
)
}
fn item_struct_poly(&self, span: Span, name: Ident,
struct_def: ast::StructDef, generics: Generics) -> @ast::Item {
self.item(span, name, Vec::new(), ast::ItemStruct(@struct_def, generics))
}
fn item_mod(&self, span: Span, name: Ident,
attrs: Vec<ast::Attribute> ,
vi: Vec<ast::ViewItem> ,
items: Vec<@ast::Item> ) -> @ast::Item {
self.item(
span,
name,
attrs,
ast::ItemMod(ast::Mod {
view_items: vi,
items: items,
})
)
}
fn item_ty_poly(&self, span: Span, name: Ident, ty: P<ast::Ty>,
generics: Generics) -> @ast::Item {
self.item(span, name, Vec::new(), ast::ItemTy(ty, generics))
}
fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> @ast::Item {
self.item_ty_poly(span, name, ty, ast_util::empty_generics())
}
fn attribute(&self, sp: Span, mi: @ast::MetaItem) -> ast::Attribute {
respan(sp, ast::Attribute_ {
style: ast::AttrOuter,
value: mi,
is_sugared_doc: false,
})
}
fn meta_word(&self, sp: Span, w: InternedString) -> @ast::MetaItem {
@respan(sp, ast::MetaWord(w))
}
fn meta_list(&self,
sp: Span,
name: InternedString,
mis: Vec<@ast::MetaItem> )
-> @ast::MetaItem {
@respan(sp, ast::MetaList(name, mis))
}
fn meta_name_value(&self,
sp: Span,
name: InternedString,
value: ast::Lit_)
-> @ast::MetaItem {
@respan(sp, ast::MetaNameValue(name, respan(sp, value)))
}
fn view_use(&self, sp: Span,
vis: ast::Visibility, vp: Vec<@ast::ViewPath> ) -> ast::ViewItem {
ast::ViewItem {
node: ast::ViewItemUse(vp),
attrs: Vec::new(),
vis: vis,
span: sp
}
}
fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem {
let last = path.segments.last().unwrap().identifier;
self.view_use_simple_(sp, vis, last, path)
}
fn view_use_simple_(&self, sp: Span, vis: ast::Visibility,
ident: ast::Ident, path: ast::Path) -> ast::ViewItem {
self.view_use(sp, vis,
vec!(@respan(sp,
ast::ViewPathSimple(ident,
path,
ast::DUMMY_NODE_ID))))
}
fn view_use_list(&self, sp: Span, vis: ast::Visibility,
path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem {
let imports = imports.iter().map(|id| {
respan(sp, ast::PathListIdent_ { name: *id, id: ast::DUMMY_NODE_ID })
}).collect();
self.view_use(sp, vis,
vec!(@respan(sp,
ast::ViewPathList(self.path(sp, path),
imports,
ast::DUMMY_NODE_ID))))
}
fn view_use_glob(&self, sp: Span,
vis: ast::Visibility, path: Vec<ast::Ident> ) -> ast::ViewItem {
self.view_use(sp, vis,
vec!(@respan(sp,
ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID))))
}
}
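// `Duplicator` re-folds an AST fragment with the default `Folder` behaviour,
// overriding only `new_id` so that every node in the copy gets
// `DUMMY_NODE_ID` instead of a stale id from the original.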
struct Duplicator<'a> {
cx: &'a ExtCtxt<'a>,
}
impl<'a> Folder for Duplicator<'a> {
fn new_id(&mut self, _: NodeId) -> NodeId {
ast::DUMMY_NODE_ID
}
}
pub trait Duplicate {
//
// Duplication functions
//
// These functions just duplicate AST nodes.
//
fn duplicate(&self, cx: &ExtCtxt) -> Self;
}
impl Duplicate for @ast::Expr {
fn duplicate(&self, cx: &ExtCtxt) -> @ast::Expr {
let mut folder = Duplicator {
cx: cx,
};
folder.fold_expr(*self)
}
}<|fim▁end|> | let pat = ast::PatIdent(bm, path, None); |
<|file_name|>rnn_test.py<|end_file_name|>
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for rnn module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import time
import timeit
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.python.util import nest
class Plus1RNNCell(tf.nn.rnn_cell.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1)."""
@property
def output_size(self):
return 5
@property
def state_size(self):
return 5
def __call__(self, input_, state, scope=None):
    return (input_ + 1, state + 1)
class DummyMultiDimensionalLSTM(tf.nn.rnn_cell.RNNCell):
"""LSTM Cell generating (output, new_state) = (input + 1, state + 1).
The input to this cell may have an arbitrary number of dimensions that follow
the preceding 'Time' and 'Batch' dimensions.
"""
def __init__(self, dims):
"""Initialize the Multi-dimensional LSTM cell.
Args:
dims: tuple that contains the dimensions of the output of the cell,
without including 'Time' or 'Batch' dimensions.
"""
if not isinstance(dims, tuple):
raise TypeError("The dimensions passed to DummyMultiDimensionalLSTM"
"should be a tuple of ints.")
self._dims = dims
self._output_size = tf.TensorShape(self._dims)
self._state_size = (tf.TensorShape(self._dims), tf.TensorShape(self._dims))
@property
def output_size(self):
return self._output_size
@property
def state_size(self):
return self._state_size
def __call__(self, input_, state, scope=None):
h, c = state
return (input_ + 1, (h + 1, c + 1))
class NestedRNNCell(tf.nn.rnn_cell.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1).
The input, output and state of this cell is a tuple of two tensors.
"""
@property
def output_size(self):
return (5, 5)
@property
def state_size(self):
return (6, 6)
def __call__(self, input_, state, scope=None):
h, c = state
x, y = input_
return ((x + 1, y + 1), (h + 1, c + 1))
class TestStateSaver(object):
def __init__(self, batch_size, state_size):
self._batch_size = batch_size
self._state_size = state_size
self.saved_state = {}
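  # `state` fabricates an all-zeros initial state; `_state_size` may be a
  # plain int, a tuple of dimensions, or a dict keyed by state name.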
def state(self, name):
if isinstance(self._state_size, dict):
state_size = self._state_size[name]
else:
state_size = self._state_size
if isinstance(state_size, int):
state_size = (state_size,)
elif isinstance(state_size, tuple):
pass
else:
raise TypeError("state_size should either be an int or a tuple")
return tf.zeros((self._batch_size,) + state_size)
def save_state(self, name, state):
self.saved_state[name] = state
return tf.identity(state)
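# The dummy cells and the state saver above create no trainable variables, so
# tests can exercise the wiring of the RNN containers (static, dynamic,
# bidirectional, state-saving) without depending on weight initialization.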
class RNNTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def testInvalidSequenceLengthShape(self):
cell = Plus1RNNCell()
inputs = [tf.placeholder(tf.float32, shape=(3, 4))]
with self.assertRaisesRegexp(ValueError, "must be a vector"):
tf.nn.rnn(cell, inputs, dtype=tf.float32, sequence_length=4)
with self.assertRaisesRegexp(ValueError, "must be a vector"):
tf.nn.dynamic_rnn(
cell, tf.pack(inputs), dtype=tf.float32, sequence_length=[[4]])
def testRNN(self):
cell = Plus1RNNCell()
batch_size = 2
input_size = 5
max_length = 8 # unrolled up to this length
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
outputs, state = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
for out, inp in zip(outputs, inputs):
self.assertEqual(out.get_shape(), inp.get_shape())
self.assertEqual(out.dtype, inp.dtype)
with self.test_session(use_gpu=False) as sess:
input_value = np.random.randn(batch_size, input_size)
values = sess.run(outputs + [state],
feed_dict={inputs[0]: input_value})
# Outputs
for v in values[:-1]:
self.assertAllClose(v, input_value + 1.0)
# Final state
self.assertAllClose(
values[-1],
max_length * np.ones((batch_size, input_size), dtype=np.float32))
def testDropout(self):
cell = Plus1RNNCell()
full_dropout_cell = tf.nn.rnn_cell.DropoutWrapper(
cell, input_keep_prob=1e-12, seed=0)
batch_size = 2
input_size = 5
max_length = 8
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
with tf.variable_scope("share_scope"):
outputs, state = tf.nn.rnn(cell, inputs, dtype=tf.float32)
with tf.variable_scope("drop_scope"):
dropped_outputs, _ = tf.nn.rnn(
full_dropout_cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
for out, inp in zip(outputs, inputs):
self.assertEqual(out.get_shape().as_list(), inp.get_shape().as_list())
self.assertEqual(out.dtype, inp.dtype)
with self.test_session(use_gpu=False) as sess:
input_value = np.random.randn(batch_size, input_size)
values = sess.run(outputs + [state],
feed_dict={inputs[0]: input_value})
full_dropout_values = sess.run(dropped_outputs,
feed_dict={inputs[0]: input_value})
for v in values[:-1]:
self.assertAllClose(v, input_value + 1.0)
      for d_v in full_dropout_values[:-1]:  # cell adds 1.0 to the dropped-out (all-zero) inputs
self.assertAllClose(d_v, np.ones_like(input_value))
def _testDynamicCalculation(self, use_gpu):
cell = Plus1RNNCell()
sequence_length = tf.placeholder(tf.int64)
batch_size = 2
input_size = 5
max_length = 8
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
with tf.variable_scope("drop_scope"):
dynamic_outputs, dynamic_state = tf.nn.rnn(
cell, inputs, sequence_length=sequence_length, dtype=tf.float32)
self.assertEqual(len(dynamic_outputs), len(inputs))
with self.test_session(use_gpu=use_gpu) as sess:
input_value = np.random.randn(batch_size, input_size)
dynamic_values = sess.run(dynamic_outputs,
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
dynamic_state_value = sess.run([dynamic_state],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
# outputs are fully calculated for t = 0, 1
for v in dynamic_values[:2]:
self.assertAllClose(v, input_value + 1.0)
# outputs at t = 2 are zero for entry 0, calculated for entry 1
self.assertAllClose(
dynamic_values[2],
np.vstack((
np.zeros((input_size)),
1.0 + input_value[1, :])))
# outputs at t = 3+ are zero
for v in dynamic_values[3:]:
self.assertAllEqual(v, np.zeros_like(input_value))
# the final states are:
# entry 0: the values from the calculation at t=1
# entry 1: the values from the calculation at t=2
self.assertAllEqual(
dynamic_state_value[0],
np.vstack((
1.0 * (1 + 1) * np.ones((input_size)),
1.0 * (2 + 1) * np.ones((input_size)))))
def testDynamicCalculation(self):
self._testDynamicCalculation(True)
self._testDynamicCalculation(False)
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
      # Check that all the variable names start with the proper scope.
tf.initialize_all_variables()
all_vars = tf.all_variables()
prefix = prefix or "RNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("RNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testScope(self):
def factory(scope):
cell = Plus1RNNCell()
batch_size = 2
input_size = 5
max_length = 8 # unrolled up to this length
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
return tf.nn.rnn(cell, inputs, dtype=tf.float32, scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
class GRUTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _testDynamic(self, use_gpu):
time_steps = 8
num_units = 3
input_size = 5
batch_size = 2
input_values = np.random.randn(time_steps, batch_size, input_size)
sequence_length = np.random.randint(0, time_steps, size=batch_size)
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
concat_inputs = tf.placeholder(
tf.float32, shape=(time_steps, batch_size, input_size))
cell = tf.nn.rnn_cell.GRUCell(num_units=num_units)
with tf.variable_scope("dynamic_scope"):
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs=concat_inputs, sequence_length=sequence_length,
time_major=True, dtype=tf.float32)
feeds = {concat_inputs: input_values}
# Initialize
tf.initialize_all_variables().run(feed_dict=feeds)
sess.run([outputs_dynamic, state_dynamic], feed_dict=feeds)
def testDynamic(self):
self._testDynamic(use_gpu=False)
self._testDynamic(use_gpu=True)
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
tf.initialize_all_variables()
      # Check that all the variable names start with the proper scope.
all_vars = tf.all_variables()
prefix = prefix or "RNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("RNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testDynamicScope(self):
time_steps = 8
num_units = 3
input_size = 5
batch_size = 2
sequence_length = np.random.randint(0, time_steps, size=batch_size)
def factory(scope):
concat_inputs = tf.placeholder(
tf.float32, shape=(time_steps, batch_size, input_size))
cell = tf.nn.rnn_cell.GRUCell(num_units=num_units)
return tf.nn.dynamic_rnn(cell, inputs=concat_inputs,
sequence_length=sequence_length,
time_major=True, dtype=tf.float32,
scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
class LSTMTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _testNoProjNoSharding(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
cell = tf.nn.rnn_cell.LSTMCell(num_units, initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
outputs, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
sess.run(outputs, feed_dict={inputs[0]: input_value})
def _testCellClipping(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True, cell_clip=0.0, initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
outputs, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
values = sess.run(outputs, feed_dict={inputs[0]: input_value})
for value in values:
        # If the cell state c is clipped to 0, then tanh(c) = 0, so m == 0.
self.assertAllEqual(value, np.zeros((batch_size, num_units)))
def _testNoProjNoShardingSimpleStateSaver(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
state_saver = TestStateSaver(batch_size, 2 * num_units)
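      # With state_is_tuple=False the LSTM state is c and m concatenated,
      # hence a saved-state size of 2 * num_units.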
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=False, initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
with tf.variable_scope("share_scope"):
outputs, state = tf.nn.state_saving_rnn(
cell, inputs, state_saver=state_saver, state_name="save_lstm")
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
(last_state_value, saved_state_value) = sess.run(
[state, state_saver.saved_state["save_lstm"]],
feed_dict={inputs[0]: input_value})
self.assertAllEqual(last_state_value, saved_state_value)
def testNoProjNoShardingTupleStateSaver(self):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
state_saver = TestStateSaver(batch_size, num_units)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=False, initializer=initializer,
state_is_tuple=True)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
with tf.variable_scope("share_scope"):
outputs, state = tf.nn.state_saving_rnn(
cell, inputs, state_saver=state_saver, state_name=("c", "m"))
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
last_and_saved_states = sess.run(
state + (state_saver.saved_state["c"], state_saver.saved_state["m"]),
feed_dict={inputs[0]: input_value})
self.assertEqual(4, len(last_and_saved_states))
self.assertAllEqual(last_and_saved_states[:2], last_and_saved_states[2:])
def testNoProjNoShardingNestedTupleStateSaver(self):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
state_saver = TestStateSaver(batch_size, {"c0": num_units,
"m0": num_units,
"c1": num_units + 1,
"m1": num_units + 1,
"c2": num_units + 2,
"m2": num_units + 2,
"c3": num_units + 3,
"m3": num_units + 3})
def _cell(i):
return tf.nn.rnn_cell.LSTMCell(
num_units + i, use_peepholes=False, initializer=initializer,
state_is_tuple=True)
# This creates a state tuple which has 4 sub-tuples of length 2 each.
cell = tf.nn.rnn_cell.MultiRNNCell(
[_cell(i) for i in range(4)], state_is_tuple=True)
self.assertEqual(len(cell.state_size), 4)
for i in range(4):
self.assertEqual(len(cell.state_size[i]), 2)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
state_names = (("c0", "m0"), ("c1", "m1"),
("c2", "m2"), ("c3", "m3"))
with tf.variable_scope("share_scope"):
outputs, state = tf.nn.state_saving_rnn(
cell, inputs, state_saver=state_saver, state_name=state_names)
self.assertEqual(len(outputs), len(inputs))
      # Final output comes from _cell(3), which has num_units + 3 units
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units + 3])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
last_states = sess.run(
list(nest.flatten(state)), feed_dict={inputs[0]: input_value})
saved_states = sess.run(
list(state_saver.saved_state.values()),
feed_dict={inputs[0]: input_value})
self.assertEqual(8, len(last_states))
self.assertEqual(8, len(saved_states))
flat_state_names = nest.flatten(state_names)
named_saved_states = dict(
zip(state_saver.saved_state.keys(), saved_states))
for i in range(8):
self.assertAllEqual(
last_states[i],
named_saved_states[flat_state_names[i]])
def _testProjNoSharding(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer,
state_is_tuple=False)
outputs, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
sess.run(outputs, feed_dict={inputs[0]: input_value})
def testStateTupleWithProjAndSequenceLength(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell_notuple = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer, state_is_tuple=False)
cell_tuple = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer, state_is_tuple=True)
outputs_notuple, state_notuple = tf.nn.rnn(
cell_notuple, inputs, dtype=tf.float32,
sequence_length=sequence_length)
tf.get_variable_scope().reuse_variables()
outputs_tuple, state_tuple = tf.nn.rnn(
cell_tuple, inputs, dtype=tf.float32,
sequence_length=sequence_length)
self.assertEqual(len(outputs_notuple), len(inputs))
self.assertEqual(len(outputs_tuple), len(inputs))
self.assertTrue(isinstance(state_tuple, tuple))
self.assertTrue(isinstance(state_notuple, tf.Tensor))
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
outputs_notuple_v = sess.run(
outputs_notuple, feed_dict={inputs[0]: input_value})
outputs_tuple_v = sess.run(
outputs_tuple, feed_dict={inputs[0]: input_value})
self.assertAllEqual(outputs_notuple_v, outputs_tuple_v)
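      # The non-tuple state is the (c, h) pair concatenated along depth, so it
      # should equal np.hstack of the tuple state.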
(state_notuple_v,) = sess.run(
(state_notuple,), feed_dict={inputs[0]: input_value})
state_tuple_v = sess.run(
state_tuple, feed_dict={inputs[0]: input_value})
self.assertAllEqual(state_notuple_v, np.hstack(state_tuple_v))
def _testProjSharding(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
num_proj_shards = 3
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units,
use_peepholes=True,
num_proj=num_proj,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
initializer=initializer,
state_is_tuple=False)
outputs, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
sess.run(outputs, feed_dict={inputs[0]: input_value})
def _testTooManyShards(self, use_gpu):
num_units = 3
input_size = 5
num_proj = 4
num_proj_shards = 4
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()):
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units,
use_peepholes=True,
num_proj=num_proj,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
initializer=initializer,
state_is_tuple=False)
with self.assertRaises(ValueError):
tf.nn.rnn(cell, inputs, dtype=tf.float32)
def _testDoubleInput(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
num_proj_shards = 3
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-1, 1, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float64, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units,
use_peepholes=True,
num_proj=num_proj,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
initializer=initializer,
state_is_tuple=False)
outputs, _ = tf.nn.rnn(
cell, inputs, initial_state=cell.zero_state(batch_size, tf.float64))
self.assertEqual(len(outputs), len(inputs))
tf.initialize_all_variables().run()
input_value = np.asarray(np.random.randn(batch_size, input_size),
dtype=np.float64)
values = sess.run(outputs, feed_dict={inputs[0]: input_value})
self.assertEqual(values[0].dtype, input_value.dtype)
def _testShardNoShardEquivalentOutput(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
num_proj_shards = 3
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
initializer = tf.constant_initializer(0.001)
cell_noshard = tf.nn.rnn_cell.LSTMCell(
num_units,
num_proj=num_proj,
use_peepholes=True,
initializer=initializer,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
state_is_tuple=False)
cell_shard = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
initializer=initializer, num_proj=num_proj,
state_is_tuple=False)
with tf.variable_scope("noshard_scope"):
outputs_noshard, state_noshard = tf.nn.rnn(
cell_noshard, inputs, dtype=tf.float32)
with tf.variable_scope("shard_scope"):
outputs_shard, state_shard = tf.nn.rnn(
cell_shard, inputs, dtype=tf.float32)
self.assertEqual(len(outputs_noshard), len(inputs))
self.assertEqual(len(outputs_noshard), len(outputs_shard))
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
feeds = dict((x, input_value) for x in inputs)
values_noshard = sess.run(outputs_noshard, feed_dict=feeds)
values_shard = sess.run(outputs_shard, feed_dict=feeds)
state_values_noshard = sess.run([state_noshard], feed_dict=feeds)
state_values_shard = sess.run([state_shard], feed_dict=feeds)
self.assertEqual(len(values_noshard), len(values_shard))
self.assertEqual(len(state_values_noshard), len(state_values_shard))
for (v_noshard, v_shard) in zip(values_noshard, values_shard):
self.assertAllClose(v_noshard, v_shard, atol=1e-3)
for (s_noshard, s_shard) in zip(state_values_noshard, state_values_shard):
self.assertAllClose(s_noshard, s_shard, atol=1e-3)
def _testDoubleInputWithDropoutAndDynamicCalculation(
self, use_gpu):
"""Smoke test for using LSTM with doubles, dropout, dynamic calculation."""
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
num_proj_shards = 3
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
sequence_length = tf.placeholder(tf.int64)
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float64, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units,
use_peepholes=True,
num_proj=num_proj,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
initializer=initializer,
state_is_tuple=False)
dropout_cell = tf.nn.rnn_cell.DropoutWrapper(cell, 0.5, seed=0)
outputs, state = tf.nn.rnn(
dropout_cell, inputs, sequence_length=sequence_length,
initial_state=cell.zero_state(batch_size, tf.float64))
self.assertEqual(len(outputs), len(inputs))
tf.initialize_all_variables().run(feed_dict={sequence_length: [2, 3]})
input_value = np.asarray(np.random.randn(batch_size, input_size),
dtype=np.float64)
values = sess.run(outputs, feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
state_value = sess.run([state], feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
self.assertEqual(values[0].dtype, input_value.dtype)
self.assertEqual(state_value[0].dtype, input_value.dtype)
def testSharingWeightsWithReuse(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-1, 1, seed=self._seed)
initializer_d = tf.random_uniform_initializer(-1, 1, seed=self._seed+1)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer,
state_is_tuple=False)
cell_d = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer_d,
state_is_tuple=False)
with tf.variable_scope("share_scope"):
outputs0, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
with tf.variable_scope("share_scope", reuse=True):
outputs1, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
with tf.variable_scope("diff_scope"):
outputs2, _ = tf.nn.rnn(cell_d, inputs, dtype=tf.float32)
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
output_values = sess.run(
outputs0 + outputs1 + outputs2, feed_dict={inputs[0]: input_value})
outputs0_values = output_values[:max_length]
outputs1_values = output_values[max_length:2*max_length]
outputs2_values = output_values[2*max_length:]
self.assertEqual(len(outputs0_values), len(outputs1_values))
self.assertEqual(len(outputs0_values), len(outputs2_values))
for o1, o2, o3 in zip(outputs0_values, outputs1_values, outputs2_values):
# Same weights used by both RNNs so outputs should be the same.
self.assertAllEqual(o1, o2)
# Different weights used so outputs should be different.
self.assertTrue(np.linalg.norm(o1-o3) > 1e-6)
def testSharingWeightsWithDifferentNamescope(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-1, 1, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer,
state_is_tuple=False)
with tf.name_scope("scope0"):
with tf.variable_scope("share_scope"):
outputs0, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
with tf.name_scope("scope1"):
with tf.variable_scope("share_scope", reuse=True):
outputs1, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
output_values = sess.run(
outputs0 + outputs1, feed_dict={inputs[0]: input_value})
outputs0_values = output_values[:max_length]
outputs1_values = output_values[max_length:]
self.assertEqual(len(outputs0_values), len(outputs1_values))
for out0, out1 in zip(outputs0_values, outputs1_values):
self.assertAllEqual(out0, out1)
def testDynamicRNNWithTupleStates(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
inputs_c = tf.pack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer, state_is_tuple=True)
outputs_static, state_static = tf.nn.rnn(
cell, inputs, dtype=tf.float32,
sequence_length=sequence_length)
tf.get_variable_scope().reuse_variables()
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs_c, dtype=tf.float32, time_major=True,
sequence_length=sequence_length)
self.assertTrue(isinstance(state_static, tf.nn.rnn_cell.LSTMStateTuple))
self.assertTrue(isinstance(state_dynamic, tf.nn.rnn_cell.LSTMStateTuple))
self.assertEqual(state_static[0], state_static.c)
self.assertEqual(state_static[1], state_static.h)
self.assertEqual(state_dynamic[0], state_dynamic.c)
self.assertEqual(state_dynamic[1], state_dynamic.h)
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
outputs_static_v = sess.run(
outputs_static, feed_dict={inputs[0]: input_value})
outputs_dynamic_v = sess.run(
outputs_dynamic, feed_dict={inputs[0]: input_value})
self.assertAllEqual(outputs_static_v, outputs_dynamic_v)
state_static_v = sess.run(
state_static, feed_dict={inputs[0]: input_value})
state_dynamic_v = sess.run(
state_dynamic, feed_dict={inputs[0]: input_value})
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_dynamic_v))
def testDynamicRNNWithNestedTupleStates(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
inputs_c = tf.pack(inputs)
def _cell(i):
return tf.nn.rnn_cell.LSTMCell(
num_units + i, use_peepholes=True,
num_proj=num_proj + i, initializer=initializer, state_is_tuple=True)
# This creates a state tuple which has 4 sub-tuples of length 2 each.
cell = tf.nn.rnn_cell.MultiRNNCell(
[_cell(i) for i in range(4)], state_is_tuple=True)
self.assertEqual(len(cell.state_size), 4)
for i in range(4):
self.assertEqual(len(cell.state_size[i]), 2)
test_zero = cell.zero_state(1, tf.float32)
self.assertEqual(len(test_zero), 4)
for i in range(4):
self.assertEqual(test_zero[i][0].get_shape()[1], cell.state_size[i][0])
self.assertEqual(test_zero[i][1].get_shape()[1], cell.state_size[i][1])
outputs_static, state_static = tf.nn.rnn(
cell, inputs, dtype=tf.float32,
sequence_length=sequence_length)
tf.get_variable_scope().reuse_variables()
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs_c, dtype=tf.float32, time_major=True,
sequence_length=sequence_length)
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
outputs_static_v = sess.run(
outputs_static, feed_dict={inputs[0]: input_value})
outputs_dynamic_v = sess.run(
outputs_dynamic, feed_dict={inputs[0]: input_value})
self.assertAllEqual(outputs_static_v, outputs_dynamic_v)
state_static_v = sess.run(
nest.flatten(state_static), feed_dict={inputs[0]: input_value})
state_dynamic_v = sess.run(
nest.flatten(state_dynamic), feed_dict={inputs[0]: input_value})
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_dynamic_v))
def _testDynamicEquivalentToStaticRNN(self, use_gpu, use_sequence_length):
time_steps = 8
num_units = 3
num_proj = 4
input_size = 5
batch_size = 2
input_values = np.random.randn(time_steps, batch_size, input_size)
if use_sequence_length:
sequence_length = np.random.randint(0, time_steps, size=batch_size)
else:
sequence_length = None
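    # The same LSTM is built twice -- unrolled via tf.nn.rnn and iterated via
    # tf.nn.dynamic_rnn -- and the outputs, final states, and gradients
    # (w.r.t. both inputs and variables) are compared element-wise.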
    ########## Step 1: Run static graph and generate readouts
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
concat_inputs = tf.placeholder(tf.float32,
shape=(time_steps, batch_size, input_size))
inputs = tf.unpack(concat_inputs)
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
initializer=initializer, num_proj=num_proj, state_is_tuple=False)
with tf.variable_scope("dynamic_scope"):
outputs_static, state_static = tf.nn.rnn(
cell, inputs, sequence_length=sequence_length, dtype=tf.float32)
feeds = {concat_inputs: input_values}
# Initialize
tf.initialize_all_variables().run(feed_dict=feeds)
# Generate gradients of sum of outputs w.r.t. inputs
static_gradients = tf.gradients(
outputs_static + [state_static], [concat_inputs])
# Generate gradients of individual outputs w.r.t. inputs
static_individual_gradients = nest.flatten([
tf.gradients(y, [concat_inputs])
for y in [outputs_static[0],
outputs_static[-1],
state_static]])
      # Generate gradients of individual outputs w.r.t. the trainable variables
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
assert len(trainable_variables) > 1, (
"Count of trainable variables: %d" % len(trainable_variables))
# pylint: disable=bad-builtin
static_individual_variable_gradients = nest.flatten([
tf.gradients(y, trainable_variables)
for y in [outputs_static[0],
outputs_static[-1],
state_static]])
# Test forward pass
values_static = sess.run(outputs_static, feed_dict=feeds)
(state_value_static,) = sess.run((state_static,), feed_dict=feeds)
# Test gradients to inputs and variables w.r.t. outputs & final state
static_grad_values = sess.run(static_gradients, feed_dict=feeds)
static_individual_grad_values = sess.run(
static_individual_gradients, feed_dict=feeds)
static_individual_var_grad_values = sess.run(
static_individual_variable_gradients, feed_dict=feeds)
########## Step 2: Run dynamic graph and generate readouts
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
concat_inputs = tf.placeholder(tf.float32,
shape=(time_steps, batch_size, input_size))
inputs = tf.unpack(concat_inputs)
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
initializer=initializer, num_proj=num_proj, state_is_tuple=False)
with tf.variable_scope("dynamic_scope"):
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs=concat_inputs, sequence_length=sequence_length,
time_major=True, dtype=tf.float32)
split_outputs_dynamic = tf.unpack(outputs_dynamic, time_steps)
feeds = {concat_inputs: input_values}
# Initialize
tf.initialize_all_variables().run(feed_dict=feeds)
# Generate gradients of sum of outputs w.r.t. inputs
dynamic_gradients = tf.gradients(
split_outputs_dynamic + [state_dynamic], [concat_inputs])
# Generate gradients of several individual outputs w.r.t. inputs
dynamic_individual_gradients = nest.flatten([
tf.gradients(y, [concat_inputs])
for y in [split_outputs_dynamic[0],
split_outputs_dynamic[-1],
state_dynamic]])
      # Generate gradients of individual outputs w.r.t. the trainable variables
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
assert len(trainable_variables) > 1, (
"Count of trainable variables: %d" % len(trainable_variables))
dynamic_individual_variable_gradients = nest.flatten([
tf.gradients(y, trainable_variables)
for y in [split_outputs_dynamic[0],
split_outputs_dynamic[-1],
state_dynamic]])
# Test forward pass
values_dynamic = sess.run(split_outputs_dynamic, feed_dict=feeds)
(state_value_dynamic,) = sess.run(
(state_dynamic,), feed_dict=feeds)
# Test gradients to inputs and variables w.r.t. outputs & final state
dynamic_grad_values = sess.run(dynamic_gradients, feed_dict=feeds)
dynamic_individual_grad_values = sess.run(
dynamic_individual_gradients, feed_dict=feeds)
dynamic_individual_var_grad_values = sess.run(
dynamic_individual_variable_gradients, feed_dict=feeds)
    ########## Step 3: Comparisons
self.assertEqual(len(values_static), len(values_dynamic))
for (value_static, value_dynamic) in zip(values_static, values_dynamic):
self.assertAllEqual(value_static, value_dynamic)
self.assertAllEqual(state_value_static, state_value_dynamic)
self.assertAllEqual(static_grad_values, dynamic_grad_values)
self.assertEqual(len(static_individual_grad_values),
len(dynamic_individual_grad_values))
self.assertEqual(len(static_individual_var_grad_values),
len(dynamic_individual_var_grad_values))
for i, (a, b) in enumerate(zip(static_individual_grad_values,
dynamic_individual_grad_values)):
tf.logging.info("Comparing individual gradients iteration %d" % i)
self.assertAllEqual(a, b)
for i, (a, b) in enumerate(zip(static_individual_var_grad_values,
dynamic_individual_var_grad_values)):
tf.logging.info(
"Comparing individual variable gradients iteration %d" % i)
self.assertAllEqual(a, b)
def testNoProjNoShardingSimpleStateSaver(self):
self._testNoProjNoShardingSimpleStateSaver(use_gpu=False)
self._testNoProjNoShardingSimpleStateSaver(use_gpu=True)
def testNoProjNoSharding(self):
self._testNoProjNoSharding(use_gpu=False)
self._testNoProjNoSharding(use_gpu=True)
def testCellClipping(self):
self._testCellClipping(use_gpu=False)
self._testCellClipping(use_gpu=True)
def testProjNoSharding(self):
self._testProjNoSharding(use_gpu=False)
self._testProjNoSharding(use_gpu=True)
def testProjSharding(self):
self._testProjSharding(use_gpu=False)
self._testProjSharding(use_gpu=True)
def testTooManyShards(self):
self._testTooManyShards(use_gpu=False)
self._testTooManyShards(use_gpu=True)
def testShardNoShardEquivalentOutput(self):
self._testShardNoShardEquivalentOutput(use_gpu=False)
self._testShardNoShardEquivalentOutput(use_gpu=True)
def testDoubleInput(self):
self._testDoubleInput(use_gpu=False)
self._testDoubleInput(use_gpu=True)
def testDoubleInputWithDropoutAndDynamicCalculation(self):
self._testDoubleInputWithDropoutAndDynamicCalculation(use_gpu=False)
self._testDoubleInputWithDropoutAndDynamicCalculation(use_gpu=True)
def testDynamicEquivalentToStaticRNN(self):
self._testDynamicEquivalentToStaticRNN(
use_gpu=False, use_sequence_length=False)
self._testDynamicEquivalentToStaticRNN(
use_gpu=True, use_sequence_length=False)
self._testDynamicEquivalentToStaticRNN(
use_gpu=False, use_sequence_length=True)
self._testDynamicEquivalentToStaticRNN(
use_gpu=True, use_sequence_length=True)
class BidirectionalRNNTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _createBidirectionalRNN(self,
use_gpu,
use_shape,
use_sequence_length,
scope=None):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
sequence_length = tf.placeholder(tf.int64) if use_sequence_length else None
cell_fw = tf.nn.rnn_cell.LSTMCell(num_units,
input_size,
initializer=initializer,
state_is_tuple=False)
cell_bw = tf.nn.rnn_cell.LSTMCell(num_units,
input_size,
initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(
tf.float32,
shape=(batch_size, input_size) if use_shape else (None, input_size))
]
outputs, state_fw, state_bw = tf.nn.bidirectional_rnn(
cell_fw,
cell_bw,
inputs,
dtype=tf.float32,
sequence_length=sequence_length,
scope=scope)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(
out.get_shape().as_list(),
[batch_size if use_shape else None, 2 * num_units])
input_value = np.random.randn(batch_size, input_size)
outputs = tf.pack(outputs)
return input_value, inputs, outputs, state_fw, state_bw, sequence_length
def _testBidirectionalRNN(self, use_gpu, use_shape):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createBidirectionalRNN(use_gpu, use_shape, True))
tf.initialize_all_variables().run()
# Run with pre-specified sequence length of 2, 3
out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
      # Since the forward and backward LSTM cells were initialized with the
      # same parameters, the forward and backward outputs have to be the same,
      # but reversed in time. The format is output[time][batch][depth], and
      # due to depth concatenation (as num_units=3 for both RNNs):
      # - forward output:  out[][][depth] for 0 <= depth < 3
      # - backward output: out[][][depth] for 3 <= depth < 6
#
# First sequence in batch is length=2
# Check that the time=0 forward output is equal to time=1 backward output
self.assertEqual(out[0][0][0], out[1][0][3])
self.assertEqual(out[0][0][1], out[1][0][4])
self.assertEqual(out[0][0][2], out[1][0][5])
# Check that the time=1 forward output is equal to time=0 backward output
self.assertEqual(out[1][0][0], out[0][0][3])
self.assertEqual(out[1][0][1], out[0][0][4])
self.assertEqual(out[1][0][2], out[0][0][5])
# Second sequence in batch is length=3
# Check that the time=0 forward output is equal to time=2 backward output
self.assertEqual(out[0][1][0], out[2][1][3])
self.assertEqual(out[0][1][1], out[2][1][4])
self.assertEqual(out[0][1][2], out[2][1][5])
# Check that the time=1 forward output is equal to time=1 backward output
self.assertEqual(out[1][1][0], out[1][1][3])
self.assertEqual(out[1][1][1], out[1][1][4])
self.assertEqual(out[1][1][2], out[1][1][5])
# Check that the time=2 forward output is equal to time=0 backward output
self.assertEqual(out[2][1][0], out[0][1][3])
self.assertEqual(out[2][1][1], out[0][1][4])
self.assertEqual(out[2][1][2], out[0][1][5])
# Via the reasoning above, the forward and backward final state should be
# exactly the same
self.assertAllClose(s_fw, s_bw)
def _testBidirectionalRNNWithoutSequenceLength(self, use_gpu, use_shape):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, _ = (
self._createBidirectionalRNN(use_gpu, use_shape, False))
tf.initialize_all_variables().run()
out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value})
      # Since the forward and backward LSTM cells were initialized with the
      # same parameters, the forward and backward outputs have to be the same,
      # but reversed in time. The format is output[time][batch][depth], and
      # due to depth concatenation (as num_units=3 for both RNNs):
      # - forward output:  out[][][depth] for 0 <= depth < 3
      # - backward output: out[][][depth] for 3 <= depth < 6
#
# Both sequences in batch are length=8. Check that the time=i
# forward output is equal to time=8-1-i backward output
for i in xrange(8):
self.assertEqual(out[i][0][0], out[8 - 1 - i][0][3])
self.assertEqual(out[i][0][1], out[8 - 1 - i][0][4])
self.assertEqual(out[i][0][2], out[8 - 1 - i][0][5])
for i in xrange(8):
self.assertEqual(out[i][1][0], out[8 - 1 - i][1][3])
self.assertEqual(out[i][1][1], out[8 - 1 - i][1][4])
self.assertEqual(out[i][1][2], out[8 - 1 - i][1][5])
# Via the reasoning above, the forward and backward final state should be
# exactly the same
self.assertAllClose(s_fw, s_bw)
def testBidirectionalRNN(self):
self._testBidirectionalRNN(use_gpu=False, use_shape=False)
self._testBidirectionalRNN(use_gpu=True, use_shape=False)
self._testBidirectionalRNN(use_gpu=False, use_shape=True)
self._testBidirectionalRNN(use_gpu=True, use_shape=True)
def testBidirectionalRNNWithoutSequenceLength(self):
self._testBidirectionalRNNWithoutSequenceLength(use_gpu=False,
use_shape=False)
self._testBidirectionalRNNWithoutSequenceLength(use_gpu=True,
use_shape=False)
self._testBidirectionalRNNWithoutSequenceLength(use_gpu=False,
use_shape=True)
self._testBidirectionalRNNWithoutSequenceLength(use_gpu=True,
use_shape=True)
def _createBidirectionalDynamicRNN(self, use_gpu, use_shape,
use_state_tuple, use_time_major,
scope=None):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
sequence_length = tf.placeholder(tf.int64)
cell_fw = tf.nn.rnn_cell.LSTMCell(num_units,
initializer=initializer,
state_is_tuple=use_state_tuple)
cell_bw = tf.nn.rnn_cell.LSTMCell(num_units,
initializer=initializer,
state_is_tuple=use_state_tuple)
inputs = max_length * [
tf.placeholder(tf.float32,
shape=(batch_size if use_shape else None, input_size))]
inputs_c = tf.pack(inputs)
if not use_time_major:
inputs_c = tf.transpose(inputs_c, [1, 0, 2])
outputs, states = tf.nn.bidirectional_dynamic_rnn(
cell_fw,
cell_bw,
inputs_c,
sequence_length,
dtype=tf.float32,
time_major=use_time_major,
scope=scope)
outputs = tf.concat(2, outputs)
state_fw, state_bw = states
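    # Expected output shape: batch-major is [batch, time, 2 * num_units];
    # time-major swaps the first two dimensions.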
outputs_shape = [None, max_length, 2 * num_units]
if use_shape:
outputs_shape[0] = batch_size
if use_time_major:
outputs_shape[0], outputs_shape[1] = outputs_shape[1], outputs_shape[0]
self.assertEqual(
outputs.get_shape().as_list(),
outputs_shape)
input_value = np.random.randn(batch_size, input_size)
return input_value, inputs, outputs, state_fw, state_bw, sequence_length
def _testBidirectionalDynamicRNN(self, use_gpu, use_shape,
use_state_tuple, use_time_major):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createBidirectionalDynamicRNN(
use_gpu, use_shape, use_state_tuple, use_time_major))
tf.initialize_all_variables().run()
# Run with pre-specified sequence length of 2, 3
if use_state_tuple:
out, c_fw, m_fw, c_bw, m_bw = sess.run(
[outputs, state_fw[0], state_fw[1], state_bw[0], state_bw[1]],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
s_fw = (c_fw, m_fw)
s_bw = (c_bw, m_bw)
else:
out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
      # Since the forward and backward LSTM cells were initialized with the
      # same parameters, the forward and backward outputs have to be the same,
      # but reversed in time. The format is output[time][batch][depth], and
      # due to depth concatenation (as num_units=3 for both RNNs):
      # - forward output:  out[][][depth] for 0 <= depth < 3
      # - backward output: out[][][depth] for 3 <= depth < 6
#
# First sequence in batch is length=2
# Check that the time=0 forward output is equal to time=1 backward output
if not use_time_major:
out = np.swapaxes(out, 0, 1)
self.assertEqual(out[0][0][0], out[1][0][3])
self.assertEqual(out[0][0][1], out[1][0][4])
self.assertEqual(out[0][0][2], out[1][0][5])
# Check that the time=1 forward output is equal to time=0 backward output
self.assertEqual(out[1][0][0], out[0][0][3])
self.assertEqual(out[1][0][1], out[0][0][4])
self.assertEqual(out[1][0][2], out[0][0][5])
# Second sequence in batch is length=3
# Check that the time=0 forward output is equal to time=2 backward output
self.assertEqual(out[0][1][0], out[2][1][3])
self.assertEqual(out[0][1][1], out[2][1][4])
self.assertEqual(out[0][1][2], out[2][1][5])
# Check that the time=1 forward output is equal to time=1 backward output
self.assertEqual(out[1][1][0], out[1][1][3])
self.assertEqual(out[1][1][1], out[1][1][4])
self.assertEqual(out[1][1][2], out[1][1][5])
# Check that the time=2 forward output is equal to time=0 backward output
self.assertEqual(out[2][1][0], out[0][1][3])
self.assertEqual(out[2][1][1], out[0][1][4])
self.assertEqual(out[2][1][2], out[0][1][5])
# Via the reasoning above, the forward and backward final state should be
# exactly the same
self.assertAllClose(s_fw, s_bw)
def testBidirectionalDynamicRNN(self):
# Generate 2^4 option values
# from [True, True, True, True] to [False, False, False, False]
options = itertools.product([True, False], repeat=4)
for option in options:
self._testBidirectionalDynamicRNN(use_gpu=option[0], use_shape=option[1],
use_state_tuple=option[2],
use_time_major=option[3])
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
    # REMARKS: factory(scope) is a function that accepts a scope argument,
    # which can be None, a string, or a VariableScope instance.
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
      # Check that all the variable names start with the proper scope.
tf.initialize_all_variables()
all_vars = tf.all_variables()
prefix = prefix or "BiRNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("BiRNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testBidirectionalRNNScope(self):
def factory(scope):
return self._createBidirectionalRNN(
use_gpu=True, use_shape=True,
use_sequence_length=True, scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
def testBidirectionalDynamicRNNScope(self):
def get_factory(use_time_major):
def factory(scope):
return self._createBidirectionalDynamicRNN(
use_gpu=True, use_shape=True, use_state_tuple=True,
use_time_major=use_time_major, scope=scope)
return factory
self._testScope(get_factory(True), use_outer_scope=True)
self._testScope(get_factory(True), use_outer_scope=False)
self._testScope(get_factory(True), prefix=None, use_outer_scope=False)
self._testScope(get_factory(False), use_outer_scope=True)
self._testScope(get_factory(False), use_outer_scope=False)
self._testScope(get_factory(False), prefix=None, use_outer_scope=False)
class MultiDimensionalLSTMTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def testMultiDimensionalLSTMAllRNNContainers(self):
feature_dims = (3, 4, 5)
input_size = feature_dims
batch_size = 2
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None,) + input_size)]
inputs_using_dim = max_length * [
tf.placeholder(tf.float32, shape=(batch_size,) + input_size)]
inputs_c = tf.pack(inputs)
# Create a cell for the whole test. This is fine because the cell has no
# variables.
cell = DummyMultiDimensionalLSTM(feature_dims)
state_saver = TestStateSaver(batch_size, input_size)
outputs_static, state_static = tf.nn.rnn(
cell, inputs, dtype=tf.float32,
sequence_length=sequence_length)
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs_c, dtype=tf.float32, time_major=True,
sequence_length=sequence_length)
outputs_bid, state_bid_fw, state_bid_bw = tf.nn.bidirectional_rnn(
cell, cell, inputs_using_dim, dtype=tf.float32,
sequence_length=sequence_length)
outputs_sav, state_sav = tf.nn.state_saving_rnn(
cell, inputs_using_dim, sequence_length=sequence_length,
state_saver=state_saver, state_name=("h", "c"))
for out, inp in zip(outputs_static, inputs):
self.assertEqual(out.get_shape().as_list(), inp.get_shape().as_list())
self.assertEqual(outputs_dynamic.get_shape().as_list(),
inputs_c.get_shape().as_list())
for out, inp in zip(outputs_bid, inputs_using_dim):
input_shape_list = inp.get_shape().as_list()
# fwd and bwd activations are concatenated along the second dim.
input_shape_list[1] *= 2
self.assertEqual(out.get_shape().as_list(), input_shape_list)
tf.initialize_all_variables().run()
input_total_size = (batch_size,) + input_size
input_value = np.random.randn(*input_total_size)
outputs_static_v = sess.run(
outputs_static, feed_dict={inputs[0]: input_value})
outputs_dynamic_v = sess.run(
outputs_dynamic, feed_dict={inputs[0]: input_value})
outputs_bid_v = sess.run(
outputs_bid, feed_dict={inputs_using_dim[0]: input_value})
outputs_sav_v = sess.run(
outputs_sav, feed_dict={inputs_using_dim[0]: input_value})
self.assertAllEqual(outputs_static_v, outputs_dynamic_v)
self.assertAllEqual(outputs_static_v, outputs_sav_v)
outputs_static_array = np.array(outputs_static_v)
outputs_static_array_double = np.concatenate(
(outputs_static_array, outputs_static_array), axis=2)
outputs_bid_array = np.array(outputs_bid_v)
self.assertAllEqual(outputs_static_array_double, outputs_bid_array)
state_static_v = sess.run(
state_static, feed_dict={inputs[0]: input_value})
state_dynamic_v = sess.run(
state_dynamic, feed_dict={inputs[0]: input_value})
state_bid_fw_v = sess.run(
state_bid_fw, feed_dict={inputs_using_dim[0]: input_value})
state_bid_bw_v = sess.run(
state_bid_bw, feed_dict={inputs_using_dim[0]: input_value})
state_sav_v = sess.run(
state_sav, feed_dict={inputs_using_dim[0]: input_value})
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_dynamic_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_sav_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_bid_fw_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_bid_bw_v))
class NestedLSTMTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def testNestedIOLSTMAllRNNContainers(self):
input_size = 5
batch_size = 2
state_size = 6
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
state_saver = TestStateSaver(batch_size, state_size)
single_input = (tf.placeholder(tf.float32, shape=(None, input_size)),
tf.placeholder(tf.float32, shape=(None, input_size)))
inputs = max_length * [single_input]
inputs_c = (tf.pack([input_[0] for input_ in inputs]),
tf.pack([input_[1] for input_ in inputs]))
single_input_using_dim = (
tf.placeholder(tf.float32, shape=(batch_size, input_size)),
tf.placeholder(tf.float32, shape=(batch_size, input_size)))
inputs_using_dim = max_length * [single_input_using_dim]
# Create a cell for the whole test. This is fine because the cell has no
# variables.
cell = NestedRNNCell()
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs_c, dtype=tf.float32, time_major=True,
sequence_length=sequence_length)
outputs_static, state_static = tf.nn.rnn(
cell, inputs, dtype=tf.float32,
sequence_length=sequence_length)
outputs_bid, state_bid_fw, state_bid_bw = tf.nn.bidirectional_rnn(
cell, cell, inputs_using_dim, dtype=tf.float32,
sequence_length=sequence_length)
outputs_sav, state_sav = tf.nn.state_saving_rnn(
cell, inputs_using_dim, sequence_length=sequence_length,
state_saver=state_saver, state_name=("h", "c"))
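      # Helper: flatten both (possibly nested) structures and compare shapes
      # pairwise; double=True accounts for the forward/backward concatenation
      # in the bidirectional outputs.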
def _assert_same_shape(input1, input2, double=False):
flat_input1 = nest.flatten(input1)
flat_input2 = nest.flatten(input2)
for inp1, inp2 in zip(flat_input1, flat_input2):
input_shape = inp1.get_shape().as_list()
if double:
input_shape[1] *= 2
self.assertEqual(input_shape, inp2.get_shape().as_list())
_assert_same_shape(inputs_c, outputs_dynamic)
_assert_same_shape(inputs, outputs_static)
_assert_same_shape(inputs_using_dim, outputs_sav)
_assert_same_shape(inputs_using_dim, outputs_bid, double=True)
tf.initialize_all_variables().run()
input_total_size = (batch_size, input_size)
input_value = (np.random.randn(*input_total_size),
np.random.randn(*input_total_size))
outputs_dynamic_v = sess.run(
outputs_dynamic, feed_dict={single_input: input_value})
outputs_static_v = sess.run(
outputs_static, feed_dict={single_input: input_value})
outputs_sav_v = sess.run(
outputs_sav, feed_dict={single_input_using_dim: input_value})
outputs_bid_v = sess.run(
outputs_bid, feed_dict={single_input_using_dim: input_value})
self.assertAllEqual(outputs_static_v,
np.transpose(outputs_dynamic_v, (1, 0, 2, 3)))
self.assertAllEqual(outputs_static_v, outputs_sav_v)
outputs_static_array = np.array(outputs_static_v)
outputs_static_array_double = np.concatenate(
(outputs_static_array, outputs_static_array), axis=3)
outputs_bid_array = np.array(outputs_bid_v)
self.assertAllEqual(outputs_static_array_double, outputs_bid_array)
state_dynamic_v = sess.run(
state_dynamic, feed_dict={single_input: input_value})
state_static_v = sess.run(
state_static, feed_dict={single_input: input_value})
state_bid_fw_v = sess.run(
state_bid_fw, feed_dict={single_input_using_dim: input_value})
state_bid_bw_v = sess.run(
state_bid_bw, feed_dict={single_input_using_dim: input_value})
state_sav_v = sess.run(
state_sav, feed_dict={single_input_using_dim: input_value})
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_dynamic_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_sav_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_bid_fw_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_bid_bw_v))
class RawRNNTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _testRawRNN(self, max_time):
with self.test_session(graph=tf.Graph()) as sess:
batch_size = 16
input_depth = 4
num_units = 3
inputs = tf.placeholder(shape=(max_time, batch_size, input_depth),
dtype=tf.float32)
sequence_length = tf.placeholder(shape=(batch_size,), dtype=tf.int32)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
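      # raw_rnn drives the computation through this callback: given the
      # current time step, previous cell output/state, and loop state, it
      # returns (elements_finished, next_input, next_cell_state, emit_output,
      # next_loop_state). On the first call cell_output is None, signalling
      # that initial values should be produced.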
def loop_fn(time_, cell_output, cell_state, unused_loop_state):
emit_output = cell_output # == None for time == 0
if cell_output is None: # time == 0
next_state = cell.zero_state(batch_size, tf.float32)
else:
next_state = cell_state # copy state through
elements_finished = (time_ >= sequence_length)
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input, next_state, emit_output, None)
outputs_ta, final_state, _ = tf.nn.raw_rnn(cell, loop_fn)
outputs = outputs_ta.pack()
tf.get_variable_scope().reuse_variables()
outputs_dynamic_rnn, final_state_dynamic_rnn = tf.nn.dynamic_rnn(
cell, inputs, time_major=True, dtype=tf.float32,
sequence_length=sequence_length)
variables = tf.trainable_variables()
gradients = tf.gradients([outputs, final_state], [inputs] + variables)
gradients_dynamic_rnn = tf.gradients(
[outputs_dynamic_rnn, final_state_dynamic_rnn], [inputs] + variables)
tf.initialize_all_variables().run()
rand_input = np.random.randn(max_time, batch_size, input_depth)
if max_time == 0:
rand_seq_len = np.zeros(batch_size)
else:
rand_seq_len = np.random.randint(max_time, size=batch_size)
# To ensure same output lengths for dynamic_rnn and raw_rnn
rand_seq_len[0] = max_time
(outputs_val, outputs_dynamic_rnn_val,
final_state_val, final_state_dynamic_rnn_val) = sess.run(
[outputs, outputs_dynamic_rnn, final_state, final_state_dynamic_rnn],
feed_dict={inputs: rand_input, sequence_length: rand_seq_len})
self.assertAllClose(outputs_dynamic_rnn_val, outputs_val)
self.assertAllClose(final_state_dynamic_rnn_val, final_state_val)
      # NOTE: With 0 time steps, raw_rnn has no shape information about the
      # input, so the gradient evaluation would fail and gradient
      # comparisons are impossible. This case therefore skips the
      # gradients test.
if max_time > 0:
self.assertEqual(len(gradients), len(gradients_dynamic_rnn))
gradients_val = sess.run(
gradients,
feed_dict={inputs: rand_input, sequence_length: rand_seq_len})
gradients_dynamic_rnn_val = sess.run(
gradients_dynamic_rnn,
feed_dict={inputs: rand_input, sequence_length: rand_seq_len})
self.assertEqual(len(gradients_val), len(gradients_dynamic_rnn_val))
input_gradients_val = gradients_val[0]
input_gradients_dynamic_rnn_val = gradients_dynamic_rnn_val[0]
self.assertAllClose(
input_gradients_val, input_gradients_dynamic_rnn_val)
for i in range(1, len(gradients_val)):
self.assertAllClose(gradients_dynamic_rnn_val[i], gradients_val[i])
def testRawRNNZeroLength(self):
    # NOTE: With 0 time steps, raw_rnn has no shape information about the
    # input, so the gradient evaluation would fail and gradient comparisons
    # are impossible. This case therefore skips the gradients test.
self._testRawRNN(max_time=0)
def testRawRNN(self):
self._testRawRNN(max_time=10)
def testLoopState(self):
with self.test_session(graph=tf.Graph()):
max_time = 10
batch_size = 16
input_depth = 4
num_units = 3
inputs = np.random.randn(max_time, batch_size, input_depth)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
def loop_fn(time_, cell_output, cell_state, loop_state):
if cell_output is None:
loop_state = tf.constant([0])
next_state = cell.zero_state(batch_size, tf.float32)
else:
loop_state = tf.pack([tf.squeeze(loop_state) + 1])
next_state = cell_state
emit_output = cell_output # == None for time == 0
elements_finished = tf.tile([time_ >= max_time], [batch_size])
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input,
next_state, emit_output, loop_state)
r = tf.nn.raw_rnn(cell, loop_fn)
loop_state = r[-1]
self.assertEqual([10], loop_state.eval())
def testLoopStateWithTensorArray(self):
with self.test_session(graph=tf.Graph()):
max_time = 4
batch_size = 16
input_depth = 4
num_units = 3
inputs = np.random.randn(max_time, batch_size, input_depth)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
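      # The loop state is a TensorArray holding a running total: entry t
      # equals entry t-1 plus t, giving [1, 2, 4, 7, 11] for max_time = 4.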
def loop_fn(time_, cell_output, cell_state, loop_state):
if cell_output is None:
loop_state = tf.TensorArray(
dynamic_size=True, size=0, dtype=tf.int32, clear_after_read=False)
loop_state = loop_state.write(0, 1)
next_state = cell.zero_state(batch_size, tf.float32)
else:
loop_state = loop_state.write(
time_, loop_state.read(time_ - 1) + time_)
next_state = cell_state
emit_output = cell_output # == None for time == 0
elements_finished = tf.tile([time_ >= max_time], [batch_size])
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input,
next_state, emit_output, loop_state)
r = tf.nn.raw_rnn(cell, loop_fn)
loop_state = r[-1]
loop_state = loop_state.pack()
self.assertAllEqual([1, 2, 2 + 2, 4 + 3, 7 + 4], loop_state.eval())
def testEmitDifferentStructureThanCellOutput(self):
with self.test_session(graph=tf.Graph()) as sess:
max_time = 10
batch_size = 16
input_depth = 4
num_units = 3
inputs = np.random.randn(max_time, batch_size, input_depth)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
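      # On the first call, emit_output supplies per-example shape/dtype
      # templates (int32 [2, 3] and int64 [1]); subsequent calls emit batched
      # tensors matching those templates rather than the LSTM's own output.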
def loop_fn(time_, cell_output, cell_state, _):
if cell_output is None:
emit_output = (tf.zeros([2, 3], dtype=tf.int32),
tf.zeros([1], dtype=tf.int64))
next_state = cell.zero_state(batch_size, tf.float32)
else:
emit_output = (tf.ones([batch_size, 2, 3], dtype=tf.int32),
tf.ones([batch_size, 1], dtype=tf.int64))
next_state = cell_state
elements_finished = tf.tile([time_ >= max_time], [batch_size])
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input, next_state, emit_output, None)
r = tf.nn.raw_rnn(cell, loop_fn)
output_ta = r[0]
self.assertEqual(2, len(output_ta))
self.assertEqual([tf.int32, tf.int64], [ta.dtype for ta in output_ta])
output = [ta.pack() for ta in output_ta]
output_vals = sess.run(output)
self.assertAllEqual(
np.ones((max_time, batch_size, 2, 3), np.int32), output_vals[0])
self.assertAllEqual(
np.ones((max_time, batch_size, 1), np.int64), output_vals[1])
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
tf.initialize_all_variables()
      # Check that all the variable names start
      # with the proper scope.
all_vars = tf.all_variables()
prefix = prefix or "RNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("RNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testRawRNNScope(self):
max_time = 10
batch_size = 16
input_depth = 4
num_units = 3
def factory(scope):
inputs = tf.placeholder(shape=(max_time, batch_size, input_depth),
dtype=tf.float32)
sequence_length = tf.placeholder(shape=(batch_size,), dtype=tf.int32)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
def loop_fn(time_, cell_output, cell_state, unused_loop_state):
emit_output = cell_output # == None for time == 0
if cell_output is None: # time == 0
next_state = cell.zero_state(batch_size, tf.float32)
else:
next_state = cell_state
elements_finished = (time_ >= sequence_length)
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input, next_state, emit_output, None)
return tf.nn.raw_rnn(cell, loop_fn, scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
class StateSaverRNNTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
tf.initialize_all_variables()
      # Check that all the variable names start
      # with the proper scope.
all_vars = tf.all_variables()
prefix = prefix or "RNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("RNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testStateSaverRNNScope(self):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
def factory(scope):
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
state_saver = TestStateSaver(batch_size, 2 * num_units)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=False, initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
return tf.nn.state_saving_rnn(
cell, inputs, state_saver=state_saver,
state_name="save_lstm", scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
######### Benchmarking RNN code
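# The helpers below build comparable graphs for each pair of variants; the
# *_benchmark functions then time either graph construction (via timeit) or
# repeated session runs (via _timer) and print both timings plus their ratio.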
def _static_vs_dynamic_rnn_benchmark_static(inputs_list_t, sequence_length):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True, initializer=initializer,
state_is_tuple=False)
outputs, final_state = tf.nn.rnn(
cell, inputs_list_t, sequence_length=sequence_length, dtype=tf.float32)
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients(outputs + [final_state], trainable_variables)
return tf.group(final_state, *(gradients + outputs))
def _static_vs_dynamic_rnn_benchmark_dynamic(inputs_t, sequence_length):
(unused_0, unused_1, input_size) = inputs_t.get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True, initializer=initializer,
state_is_tuple=False)
outputs, final_state = tf.nn.dynamic_rnn(
cell, inputs_t, sequence_length=sequence_length, dtype=tf.float32)
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients([outputs, final_state], trainable_variables)
return tf.group(final_state, outputs, *gradients)
def graph_creation_static_vs_dynamic_rnn_benchmark(max_time):
config = tf.ConfigProto()
config.allow_soft_placement = True
# These parameters don't matter
batch_size = 512
num_units = 512
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
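  # Only graph construction is timed here; the sessions are never run, so the
  # constructed `ops` are intentionally left unused.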
def _create_static_rnn():
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _static_vs_dynamic_rnn_benchmark_static(
inputs_list_t, sequence_length)
def _create_dynamic_rnn():
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _static_vs_dynamic_rnn_benchmark_dynamic(
inputs_t, sequence_length)
delta_static = timeit.timeit(_create_static_rnn, number=5)
delta_dynamic = timeit.timeit(_create_dynamic_rnn, number=5)
print("%d \t %f \t %f \t %f" %
(max_time, delta_static, delta_dynamic, delta_dynamic/delta_static))
return delta_static, delta_dynamic
def _timer(sess, ops):
  # Warm-up runs before timing
for _ in range(2):
sess.run(ops)
# Timing run
runs = 20
start = time.time()
for _ in range(runs):
sess.run(ops)
end = time.time()
return (end - start)/float(runs)
def static_vs_dynamic_rnn_benchmark(batch_size, max_time, num_units, use_gpu):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
# Using rnn()
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _static_vs_dynamic_rnn_benchmark_static(
inputs_list_t, sequence_length)
tf.initialize_all_variables().run()
delta_static = _timer(sess, ops)
# Using dynamic_rnn()
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _static_vs_dynamic_rnn_benchmark_dynamic(
inputs_t, sequence_length)
tf.initialize_all_variables().run()
delta_dynamic = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t %f \t %f" %
(batch_size, max_time, num_units, use_gpu, delta_static,
delta_dynamic, delta_dynamic/delta_static))
return delta_static, delta_dynamic
def _half_seq_len_vs_unroll_half_rnn_benchmark(inputs_list_t, sequence_length):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True, initializer=initializer,
state_is_tuple=False)
outputs, final_state = tf.nn.rnn(
cell, inputs_list_t, sequence_length=sequence_length, dtype=tf.float32)
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients(outputs + [final_state], trainable_variables)
return tf.group(final_state, *(gradients + outputs))
def half_seq_len_vs_unroll_half_rnn_benchmark(
batch_size, max_time, num_units, use_gpu):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = max_time * np.ones((batch_size,))
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
# Halve the sequence length, full static unroll
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _half_seq_len_vs_unroll_half_rnn_benchmark(
inputs_list_t, sequence_length / 2)
tf.initialize_all_variables().run()
delta_half_seq_len = _timer(sess, ops)
# Halve the unroll size, don't use sequence length
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _half_seq_len_vs_unroll_half_rnn_benchmark(
inputs_list_t[:(max_time // 2)], sequence_length / 2)
tf.initialize_all_variables().run()
delta_unroll_half = _timer(sess, ops)
print("%d \t %d \t\t %d \t %s \t %f \t\t %f \t\t %f" %
(batch_size, max_time, num_units, use_gpu, delta_half_seq_len,
delta_unroll_half, delta_half_seq_len/delta_unroll_half))
return delta_half_seq_len, delta_unroll_half
def _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True,
initializer=initializer, state_is_tuple=state_is_tuple)
outputs, final_state = tf.nn.rnn(
cell, inputs_list_t, sequence_length=sequence_length, dtype=tf.float32)
final_state = list(final_state) if state_is_tuple else [final_state]
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients(outputs + final_state, trainable_variables)
return tf.group(*(final_state + gradients + outputs))
def concat_state_vs_tuple_state_rnn_benchmark(
batch_size, max_time, num_units, use_gpu):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = max_time * np.ones((batch_size,))
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
# Run with concatenated states (default)
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple=False)
tf.initialize_all_variables().run()
delta_concat_state = _timer(sess, ops)
# Run with tuple states (new)
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple=True)
tf.initialize_all_variables().run()
delta_tuple_state = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t\t %f \t\t %f" %
(batch_size, max_time, num_units, use_gpu, delta_concat_state,
delta_tuple_state, delta_concat_state/delta_tuple_state))
return delta_concat_state, delta_tuple_state
def _dynamic_rnn_swap_memory_benchmark(inputs_t, sequence_length,
swap_memory):
(unused_0, unused_1, input_size) = inputs_t.get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True, initializer=initializer,
state_is_tuple=False)
outputs, final_state = tf.nn.dynamic_rnn(
cell, inputs_t, sequence_length=sequence_length,
swap_memory=swap_memory, dtype=tf.float32)
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients([outputs, final_state], trainable_variables)
return tf.group(final_state, outputs, *gradients)
def dynamic_rnn_swap_memory_benchmark(batch_size, max_time, num_units):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
# No memory swap
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=False)
tf.initialize_all_variables().run()
no_swap = _timer(sess, ops)
# Memory swap
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=True)
tf.initialize_all_variables().run()
swap = _timer(sess, ops)
print("%d \t %d \t %d \t %f \t %f \t %f" %
(batch_size, max_time, num_units, no_swap, swap, swap/no_swap))
return no_swap, swap
def rnn_long_sequence_benchmark(batch_size, seqlen, num_units,
dynamic, swap_memory):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = [seqlen for _ in range(batch_size)]
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(seqlen)]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
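  # Repeat the measurement a few times so run-to-run variance shows up in the
  # printed output.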
for _ in range(5):
if dynamic:
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=swap_memory)
tf.initialize_all_variables().run()
elapsed = _timer(sess, ops)
else:
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _static_vs_dynamic_rnn_benchmark_static(
inputs_list_t, sequence_length)
tf.initialize_all_variables().run()
elapsed = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t %f" %
(batch_size, seqlen, num_units, dynamic, elapsed,
elapsed/seqlen))
class BenchmarkRNN(tf.test.Benchmark):
def benchmarkGraphCreationStaticVsDynamicLSTM(self):
print("Graph Creation: Static Unroll vs. Dynamic Unroll LSTM")
print("max_t \t dt(static) \t dt(dynamic) \t dt(dynamic)/dt(static)")
for max_time in (1, 25, 50):
s_dt, d_dt = graph_creation_static_vs_dynamic_rnn_benchmark(max_time)
self.report_benchmark(name="graph_creation_time_static_T%02d" % max_time,
iters=5, wall_time=s_dt)
self.report_benchmark(name="graph_creation_time_dynamic_T%02d" % max_time,
iters=5, wall_time=d_dt)
def benchmarkStaticUnrollVsDynamicFlowLSTM(self):
print("Calculation: Static Unroll with Dynamic Flow LSTM "
"vs. Dynamic Unroll LSTM")
print("batch \t max_t \t units \t gpu \t dt(static) \t dt(dynamic) "
"\t dt(dynamic)/dt(static)")
for batch_size in (256,):
for max_time in (50,):
for num_units in (512, 256, 128):
for use_gpu in (False, True):
s_dt, d_dt = static_vs_dynamic_rnn_benchmark(
batch_size, max_time, num_units, use_gpu)
self.report_benchmark(
name="static_unroll_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=s_dt)
self.report_benchmark(
name="dynamic_unroll_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=d_dt)
def benchmarkDynamicLSTMNoMemorySwapVsMemorySwap(self):
print("Calculation: Dynamic LSTM No Memory Swap vs. Memory Swap")
print("batch \t max_t \t units \t no_swap \t swap \t swap/no_swap")
for batch_size in (256, 512):
for max_time in (100,):
for num_units in (512, 256, 128):
no_swap, swap = dynamic_rnn_swap_memory_benchmark(
batch_size, max_time, num_units)
self.report_benchmark(
name="dynamic_lstm_no_memory_swap_T%02d_B%03d_N%03d"
% (max_time, batch_size, num_units),
iters=20, wall_time=no_swap)
self.report_benchmark(
name="dynamic_lstm_with_memory_swap_T%02d_B%03d_N%03d"
% (max_time, batch_size, num_units),
iters=20, wall_time=swap)
def benchmarkStaticUnrollHalfSequenceLengthVsHalfUnroll(self):
print("Calculation: Static Unroll with Halved Sequence Length "
"vs. Half Static Unroll")
print("batch \t full_t \t units \t gpu \t dt(half_seq_len) "
"\t dt(unroll_half) \t dt(half_seq_len)/dt(unroll_half)")
for batch_size in (128,):
for max_time in (50,):
for num_units in (256,):
for use_gpu in (False, True):
s_dt, d_dt = half_seq_len_vs_unroll_half_rnn_benchmark(
batch_size, max_time, num_units, use_gpu)
self.report_benchmark(
name="half_seq_len_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=s_dt)
self.report_benchmark(
name="unroll_half_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=d_dt)
def benchmarkStaticUnrollStateConcatVsStateTuple(self):
print("Calculation: Static Unroll with Concatenated State "
"vs. Tuple State")
print("batch \t time \t units \t gpu \t dt(concat_state) "
"\t dt(tuple_state) \t dt(concat_state)/dt(tuple_state)")
for batch_size in (16, 128,):
for max_time in (50,):
for num_units in (16, 128,):
for use_gpu in (False, True):
c_dt, t_dt = concat_state_vs_tuple_state_rnn_benchmark(
batch_size, max_time, num_units, use_gpu)
self.report_benchmark(
name="concat_state_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=c_dt)
self.report_benchmark(
name="tuple_state_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=t_dt)
if __name__ == "__main__":
tf.test.main()<|fim▁end|> | |
<|file_name|>census.rs<|end_file_name|><|fim▁begin|>use crate::error::Error;
use habitat_butterfly::{member::{Health,
Member,
MemberList,
Membership},
rumor::{election::{Election as ElectionRumor,
ElectionStatus as ElectionStatusRumor,
ElectionUpdate as ElectionUpdateRumor},
service::{Service as ServiceRumor,
SysInfo},
service_config::ServiceConfig as ServiceConfigRumor,
service_file::ServiceFile as ServiceFileRumor,
ConstIdRumor as _,
RumorStore}};
use habitat_common::outputln;
use habitat_core::{self,
package::PackageIdent,
service::ServiceGroup};
use serde::{ser::SerializeStruct,
Serialize,
Serializer};
use std::{borrow::Cow,
collections::{BTreeMap,
HashMap,
HashSet},
fmt,
path::Path,
result,
str::FromStr};
static LOGKEY: &str = "CE";
pub type MemberId = String;
#[derive(Debug, Serialize)]
pub struct CensusRing {
changed: bool,
census_groups: HashMap<ServiceGroup, CensusGroup>,
local_member_id: MemberId,
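    // Rumor counters from the last update pass; the census is rebuilt only
    // when one of these falls behind its RumorStore/MemberList counterpart.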
last_service_counter: usize,
last_election_counter: usize,
last_election_update_counter: usize,
last_membership_counter: usize,
last_service_config_counter: usize,
last_service_file_counter: usize,
}
impl CensusRing {
/// Indicates whether the census has changed since the last time
/// we looked at rumors.
pub fn changed(&self) -> bool { self.changed }
pub fn new<I>(local_member_id: I) -> Self
where I: Into<MemberId>
{
CensusRing { changed: false,
census_groups: HashMap::new(),
local_member_id: local_member_id.into(),
last_service_counter: 0,
last_election_counter: 0,
last_election_update_counter: 0,
last_membership_counter: 0,
last_service_config_counter: 0,
last_service_file_counter: 0, }
}
/// # Locking (see locking.md)
/// * `RumorStore::list` (write)
/// * `MemberList::entries` (read)
#[allow(clippy::too_many_arguments)]
pub fn update_from_rumors_rsr_mlr(&mut self,
cache_key_path: &Path,
service_rumors: &RumorStore<ServiceRumor>,
election_rumors: &RumorStore<ElectionRumor>,
election_update_rumors: &RumorStore<ElectionUpdateRumor>,
member_list: &MemberList,
service_config_rumors: &RumorStore<ServiceConfigRumor>,
service_file_rumors: &RumorStore<ServiceFileRumor>) {
// If ANY new rumor, of any type, has been received,
// reconstruct the entire census state to ensure consistency
if (service_rumors.get_update_counter() > self.last_service_counter)
|| (member_list.get_update_counter() > self.last_membership_counter)
|| (election_rumors.get_update_counter() > self.last_election_counter)
|| (election_update_rumors.get_update_counter() > self.last_election_update_counter)
|| (service_config_rumors.get_update_counter() > self.last_service_config_counter)
|| (service_file_rumors.get_update_counter() > self.last_service_file_counter)
{
self.changed = true;
self.populate_census_rsr_mlr(service_rumors, member_list);
self.update_from_election_store_rsr(election_rumors);
self.update_from_election_update_store_rsr(election_update_rumors);
self.update_from_service_config_rsr(cache_key_path, service_config_rumors);
self.update_from_service_files_rsr(cache_key_path, service_file_rumors);
// Update our counters to reflect current state.
self.last_membership_counter = member_list.get_update_counter();
self.last_service_counter = service_rumors.get_update_counter();
self.last_election_counter = election_rumors.get_update_counter();
self.last_election_update_counter = election_update_rumors.get_update_counter();
self.last_service_config_counter = service_config_rumors.get_update_counter();
self.last_service_file_counter = service_file_rumors.get_update_counter();
} else {
self.changed = false;
}
}
pub fn census_group_for(&self, sg: &ServiceGroup) -> Option<&CensusGroup> {
self.census_groups.get(sg)
}
    pub fn groups(&self) -> Vec<&CensusGroup> { self.census_groups.values().collect() }
/// Populates the census from `ServiceRumor`s and Butterfly-level
/// membership lists.
///
/// (Butterfly provides the health, the ServiceRumors provide the
/// rest).
///
/// # Locking (see locking.md)
/// * `RumorStore::list` (read)
/// * `MemberList::entries` (read)
fn populate_census_rsr_mlr(&mut self,
service_rumors: &RumorStore<ServiceRumor>,
member_list: &MemberList) {
// Populate our census; new groups are created here, as are
// new members of those groups.
//
// NOTE: In the current implementation, these members have an
// indeterminate health status until we process the contents
// of `member_list`. In the future, it would be nice to
// incorporate the member list into
// `census_group.update_from_service_rumors`, where new census
// members are created, so there would be no time that there
// is an indeterminate health anywhere.
for (service_group, rumors) in service_rumors.lock_rsr().iter() {
if let Ok(sg) = service_group_from_str(service_group) {
let local_member_id = Cow::from(&self.local_member_id);
let census_group = self.census_groups
.entry(sg.clone())
.or_insert_with(|| CensusGroup::new(sg, &local_member_id));
census_group.update_from_service_rumors(rumors);
}
}
member_list.with_memberships_mlr(|Membership { member, health }| {
for group in self.census_groups.values_mut() {
if let Some(census_member) = group.find_member_mut(&member.id) {
census_member.update_from_member(&member);
census_member.update_from_health(health);
}
}
Ok(())
})
.ok();
}
/// # Locking (see locking.md)
/// * `RumorStore::list` (read)
fn update_from_election_store_rsr(&mut self, election_rumors: &RumorStore<ElectionRumor>) {
for (service_group, rumors) in election_rumors.lock_rsr().iter() {
let election = rumors.get(ElectionRumor::const_id()).unwrap();
if let Ok(sg) = service_group_from_str(service_group) {
if let Some(census_group) = self.census_groups.get_mut(&sg) {
census_group.update_from_election_rumor(election);
                }
            }
        }
    }
/// # Locking (see locking.md)
/// * `RumorStore::list` (read)
fn update_from_election_update_store_rsr(&mut self,
election_update_rumors: &RumorStore<ElectionUpdateRumor>)
{
for (service_group, rumors) in election_update_rumors.lock_rsr().iter() {
if let Ok(sg) = service_group_from_str(service_group) {
if let Some(census_group) = self.census_groups.get_mut(&sg) {
let election = rumors.get(ElectionUpdateRumor::const_id()).unwrap();
census_group.update_from_election_update_rumor(election);
}
}
}
}
/// # Locking (see locking.md)
/// * `RumorStore::list` (read)
fn update_from_service_config_rsr(&mut self,
cache_key_path: &Path,
service_config_rumors: &RumorStore<ServiceConfigRumor>) {
for (service_group, rumors) in service_config_rumors.lock_rsr().iter() {
if let Ok(sg) = service_group_from_str(service_group) {
if let Some(service_config) = rumors.get(ServiceConfigRumor::const_id()) {
if let Some(census_group) = self.census_groups.get_mut(&sg) {
census_group.update_from_service_config_rumor(cache_key_path,
service_config);
}
}
}
}
}
/// # Locking (see locking.md)
/// * `RumorStore::list` (read)
fn update_from_service_files_rsr(&mut self,
cache_key_path: &Path,
service_file_rumors: &RumorStore<ServiceFileRumor>) {
for (service_group, rumors) in service_file_rumors.lock_rsr().iter() {
if let Ok(sg) = service_group_from_str(service_group) {
let local_member_id = Cow::from(&self.local_member_id);
let census_group = self.census_groups
.entry(sg.clone())
.or_insert_with(|| CensusGroup::new(sg, &local_member_id));
census_group.update_from_service_file_rumors(cache_key_path, rumors);
}
}
}
}
/// This is a proxy struct to represent what information we're writing to the dat file, and
/// therefore what information gets sent out via the HTTP API. Right now, we're just wrapping the
/// actual CensusRing struct, but this will give us something we can refactor against without
/// worrying about breaking the data returned to users.
pub struct CensusRingProxy<'a>(&'a CensusRing);
impl<'a> CensusRingProxy<'a> {
    pub fn new(c: &'a CensusRing) -> Self { CensusRingProxy(c) }
}
impl<'a> Serialize for CensusRingProxy<'a> {
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
where S: Serializer
{
let mut strukt = serializer.serialize_struct("census_ring", 9)?;
strukt.serialize_field("changed", &self.0.changed)?;
strukt.serialize_field("census_groups", &self.0.census_groups)?;
strukt.serialize_field("local_member_id", &self.0.local_member_id)?;
strukt.serialize_field("last_service_counter", &self.0.last_service_counter)?;
strukt.serialize_field("last_election_counter", &self.0.last_election_counter)?;
strukt.serialize_field("last_election_update_counter",
&self.0.last_election_update_counter)?;
strukt.serialize_field("last_membership_counter", &self.0.last_membership_counter)?;
strukt.serialize_field("last_service_config_counter",
&self.0.last_service_config_counter)?;
strukt.serialize_field("last_service_file_counter",
&self.0.last_service_file_counter)?;
strukt.end()
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
pub enum ElectionStatus {
None,
ElectionInProgress,
ElectionNoQuorum,
ElectionFinished,
}
impl Default for ElectionStatus {
fn default() -> ElectionStatus { ElectionStatus::None }
}
impl fmt::Display for ElectionStatus {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let value = match *self {
ElectionStatus::ElectionInProgress => "in-progress",
ElectionStatus::ElectionNoQuorum => "no-quorum",
ElectionStatus::ElectionFinished => "finished",
ElectionStatus::None => "none",
};
write!(f, "{}", value)
}
}
impl FromStr for ElectionStatus {
type Err = Error;
fn from_str(value: &str) -> Result<Self, Self::Err> {
match value.to_lowercase().as_ref() {
"in-progress" => Ok(ElectionStatus::ElectionInProgress),
"no-quorum" => Ok(ElectionStatus::ElectionNoQuorum),
"finished" => Ok(ElectionStatus::ElectionFinished),
"none" => Ok(ElectionStatus::None),
_ => Err(Error::BadElectionStatus(value.to_string())),
}
}
}
impl From<ElectionStatusRumor> for ElectionStatus {
fn from(val: ElectionStatusRumor) -> ElectionStatus {
match val {
ElectionStatusRumor::Running => ElectionStatus::ElectionInProgress,
ElectionStatusRumor::NoQuorum => ElectionStatus::ElectionNoQuorum,
ElectionStatusRumor::Finished => ElectionStatus::ElectionFinished,
}
}
}
#[derive(Debug, Default, Serialize)]
pub struct ServiceFile {
pub filename: String,
pub incarnation: u64,
pub body: Vec<u8>,
}
#[derive(Debug, Serialize)]
pub struct ServiceConfig {
pub incarnation: u64,
pub value: toml::value::Table,
}
#[derive(Debug)]
pub struct CensusGroup {
pub service_group: ServiceGroup,
pub election_status: ElectionStatus,
pub update_election_status: ElectionStatus,
pub leader_id: Option<MemberId>,
pub service_config: Option<ServiceConfig>,
local_member_id: MemberId,
population: BTreeMap<MemberId, CensusMember>,
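    // BTreeMap keeps the population ordered by member id, which
    // previous_peer() relies on for its notion of "the peer to your left".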
update_leader_id: Option<MemberId>,
changed_service_files: Vec<String>,
service_files: HashMap<String, ServiceFile>,
}
impl CensusGroup {
fn new(sg: ServiceGroup, local_member_id: &str) -> Self {
CensusGroup { service_group: sg,
election_status: ElectionStatus::None,
update_election_status: ElectionStatus::None,
local_member_id: local_member_id.to_string(),
population: BTreeMap::new(),
leader_id: None,
update_leader_id: None,
service_config: None,
service_files: HashMap::new(),
changed_service_files: Vec::new(), }
}
/// Returns the census member in the census ring for the running Supervisor.
pub fn me(&self) -> Option<&CensusMember> { self.population.get(&self.local_member_id) }
pub fn leader(&self) -> Option<&CensusMember> {
match self.leader_id {
Some(ref id) => self.population.get(id),
None => None,
}
}
pub fn update_leader(&self) -> Option<&CensusMember> {
match self.update_leader_id {
Some(ref id) => self.population.get(id),
None => None,
}
}
/// Returns a list of all members in the census ring.
pub fn members(&self) -> impl Iterator<Item = &CensusMember> { self.population.values() }
/// Same as `members`, but only returns members that are either
/// alive or suspect, i.e., nothing that is confirmed dead or
/// departed. These are the members that we'll reasonably be
/// interacting with at runtime.
pub fn active_members(&self) -> impl Iterator<Item = &CensusMember> {
self.population
.values()
.filter(|cm| cm.alive() || cm.suspect())
}
pub fn changed_service_files(&self) -> Vec<&ServiceFile> {
self.changed_service_files
.iter()
.map(|f| &self.service_files[f])
.collect()
}
/// Return previous alive peer, the peer to your left in the ordered members list, or None if
/// you have no alive peers.
// XXX: Is me ever None or not Alive?
// XXX: Should we include Suspect members too, or only strictly Alive ones?
pub fn previous_peer(&self) -> Option<&CensusMember> {
self.me()
.and_then(|me| Self::previous_peer_impl(self.population.values(), me))
}
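    // Walk the members in order, remembering the last alive member seen;
    // when we reach ourselves, that member is our previous peer. If we were
    // first (nothing remembered yet), wrap around to the last alive member.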
fn previous_peer_impl<'a>(members: impl Iterator<Item = &'a CensusMember>,
me: &CensusMember)
-> Option<&'a CensusMember> {
let mut alive_members = members.filter(|cm| cm.alive());
let mut previous = None;
for member in alive_members.by_ref() {
if member.member_id == me.member_id {
return previous.or_else(|| alive_members.last());
} else {
previous = Some(member);
}
}
None
}
fn update_from_service_rumors(&mut self, rumors: &HashMap<String, ServiceRumor>) {
for (member_id, service_rumor) in rumors.iter() {
// Yeah - we are ourself - we're alive.
let is_self = member_id == &self.local_member_id;
let member = self.population
.entry(member_id.to_string())
.or_insert_with(|| {
// Note: this is where CensusMembers are created
let mut new_member = CensusMember::default();
new_member.alive = is_self;
new_member
});
member.update_from_service_rumor(&self.service_group, service_rumor);
}
}
fn update_from_election_rumor(&mut self, election: &ElectionRumor) {
self.leader_id = None;
for census_member in self.population.values_mut() {
if census_member.update_from_election_rumor(election) {
self.leader_id = Some(census_member.member_id.clone());
}
}
match election.status {
ElectionStatusRumor::Running => {
self.election_status = ElectionStatus::ElectionInProgress;
}
ElectionStatusRumor::NoQuorum => {
self.election_status = ElectionStatus::ElectionNoQuorum;
}
ElectionStatusRumor::Finished => {
self.election_status = ElectionStatus::ElectionFinished;
}
}
}
fn update_from_election_update_rumor(&mut self, election: &ElectionUpdateRumor) {
self.update_leader_id = None;
for census_member in self.population.values_mut() {
if census_member.update_from_election_update_rumor(election) {
self.update_leader_id = Some(census_member.member_id.clone());
}
}
match election.status {
ElectionStatusRumor::Running => {
self.update_election_status = ElectionStatus::ElectionInProgress;
}
ElectionStatusRumor::NoQuorum => {
self.update_election_status = ElectionStatus::ElectionNoQuorum;
}
ElectionStatusRumor::Finished => {
self.update_election_status = ElectionStatus::ElectionFinished;
}
}
}
fn update_from_service_config_rumor(&mut self,
cache_key_path: &Path,
service_config: &ServiceConfigRumor) {
match service_config.config(cache_key_path) {
Ok(config) => {
if self.service_config.is_none()
|| service_config.incarnation > self.service_config.as_ref().unwrap().incarnation
{
self.service_config = Some(ServiceConfig { incarnation:
service_config.incarnation,
value: config, });
}
}
Err(err) => warn!("{}", err),
}
}
fn update_from_service_file_rumors(&mut self,
cache_key_path: &Path,
service_file_rumors: &HashMap<String, ServiceFileRumor>)
{
self.changed_service_files.clear();
        for service_file_rumor in service_file_rumors.values() {
let filename = service_file_rumor.filename.to_string();
let file = self.service_files
.entry(filename.clone())
.or_insert_with(ServiceFile::default);
if service_file_rumor.incarnation > file.incarnation {
match service_file_rumor.body(cache_key_path) {
Ok(body) => {
self.changed_service_files.push(filename.clone());
file.filename = filename.clone();
file.incarnation = service_file_rumor.incarnation;
file.body = body;
}
Err(e) => {
warn!("Cannot decrypt service file for {} {} {}: {}",
self.service_group,
service_file_rumor.filename,
service_file_rumor.incarnation,
e)
}
}
}
}
}
fn find_member_mut(&mut self, member_id: &str) -> Option<&mut CensusMember> {
self.population.get_mut(member_id)
}
/// Determine what configuration keys the group as a whole
/// exports. Returns a set of the top-level exported keys.
///
/// This implementation is a righteous hack to cover the fact that
/// there is not yet a centralized view of what a "group" actually
/// exports! There has been some talk of having a "leader" role in
/// all topologies, in which case we could just ask the leader
/// what the group exports. Until that time, the best we can do is
/// ask an active member what *they* export (if there is a leader,
/// though, we'll just ask them).
pub fn group_exports<'a>(&'a self) -> Result<HashSet<&'a String>, Error> {
self.leader()
.or_else(|| self.active_members().next())
.ok_or_else(|| Error::NoActiveMembers(self.service_group.clone()))
.map(|m| m.cfg.keys().collect())
}
}
impl Serialize for CensusGroup {
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
where S: Serializer
{
let mut strukt = serializer.serialize_struct("census_group", 10)?;
strukt.serialize_field("service_group", &self.service_group)?;
strukt.serialize_field("election_status", &self.election_status)?;
strukt.serialize_field("update_election_status", &self.update_election_status)?;
strukt.serialize_field("leader_id", &self.leader_id)?;
strukt.serialize_field("service_config", &self.service_config)?;
strukt.serialize_field("local_member_id", &self.local_member_id)?;
let new_pop: BTreeMap<MemberId, CensusMemberProxy<'_>> =
self.population
.iter()
.map(|(k, v)| (k.clone(), CensusMemberProxy::new(v)))
.collect();
strukt.serialize_field("population", &new_pop)?;
strukt.serialize_field("update_leader_id", &self.update_leader_id)?;
strukt.serialize_field("changed_service_files", &self.changed_service_files)?;
strukt.serialize_field("service_files", &self.service_files)?;
strukt.end()
}
}
// User-facing documentation is available at
// https://www.habitat.sh/docs/reference/#template-data; update that
// as required.
#[derive(Clone, Debug, Default, Serialize)]
pub struct CensusMember {
pub member_id: MemberId,
pub pkg: PackageIdent,
pub service: String,
pub group: String,
pub org: Option<String>,
pub persistent: bool,
pub leader: bool,
pub follower: bool,
pub update_leader: bool,
pub update_follower: bool,
pub election_is_running: bool,
pub election_is_no_quorum: bool,
pub election_is_finished: bool,
pub update_election_is_running: bool,
pub update_election_is_no_quorum: bool,
pub update_election_is_finished: bool,
pub sys: SysInfo,
pub alive: bool,
pub suspect: bool,
pub confirmed: bool,
pub departed: bool,
// Maps must be represented last in a serializable struct for the current version of the toml
// crate. Additionally, this deserialization method is required to correct any ordering issues
// with the table being serialized - https://docs.rs/toml/0.4.0/toml/ser/fn.tables_last.html
#[serde(serialize_with = "toml::ser::tables_last")]
pub cfg: toml::value::Table,
}
impl CensusMember {
fn update_from_service_rumor(&mut self, sg: &ServiceGroup, rumor: &ServiceRumor) {
self.member_id = rumor.member_id.to_string();
self.service = sg.service().to_string();
self.group = sg.group().to_string();
if let Some(org) = sg.org() {
self.org = Some(org.to_string());
}
match PackageIdent::from_str(&rumor.pkg) {
Ok(ident) => self.pkg = ident,
Err(err) => warn!("Received a bad package ident from gossip data, err={}", err),
};
self.sys = rumor.sys.clone();
self.cfg = toml::from_slice(&rumor.cfg).unwrap_or_default();
}
fn update_from_election_rumor(&mut self, election: &ElectionRumor) -> bool {
self.election_is_running = election.status == ElectionStatusRumor::Running;
self.election_is_no_quorum = election.status == ElectionStatusRumor::NoQuorum;
self.election_is_finished = election.status == ElectionStatusRumor::Finished;
if self.election_is_finished {
if self.member_id == election.member_id {
self.leader = true;
self.follower = false;
} else {
self.leader = false;
self.follower = true;
}
}
self.leader
}
fn update_from_election_update_rumor(&mut self, election: &ElectionUpdateRumor) -> bool {
self.update_election_is_running = election.status == ElectionStatusRumor::Running;
self.update_election_is_no_quorum = election.status == ElectionStatusRumor::NoQuorum;
self.update_election_is_finished = election.status == ElectionStatusRumor::Finished;
if self.update_election_is_finished {
if self.member_id == election.member_id {
self.update_leader = true;
self.update_follower = false;
} else {
self.update_leader = false;
self.update_follower = true;
}
}
self.update_leader
}
fn update_from_member(&mut self, member: &Member) {
self.sys.gossip_ip = member.address.to_string();
self.sys.gossip_port = u32::from(member.gossip_port);
self.persistent = true;
}
fn update_from_health(&mut self, health: Health) {
self.alive = false;
self.suspect = false;
self.confirmed = false;
self.departed = false;
match health {
Health::Alive => self.alive = true,
Health::Suspect => self.suspect = true,
Health::Confirmed => self.confirmed = true,
Health::Departed => self.departed = true,
}
}
/// Is this member currently considered to be alive or not?
pub fn alive(&self) -> bool { self.alive }
pub fn suspect(&self) -> bool { self.suspect }
pub fn confirmed(&self) -> bool { self.confirmed }
pub fn departed(&self) -> bool { self.departed }
}
/// This data structure just wraps the CensusMember and allows us to tweak the serialization logic.
#[derive(Debug, Clone)]
pub struct CensusMemberProxy<'a>(Cow<'a, CensusMember>);
impl<'a> CensusMemberProxy<'a> {
    pub fn new(c: &'a CensusMember) -> Self { CensusMemberProxy(Cow::Borrowed(c)) }
#[cfg(test)]
pub fn new_owned(c: CensusMember) -> Self { CensusMemberProxy(Cow::Owned(c)) }
#[cfg(test)]
pub fn to_mut(&mut self) -> &mut CensusMember { self.0.to_mut() }
}
impl std::ops::Deref for CensusMemberProxy<'_> {
type Target = CensusMember;
fn deref(&self) -> &Self::Target { &(self.0) }
}
impl<'a> Serialize for CensusMemberProxy<'a> {
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
where S: Serializer
{
let mut strukt = serializer.serialize_struct("census_member", 24)?;
strukt.serialize_field("member_id", &self.member_id)?;
strukt.serialize_field("pkg", &self.pkg)?;
strukt.serialize_field("package", &self.pkg.to_string())?;
strukt.serialize_field("service", &self.service)?;
strukt.serialize_field("group", &self.group)?;
strukt.serialize_field("org", &self.org)?;
strukt.serialize_field("persistent", &self.persistent)?;
strukt.serialize_field("leader", &self.leader)?;
strukt.serialize_field("follower", &self.follower)?;
strukt.serialize_field("update_leader", &self.update_leader)?;
strukt.serialize_field("update_follower", &self.update_follower)?;
strukt.serialize_field("election_is_running", &self.election_is_running)?;
strukt.serialize_field("election_is_no_quorum", &self.election_is_no_quorum)?;
strukt.serialize_field("election_is_finished", &self.election_is_finished)?;
strukt.serialize_field("update_election_is_running",
&self.update_election_is_running)?;
strukt.serialize_field("update_election_is_no_quorum",
&self.update_election_is_no_quorum)?;
strukt.serialize_field("update_election_is_finished",
&self.update_election_is_finished)?;
strukt.serialize_field("sys", &self.sys)?;
strukt.serialize_field("alive", &self.alive)?;
strukt.serialize_field("suspect", &self.suspect)?;
strukt.serialize_field("confirmed", &self.confirmed)?;
strukt.serialize_field("departed", &self.departed)?;
strukt.serialize_field("cfg", &self.cfg)?;
strukt.end()
}
}
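/// Parses a `ServiceGroup` from its string form, logging the error before
/// propagating it so a malformed group is visible in the Supervisor output.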
fn service_group_from_str(sg: &str) -> Result<ServiceGroup, habitat_core::Error> {
ServiceGroup::from_str(sg).map_err(|e| {
outputln!("Malformed service group; cannot populate \
configuration data. Aborting.: {}",
e);
e
})
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test_helpers::*;
use habitat_butterfly::{member::{Health,
MemberList},
rumor::{election::{self,
Election as ElectionRumor,
ElectionUpdate as ElectionUpdateRumor},
service::{Service as ServiceRumor,
SysInfo},
service_config::ServiceConfig as ServiceConfigRumor,
service_file::ServiceFile as ServiceFileRumor,
RumorStore}};
use habitat_core::{fs::CACHE_KEY_PATH,
package::ident::PackageIdent,
service::ServiceGroup};
#[test]
fn update_from_rumors() {
let (ring, sg_one, sg_two) = test_census_ring();
let census_group_one = ring.census_group_for(&sg_one).unwrap();
assert!(census_group_one.me().is_none());
assert_eq!(census_group_one.leader().unwrap().member_id, "member-a");
assert!(census_group_one.update_leader().is_none());
let census_group_two = ring.census_group_for(&sg_two).unwrap();
assert_eq!(census_group_two.me().unwrap().member_id,
"member-b".to_string());
assert_eq!(census_group_two.update_leader().unwrap().member_id,
"member-b".to_string());
let mut members = census_group_two.members();
assert_eq!(members.next().unwrap().member_id, "member-a");
assert_eq!(members.next().unwrap().member_id, "member-b");
}
#[test]
fn census_ring_proxy_conforms_to_the_schema() {
let (ring, ..) = test_census_ring();
let crp = CensusRingProxy::new(&ring);
let json = serde_json::to_string(&crp).unwrap();
assert_valid(&json, "http_gateway_census_schema.json");
}
fn test_census_ring() -> (CensusRing, ServiceGroup, ServiceGroup) {
let mut sys_info = SysInfo::default();
sys_info.ip = "1.2.3.4".to_string();
sys_info.hostname = "hostname".to_string();
sys_info.gossip_ip = "0.0.0.0".to_string();
sys_info.gossip_port = 7777;
sys_info.http_gateway_ip = "0.0.0.0".to_string();
sys_info.http_gateway_port = 9631;
let pg_id = PackageIdent::new("starkandwayne",
"shield",
Some("0.10.4"),
Some("20170419115548"));
let sg_one = ServiceGroup::new("shield", "one", None).unwrap();
let service_store: RumorStore<ServiceRumor> = RumorStore::default();
let service_one = ServiceRumor::new("member-a".to_string(),
&pg_id,
sg_one.clone(),
sys_info.clone(),
None);
let sg_two = ServiceGroup::new("shield", "two", None).unwrap();
let service_two = ServiceRumor::new("member-b".to_string(),
&pg_id,
sg_two.clone(),
sys_info.clone(),
None);
let service_three = ServiceRumor::new("member-a".to_string(),
&pg_id,
sg_two.clone(),
sys_info,
None);
service_store.insert_rsw(service_one);
service_store.insert_rsw(service_two);
service_store.insert_rsw(service_three);
let election_store: RumorStore<ElectionRumor> = RumorStore::default();
let mut election = ElectionRumor::new("member-a",
&sg_one,
election::Term::default(),
10,
true /* has_quorum */);
election.finish();
election_store.insert_rsw(election);
let election_update_store: RumorStore<ElectionUpdateRumor> = RumorStore::default();
let mut election_update = ElectionUpdateRumor::new("member-b",
&sg_two,
election::Term::default(),
10,
true /* has_quorum */);
election_update.finish();
election_update_store.insert_rsw(election_update);
let member_list = MemberList::new();
let service_config_store: RumorStore<ServiceConfigRumor> = RumorStore::default();
let service_file_store: RumorStore<ServiceFileRumor> = RumorStore::default();
let mut ring = CensusRing::new("member-b".to_string());
ring.update_from_rumors_rsr_mlr(&*CACHE_KEY_PATH,
&service_store,
&election_store,
&election_update_store,
&member_list,
&service_config_store,
&service_file_store);
(ring, sg_one, sg_two)
}
/// Create a bare-minimum CensusMember with the given Health
fn test_census_member(id: &str, health: Health) -> CensusMember {
let pkg = "habitat-testing/test_service".parse()
.expect("valid package ident");
CensusMember { member_id: id.into(),
pkg,
service: "test_service".to_string(),
group: "default".to_string(),
org: None,
persistent: false,
leader: false,
follower: false,
update_leader: false,
update_follower: false,
election_is_running: false,
election_is_no_quorum: false,
election_is_finished: false,
update_election_is_running: false,
update_election_is_no_quorum: false,
update_election_is_finished: false,
sys: SysInfo::default(),
alive: health == Health::Alive,
suspect: health == Health::Suspect,
confirmed: health == Health::Confirmed,
departed: health == Health::Departed,
cfg: toml::value::Table::new() }
}
#[test]
fn active_members_leaves_only_active_members() {
let population = vec![test_census_member("live-one", Health::Alive),
test_census_member("suspect-one", Health::Suspect),
test_census_member("confirmed-one", Health::Confirmed),
test_census_member("departed-one", Health::Departed),];
let sg: ServiceGroup =
"test-service.default".parse()
.expect("This should be a valid service group");
let mut census_group = CensusGroup::new(sg, &"live-one".to_string());
for member in population {
census_group.population
.insert(member.member_id.clone(), member);
}
let mut active_members = census_group.active_members();
assert_eq!(active_members.next().unwrap().member_id, "live-one");
assert_eq!(active_members.next().unwrap().member_id, "suspect-one");
assert!(active_members.next().is_none());
}
fn assert_eq_member_ids(cm: Option<&CensusMember>, id: Option<&str>) {
assert_eq!(cm.map(|cm| cm.member_id.as_str()), id);
}
#[test]
fn previous_peer_with_no_members() {
let me = test_census_member("me", Health::Alive);
let members = vec![];
assert_eq_member_ids(CensusGroup::previous_peer_impl(members.iter(), &me), None);
}
#[test]
fn previous_peer_with_no_alive_members() {
let me = test_census_member("me", Health::Alive);
let members = vec![test_census_member("left_of_me", Health::Confirmed),
me.clone(),];
assert_eq_member_ids(CensusGroup::previous_peer_impl(members.iter(), &me), None);
}
#[test]
fn previous_peer_with_only_me() {
let me = test_census_member("me", Health::Alive);
let members = vec![me.clone()];
assert_eq_member_ids(CensusGroup::previous_peer_impl(members.iter(), &me), None);
}
#[test]
fn previous_peer_simple() {
let me = test_census_member("me", Health::Alive);
let members = vec![test_census_member("left_of_me", Health::Alive), me.clone()];
assert_eq_member_ids(CensusGroup::previous_peer_impl(members.iter(), &me),
Some("left_of_me"));
}
#[test]
fn previous_peer_wraparound() {
let me = test_census_member("me", Health::Alive);
let members = vec![me.clone(),
test_census_member("left_of_me_with_wrapping", Health::Alive),];
assert_eq_member_ids(CensusGroup::previous_peer_impl(members.iter(), &me),
Some("left_of_me_with_wrapping"));
}
#[test]
fn previous_peer_normal() {
let me = test_census_member("me", Health::Alive);
let members = vec![test_census_member("2_left_of_me", Health::Alive),
test_census_member("left_of_me", Health::Alive),
me.clone(),
test_census_member("right_of_me", Health::Alive),];
assert_eq_member_ids(CensusGroup::previous_peer_impl(members.iter(), &me),
Some("left_of_me"));
}
#[test]
fn previous_peer_with_confirmed() {
let me = test_census_member("me", Health::Alive);
let members = vec![test_census_member("2_left_of_me", Health::Alive),
test_census_member("left_of_me", Health::Confirmed),
me.clone(),
test_census_member("right_of_me", Health::Alive),];
assert_eq_member_ids(CensusGroup::previous_peer_impl(members.iter(), &me),
Some("2_left_of_me"));
}
#[test]
fn previous_peer_with_confirmed_and_wraparound() {
let me = test_census_member("me", Health::Alive);
let members = vec![test_census_member("left_of_me", Health::Confirmed),
me.clone(),
test_census_member("left_of_me_with_wrapping", Health::Alive),
test_census_member("2_right_of_me", Health::Confirmed),];
assert_eq_member_ids(CensusGroup::previous_peer_impl(members.iter(), &me),
Some("left_of_me_with_wrapping"));
}
}<|fim▁end|> | } |
<|file_name|>schema.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .util import Specification
from . import compat
class Field(Specification):
"""
Field object for adding fields to a resource schema.
Currently this is built around the Tabular Data Package.
"""
SPECIFICATION = {'name': compat.str,
'title': compat.str,
'type': compat.str,
'format': compat.str,
'constraints': dict}
REQUIRED = ('name',)
class Constraints(Specification):
"""
Constraints object which can be added to a field in a resource schema
in order to represent the constraints put on that particular field.
"""
SPECIFICATION = {'required': bool,
'minLength': int,
'maxLength': int,
'unique': bool,
'pattern': compat.str,
'minimum': None,
'maximum': None}
class Reference(Specification):
"""
Reference object which can be added to a ForeignKey object to represent
the reference to the other datapackage.
"""
SPECIFICATION = {'datapackage': compat.str,
'resource': compat.str,
'fields': (compat.str, list)}
REQUIRED = ('fields',)
def __setattr__(self, attribute, value):
if attribute == 'fields':
            # We need to make sure all fields are represented by their
            # names if it is a list
if type(value) == list:
modified_value = []
for single_value in value:
if type(single_value) == compat.str:
modified_value.append(single_value)
elif isinstance(single_value, Field):
modified_value.append(single_value.name)<|fim▁hole|> type(single_value)))
value = modified_value
elif type(value) == compat.str:
# We don't need to do anything with a str
pass
elif isinstance(value, Field):
# Set the name from the field as the value
value = value.name
else:
raise TypeError("Type of field ({0}) is not supported".format(
type(value)))
super(Reference, self).__setattr__(attribute, value)
class ForeignKey(Specification):
"""
ForeignKey object which can be added to a resource schema object to
represent a foreign key in another data package.
"""
SPECIFICATION = {'fields': (compat.str, list),
'reference': Reference}
REQUIRED = ('fields', 'reference')
def __setattr__(self, attribute, value):
# If the attribute is 'reference' we need to check if there is a
# fields attribute and do some checks to see if they are inconsistent
# because they shouldn't be
if attribute == 'reference' and 'fields' in self:
fields = self['fields']
if type(fields) != type(value.fields):
raise TypeError(
'Reference fields must have the same type as fields')
if type(value.fields) == list:
if len(value.fields) != len(fields):
raise ValueError(
'Reference fields and fields are inconsistent')
if attribute == 'fields':
value_type = type(value)
            # We only want to show the names of the fields, so we need
            # to go through the list, pull out the names, and use them as the
            # value
if value_type == list:
modified_value = []
for single_value in value:
if type(single_value) == compat.str:
modified_value.append(single_value)
elif isinstance(single_value, Field):
modified_value.append(single_value.name)
else:
raise TypeError(
'Foreign key type ({0}) is not supported'.format(
type(single_value)))
value = modified_value
elif value_type == compat.str:
# We don't need to do anything if the value is a str
pass
elif isinstance(value, Field):
value = value.name
else:
raise TypeError("Type of field ({0}) is not supported".format(
value_type))
# Same check as before about inconsistencies but just the other
# way around
if 'reference' in self:
reference_fields = self['reference'].fields
if type(reference_fields) != value_type:
raise TypeError(
'Fields must have the same type as Reference fields')
if type(reference_fields) == list:
if len(reference_fields) != len(value):
raise ValueError(
'Reference fields and fields are inconsistent')
super(ForeignKey, self).__setattr__(attribute, value)
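# A hedged usage sketch (illustrative, and assuming Specification's
# constructor accepts keyword arguments the way Schema.__init__ below
# suggests): fields and reference.fields must agree in type and length, and
# Field instances are normalized to their plain names by the __setattr__
# hooks above.
#
#     fk = ForeignKey(fields=['state'],
#                     reference=Reference(resource='states', fields=['id']))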
class Schema(Specification):
"""
Schema object which holds the representation of the schema for a
Tabular Data Package (using the JSON Table Schema protocol). The
schema can be used just like a dictionary which means it is ready
for json serialization and export as part of a data package
descriptor (when added to a resource).
"""
SPECIFICATION = {'fields': list,
'primaryKey': (compat.str, list),
'foreignKeys': list}
def __init__(self, *args, **kwargs):
        # We need to initialize an empty fields array (the specification
        # marks fields as required, but rather than demanding it from the
        # caller we create it here)
self['fields'] = []
# We add the fields using the internal method so we can do
# validation of each field
self.add_fields(kwargs.pop('fields', []))
super(Schema, self).__init__(self, *args, **kwargs)
def __setattr__(self, attribute, value):
if attribute == 'primaryKey' and value is not None:
# Primary Keys must be a reference to existing fields so we
# need to check if the primary key is in the fields array
field_names = [f.name for f in self.get('fields', [])]
if type(value) == list:
modified_value = []
for single_value in value:
if type(single_value) == compat.str:
if single_value in field_names:
modified_value.append(single_value)
else:
raise AttributeError(
"Unknown '{0}' cannot be primaryKey".format(
single_value))
elif isinstance(single_value, Field):
if single_value.name in field_names:
modified_value.append(single_value.name)
else:
raise AttributeError(
"Unknown '{0}' cannot be primaryKey".format(
single_value.name))
else:
raise TypeError(
'primaryKey type ({0}) is not supported'.format(
type(single_value)))
value = modified_value
elif type(value) == compat.str:
if value not in field_names:
raise AttributeError(
"Unknown '{0}' cannot be primaryKey".format(
value))
elif isinstance(value, Field):
if value.name in field_names:
value = value.name
else:
raise AttributeError(
"Unknown '{0}' cannot be primaryKey".format(
value.name))
else:
raise TypeError('Primary Key type ({0}) not supported'.format(
type(value)))
super(Schema, self).__setattr__(attribute, value)
def add_field(self, field):
"""
Adds a field to the resource schema
:param ~Field field: A Field instance containing the field to be
appended to the schema.
"""
if isinstance(field, Field):
self['fields'].append(field)
elif type(field) == dict:
self['fields'].append(Field(field))
else:
raise TypeError("Type of parameter field is not supported.")
def add_fields(self, fields):
"""
Adds fields to the resource schema
:param list fields: A list of Field instances which should be
appended (extend) to the resource schema fields.
"""
# We loop through the fields list to make sure all elements
# in the list are of the proper type
for field in fields:
self.add_field(field)
def add_foreign_key(self, foreign_key):
"""
Adds a foreign key to the resource schema.
:param ~ForeignKey foreign_key: A ForeignKey object which keeps
track of a foreign key relationship to another data package.
"""
# We can only accept ForeignKey objects
if not isinstance(foreign_key, ForeignKey):
raise TypeError("Foreign Key type is not supported")
        # ForeignKey fields must be schema fields. fields may be a single
        # name or a list of names, so normalize it before iterating
        # (iterating a bare string would wrongly check its characters)
        field_names = [f.name for f in self.get('fields', [])]
        foreign_fields = foreign_key.fields
        if type(foreign_fields) != list:
            foreign_fields = [foreign_fields]
        for field in foreign_fields:
            if field not in field_names:
                raise ValueError(
                    "Foreign key field '{0}' is not in schema fields".format(
                        field))
# Append the ForeignKey to the foreignKeys object or create it if it
# doesn't exist
foreign_keys = dict.get(self, 'foreignKeys', [])
foreign_keys.append(foreign_key)
self['foreignKeys'] = foreign_keys
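    # Hedged example of the checks above (all names are illustrative):
    #
    #     schema = Schema()
    #     schema.add_field({'name': 'state'})
    #     schema.add_foreign_key(
    #         ForeignKey(fields=['state'],
    #                    reference=Reference(resource='states',
    #                                        fields=['id'])))
    #
    # add_foreign_key raises ValueError if 'state' is not already a schema
    # field, and TypeError for anything that is not a ForeignKey.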
def add_foreign_keys(self, foreign_keys):
"""
Adds foreign keys to the resource schema
:param list foreign_keys: A list of ForeignKey instances which should
be appended (extend) to the resource schema fields or create a
foreignKeys attribute if it doesn't exist.
"""
# We loop through the foreign keys list to make sure all elements
# in the list are of the proper type and validate
for foreign_key in foreign_keys:
self.add_foreign_key(foreign_key)<|fim▁end|> | else:
raise TypeError(
'Field type ({0}) is not supported'.format( |
<|file_name|>TCP_SocketNoDelay.cpp<|end_file_name|><|fim▁begin|>//----------------------------------------------------------------------------
// XC program; finite element analysis code
// for structural analysis and design.
//
// Copyright (C) Luis Claudio Pérez Tato
//
// This program derives from OpenSees <http://opensees.berkeley.edu>
// developed by the «Pacific earthquake engineering research center».
//
// Except for the restrictions that may arise from the copyright
// of the original program (see copyright_opensees.txt)
// XC is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This software is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
//
// You should have received a copy of the GNU General Public License
// along with this program.
// If not, see <http://www.gnu.org/licenses/>.
//----------------------------------------------------------------------------
/* ****************************************************************** **
** OpenSees - Open System for Earthquake Engineering Simulation **
** Pacific Earthquake Engineering Research Center **
** **
** **
** (C) Copyright 1999, The Regents of the University of California **
** All Rights Reserved. **
** **
** Commercial use of this program without express permission of the **
** University of California, Berkeley, is strictly prohibited. See **
** file 'COPYRIGHT' in main directory for information on usage and **
** redistribution, and for a DISCLAIMER OF ALL WARRANTIES. **
** **
** Developed by: **
** Frank McKenna ([email protected]) **
** Gregory L. Fenves ([email protected]) **
** Filip C. Filippou ([email protected]) **
** **
** ****************************************************************** */
// $Revision: 1.2 $
// $Date: 2003/02/14 23:00:39 $
// $Source: /usr/local/cvs/OpenSees/SRC/actor/channel/TCP_SocketNoDelay.cpp,v $
// File: ~/actor/TCP_SocketNoDelay.C
//
// Written: fmk 11/95
// Revised:
//
// Purpose: This file contains the implementation of the methods needed
// to define the TCP_SocketNoDelay class interface.
#include "utility/actor/channel/TCP_SocketNoDelay.h"
#include <netinet/in.h>
#include <netinet/tcp.h>
#include "utility/matrix/Matrix.h"
#include "utility/matrix/Vector.h"
#include "../message/Message.h"
#include "../address/ChannelAddress.h"
#include "../actor/MovableObject.h"
static int GetHostAddr(char *host, char *IntAddr);
static void inttoa(unsigned int no, char *string, int *cnt);
// TCP_SocketNoDelay(unsigned int other_Port, char *other_InetAddr):
// constructor to open a socket with my inet_addr and with a port number
// given by the OS.
XC::TCP_SocketNoDelay::TCP_SocketNoDelay(void)
:myPort(0)
{
// set up my_Addr
bzero((char *) &my_Addr, sizeof(my_Addr));
my_Addr.sin_family = AF_INET;
my_Addr.sin_addr.s_addr = htonl(INADDR_ANY);
my_Addr.sin_port = htons(0);
addrLength = sizeof(my_Addr);
// open a socket
if ((sockfd = socket(AF_INET, SOCK_STREAM, 0)) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not open socket\n";
}
// bind local address to it
if (bind(sockfd, (struct sockaddr *) &my_Addr,sizeof(my_Addr)) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not bind local address\n";
}
// get my_address info
INET_getsockname(sockfd, &my_Addr, &addrLength);
myPort = ntohs(my_Addr.sin_port);
}
// TCP_SocketNoDelay(unsigned int port):
// constructor to open a socket with my inet_addr and with a port number port.
XC::TCP_SocketNoDelay::TCP_SocketNoDelay(unsigned int port)
:myPort(0)
{
// set up my_Addr with address given by port and internet address of
// machine on which the process that uses this routine is running.
char me[20];
char my_InetAddr[MAX_INET_ADDR];
gethostname(me,MAX_INET_ADDR);
GetHostAddr(me,my_InetAddr);
bzero((char *) &my_Addr, sizeof(my_Addr));
my_Addr.sin_family = AF_INET;
my_Addr.sin_addr.s_addr = inet_addr(my_InetAddr);
my_Addr.sin_port = htons(port);
addrLength = sizeof(my_Addr);
// open a socket
if ((sockfd = socket(AF_INET, SOCK_STREAM, 0)) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not open socket\n";
}
// bind local address to it
if (bind(sockfd,(struct sockaddr *)&my_Addr,sizeof(my_Addr)) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not bind local address\n";
}
// get my_address info
INET_getsockname(sockfd, &my_Addr, &addrLength);
myPort = ntohs(my_Addr.sin_port);
}
// TCP_SocketNoDelay(unsigned int other_Port, char *other_InetAddr):
// constructor to open a socket with my inet_addr and with a port number
// given by the OS. Then to connect with a TCP_SocketNoDelay whose address is
// given by other_Port and other_InetAddr.
XC::TCP_SocketNoDelay::TCP_SocketNoDelay(unsigned int other_Port, char *other_InetAddr)
:myPort(0)
{
// set up remote address
bzero((char *) &other_Addr, sizeof(other_Addr));
other_Addr.sin_family = AF_INET;
other_Addr.sin_addr.s_addr = inet_addr(other_InetAddr);
other_Addr.sin_port = htons(other_Port);
// set up my_Addr
bzero((char *) &my_Addr, sizeof(my_Addr));
my_Addr.sin_family = AF_INET;
my_Addr.sin_addr.s_addr = htonl(INADDR_ANY);
my_Addr.sin_port = htons(0);
addrLength = sizeof(my_Addr);
// open a socket
if ((sockfd = socket(AF_INET, SOCK_STREAM, 0)) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not open socket\n";
}
// bind local address to it
if (bind(sockfd, (struct sockaddr *) &my_Addr,sizeof(my_Addr)) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not bind local address\n";
}
myPort = ntohs(my_Addr.sin_port);
}
// ~TCP_SocketNoDelay():
// destructor
XC::TCP_SocketNoDelay::~TCP_SocketNoDelay()
{
close(sockfd);
}
int
XC::TCP_SocketNoDelay::setUpActor(void)
{
// now try to connect to socket with remote address.
if (connect(sockfd, (struct sockaddr *) &other_Addr,
sizeof(other_Addr))< 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not connect\n";
return -1;
}
// get my_address info<|fim▁hole|>
// set socket so no delay
int optlen;
optlen = 1;
if ((setsockopt(sockfd,IPPROTO_TCP, TCP_NODELAY,
(char *) &optlen, sizeof(int))) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not set TCP_NODELAY\n";
}
/*
int flag=sizeof(int);
if ((getsockopt(sockfd,IPPROTO_TCP, TCP_NODELAY,
(char *) &optlen, &flag)) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not set TCP_NODELAY\n";
}
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - " << optlen << " flag " << flag << std::endl;
*/
return 0;
}
int
XC::TCP_SocketNoDelay::setUpShadow(void)
{
// wait for other process to contact me & set up connection
int newsockfd;
listen(sockfd, 1);
newsockfd = accept(sockfd, (struct sockaddr *) &other_Addr, &addrLength);
if (newsockfd < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not accept connection\n";
return -1;
}
// close old socket & reset sockfd
close(sockfd); // we can close as we are not
// going to wait for others to connect
sockfd = newsockfd;
// get my_address info
INET_getsockname(sockfd, &my_Addr, &addrLength);
myPort = ntohs(my_Addr.sin_port);
// set socket so no delay
int optlen;
optlen = 1;
if ((setsockopt(sockfd,IPPROTO_TCP, TCP_NODELAY,
(char *) &optlen, sizeof(int))) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not set TCP_NODELAY\n";
}
/*
int flag=sizeof(int);
if ((getsockopt(sockfd,IPPROTO_TCP, TCP_NODELAY,
(char *) &optlen, &flag)) < 0) {
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - could not set TCP_NODELAY\n";
}
std::cerr << "XC::TCP_SocketNoDelay::TCP_SocketNoDelay - " << optlen << " flag " << flag << std::endl;
*/
return 0;
}
int
XC::TCP_SocketNoDelay::setNextAddress(const XC::ChannelAddress &theAddress)
{
SocketAddress *theSocketAddress = 0;
if (theAddress.getType() == SOCKET_TYPE) {
theSocketAddress = (SocketAddress *)(&theAddress);
// check address is the only address a TCP_SocketNoDelay can send to
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvMsg() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
else {
std::cerr << "XC::TCP_SocketNoDelay::setNextAddress() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
return 0;
}
int
XC::TCP_SocketNoDelay::sendObj(MovableObject &theObject,
FEM_ObjectBroker &theBroker,
ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay";
std::cerr << " address given is not that address\n";
return -1;
}
}
return theObject.sendSelf(*this, theBroker);
}
int
XC::TCP_SocketNoDelay::recvObj(MovableObject &theObject,
FEM_ObjectBroker &theBroker,
ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvMsg() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
return theObject.recvSelf(*this, theBroker);
}
// void Recv(Message &):
// Method to receive a message, also sets other_Addr to that of sender
int
XC::TCP_SocketNoDelay::recvMsg(Message &msg, ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvMsg() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
    // if o.k. get a pointer to the data in the message and
// place the incoming data there
int nleft,nread;
char *gMsg;
gMsg = msg.data;
nleft = msg.length;
while (nleft > 0) {
nread = read(sockfd,gMsg,nleft);
nleft -= nread;
gMsg += nread;
}
return 0;
}
// void Send(Message &):
// Method to send a message to an address given by other_Addr.
int XC::TCP_SocketNoDelay::sendMsg(const Message &msg, ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvMsg() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
    // if o.k. get a pointer to the data in the message and
    // write the outgoing data from there
int nwrite, nleft;
char *gMsg;
gMsg = msg.data;
nleft = msg.length;
while (nleft > 0) {
nwrite = write(sockfd,gMsg,nleft);
nleft -= nwrite;
gMsg += nwrite;
}
return 0;
}
int
XC::TCP_SocketNoDelay::recvMatrix(Matrix &theMatrix, ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvMatrix() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
    // if o.k. get a pointer to the data in the XC::Matrix and
// place the incoming data there
int nleft,nread;
double *data = theMatrix.myData;
    char *gMsg = (char *)data;
nleft = theMatrix.dataSize * sizeof(double);
while (nleft > 0) {
nread = read(sockfd,gMsg,nleft);
nleft -= nread;
gMsg += nread;
}
return 0;
}
// void Send(Matrix &):
// Method to send a XC::Matrix to an address given by other_Addr.
int
XC::TCP_SocketNoDelay::sendMatrix(const XC::Matrix &theMatrix, ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
} SocketAddress *theSocketAddress = 0;
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvMatrix() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
    // if o.k. get a pointer to the data in the XC::Matrix and
    // write the outgoing data from there
int nwrite, nleft;
double *data = theMatrix.myData;
char *gMsg = (char *)data;
nleft = theMatrix.dataSize * sizeof(double);
while (nleft > 0) {
nwrite = write(sockfd,gMsg,nleft);
nleft -= nwrite;
gMsg += nwrite;
}
return 0;
}
int
XC::TCP_SocketNoDelay::recvVector(Vector &theVector, ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvVector() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
    // if o.k. get a pointer to the data in the XC::Vector and
// place the incoming data there
int nleft,nread;
double *data = theVector.theData;
    char *gMsg = (char *)data;
nleft = theVector.sz * sizeof(double);
while (nleft > 0) {
nread = read(sockfd,gMsg,nleft);
nleft -= nread;
gMsg += nread;
}
return 0;
}
// void Send(Vector &):
// Method to send a XC::Vector to an address given by other_Addr.
int
XC::TCP_SocketNoDelay::sendVector(const XC::Vector &theVector, ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvVector() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
    // if o.k. get a pointer to the data in the XC::Vector and
    // write the outgoing data from there
int nwrite, nleft;
double *data = theVector.theData;
char *gMsg = (char *)data;
nleft = theVector.sz * sizeof(double);
while (nleft > 0) {
nwrite = write(sockfd,gMsg,nleft);
nleft -= nwrite;
gMsg += nwrite;
}
return 0;
}
int
XC::TCP_SocketNoDelay::recvID(ID &theID, ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvID() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
    // if o.k. get a pointer to the data in the XC::ID and
// place the incoming data there
int nleft,nread;
int *data = theID.data;
    char *gMsg = (char *)data;
nleft = theID.sz * sizeof(int);
while (nleft > 0) {
nread = read(sockfd,gMsg,nleft);
nleft -= nread;
gMsg += nread;
}
return 0;
}
// void Send(ID &):
// Method to send a XC::ID to an address given by other_Addr.
int
XC::TCP_SocketNoDelay::sendID(const XC::ID &theID, ChannelAddress *theAddress)
{
// first check address is the only address a TCP_SocketNoDelay can send to
SocketAddress *theSocketAddress = 0;
if (theAddress != 0) {
if (theAddress->getType() == SOCKET_TYPE)
theSocketAddress = (SocketAddress *)theAddress;
else {
std::cerr << "XC::TCP_SocketNoDelay::sendObj() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with a TCP_SocketNoDelay";
std::cerr << " address given is not of type XC::SocketAddress\n";
return -1;
}
if (bcmp((char *) &other_Addr, (char *) &theSocketAddress->addr,
theSocketAddress->addrLength) != 0) {
std::cerr << "XC::TCP_SocketNoDelay::recvID() - a TCP_SocketNoDelay ";
std::cerr << "can only communicate with one other TCP_SocketNoDelay\n";
return -1;
}
}
    // if o.k. get a pointer to the data in the XC::ID and
    // write the outgoing data from there
int nwrite, nleft;
int *data = theID.data;
char *gMsg = (char *)data;
nleft = theID.sz * sizeof(int);
while (nleft > 0) {
nwrite = write(sockfd,gMsg,nleft);
nleft -= nwrite;
gMsg += nwrite;
}
return 0;
}
unsigned int
XC::TCP_SocketNoDelay::getPortNumber(void) const
{
return myPort;
}
char *
XC::TCP_SocketNoDelay::addToProgram(void)
{
char *tcp = " 3 ";
char me[20];
char my_InetAddr[MAX_INET_ADDR];
char myPortNum[8];
unsigned int thePort = this->getPortNumber();
/*
char *me =(char *)malloc(30*sizeof(char));
char *my_InetAddr=(char *)malloc(30*sizeof(char));
char *myPortNum = (char *)malloc(30*sizeof(char));
for (int i=0; i<30; i++) {
me[i] = ' ';
my_InetAddr[i] = ' ';
myPortNum[i] = ' ';
}
*/
int start = 0;
inttoa(thePort,myPortNum,&start);
gethostname(me,MAX_INET_ADDR);
GetHostAddr(me,my_InetAddr);
char *newStuff =(char *)malloc(100*sizeof(char));
for (int i=0; i<100; i++)
newStuff[i] = ' ';
strcpy(newStuff,tcp);
strcat(newStuff," ");
strcat(newStuff,my_InetAddr);
strcat(newStuff," ");
strcat(newStuff,myPortNum);
strcat(newStuff," ");
return newStuff;
}
// G e t H o s t A d d r
// GetHostAddr is a function to get the internet address of a host
// Takes machine name host & Returns 0 if o.k, -1 if gethostbyname
// error, -2 otherwise. The internet address is returned in IntAddr
static int GetHostAddr(char *host, char *IntAddr)
{
register struct hostent *hostptr;
if ( (hostptr = gethostbyname(host)) == nullptr)
return (-1);
switch(hostptr->h_addrtype) {
case AF_INET:
strcpy(IntAddr,inet_ntoa(*(struct in_addr *)*hostptr->h_addr_list));
return (0);
break;
default:
return (-2);
}
}
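// Hedged usage sketch (illustrative only), mirroring how the constructors
// above resolve the local machine's dotted-quad address:
//
//   char me[MAX_INET_ADDR];
//   char my_InetAddr[MAX_INET_ADDR];
//   gethostname(me, MAX_INET_ADDR);
//   if (GetHostAddr(me, my_InetAddr) == 0)
//       std::cerr << "local address: " << my_InetAddr << std::endl;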
/*
* i n t t o a
*
* Function to convert int to ascii
*
*/
static void inttoa(unsigned int no, char *string, int *cnt) {
if (no /10) {
inttoa(no/10, string, cnt);
*cnt = *cnt+1;
}
string[*cnt] = no % 10 + '0';
}<|fim▁end|> | INET_getsockname(sockfd, &my_Addr, &addrLength);
|
<|file_name|>test_backoff.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2014 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import math
import mock
import time
import unittest
import dcm.agent.connection.websocket as websocket
import dcm.agent.handshake as handshake
import dcm.agent.tests.utils.general as test_utils
from dcm.agent.events.globals import global_space as dcm_events
def fake_incoming_message(incoming_doc):
pass
class TestBackoff(unittest.TestCase):
@classmethod
def setUpClass(cls):
test_utils.connect_to_debugger()
def run_with_connect_errors(
self,
backoff_seconds,
max_backoff_seconds,
run_time_seconds,
conn_obj):
class FakeHS(object):
def get_send_document(self):
ws.throw_error(Exception("just for tests"))
return {}
def incoming_document(self, incoming_doc):
return handshake.HandshakeIncomingReply(
handshake.HandshakeIncomingReply.REPLY_CODE_SUCCESS)
m = mock.Mock()
conn_obj.return_value = m
server_url = "wss://notreal.com"
ws = websocket.WebSocketConnection(
server_url,
backoff_amount=int(backoff_seconds*1000),
max_backoff=int(max_backoff_seconds*1000))
ws.connect(fake_incoming_message, FakeHS())
nw = datetime.datetime.now()
done_time = nw + datetime.timedelta(seconds=run_time_seconds)
while done_time > nw:
remaining = done_time - nw
dcm_events.poll(timeblock=remaining.total_seconds())
nw = datetime.datetime.now()
ws.close()
return m
@mock.patch('dcm.agent.connection.websocket._WebSocketClient')
def test_no_retry(self, conn_obj):
"""Make sure that just 1 connect happens when waiting less than the
backoff time"""
m = mock.Mock()
conn_obj.return_value = m
backoff_seconds = 3.0
max_backoff_seconds = backoff_seconds * 100.0 # just make a big number
        run_time_seconds = backoff_seconds / 2.0  # less than the backoff
m = self.run_with_connect_errors(
backoff_seconds,
max_backoff_seconds,
run_time_seconds,
conn_obj)
self.assertEqual(1, m.connect.call_count)
@mock.patch('dcm.agent.connection.websocket._WebSocketClient')
def test_retry_connections(self, conn_obj):
"""Make sure reconnections happen"""
m = mock.Mock()
conn_obj.return_value = m
initial_backoff_seconds = 0.5
max_backoff_seconds = 600.0
run_time_seconds = 5.0
expected_backoff_count =\
int(math.log(run_time_seconds / initial_backoff_seconds, 2))
m = self.run_with_connect_errors(
initial_backoff_seconds,
max_backoff_seconds,
run_time_seconds,
conn_obj)
self.assertLessEqual(expected_backoff_count-2, m.connect.call_count)
self.assertGreaterEqual(expected_backoff_count+2, m.connect.call_count)
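        # Worked instance of the expected count (assuming the reconnect
        # backoff doubles after each failed attempt): run_time 5.0 s over an
        # initial 0.5 s backoff gives log2(10) ~= 3.32, so int() yields 3
        # reconnects, and the asserts above allow +/-2 of slack around that.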
@mock.patch('dcm.agent.connection.websocket._WebSocketClient')
def test_retry_connections_never_more_than_max_back(self, conn_obj):
m = mock.Mock()
conn_obj.return_value = m
initial_backoff_seconds = 5.0
max_backoff_seconds = 0.1
run_time_seconds = 3.0
expected_backoff_count = run_time_seconds / max_backoff_seconds
m = self.run_with_connect_errors(
initial_backoff_seconds,
max_backoff_seconds,
run_time_seconds,
conn_obj)
self.assertGreaterEqual(expected_backoff_count, m.connect.call_count)
@mock.patch('dcm.agent.connection.websocket._WebSocketClient')
def test_force_backoff(self, conn_obj):
# force the backoff to be longer than the max run time then make sure
# that the connect is only called once
backoff_seconds = 0.2
max_backoff_seconds = backoff_seconds
run_time_seconds = backoff_seconds * 10.0
force_time = run_time_seconds + 1.0
m = mock.Mock()
conn_obj.return_value = m
server_url = "wss://notreal.com"
ws = websocket.WebSocketConnection(
server_url,
backoff_amount=int(backoff_seconds*1000),
max_backoff=int(max_backoff_seconds*1000))
def send_in_handshake():
ws.event_incoming_message(
{handshake.HandshakeIncomingReply.REPLY_KEY_FORCE_BACKOFF:
force_time,
'return_code':
handshake.HandshakeIncomingReply.REPLY_CODE_FORCE_BACKOFF})
class FakeHS(object):
def get_send_document(self):
dcm_events.register_callback(send_in_handshake)
return {}
def incoming_document(self, incoming_doc):
hs = handshake.HandshakeIncomingReply(
handshake.HandshakeIncomingReply.REPLY_CODE_FORCE_BACKOFF,
force_backoff=force_time)
return hs
ws.connect(fake_incoming_message, FakeHS())
nw = datetime.datetime.now()
done_time = nw + datetime.timedelta(seconds=run_time_seconds)
while done_time > nw:
remaining = done_time - nw<|fim▁hole|> nw = datetime.datetime.now()
ws.close()
self.assertEqual(1, m.connect.call_count)
def test_backoff_object_ready_immediately(self):
initial_backoff_second = 300.0
max_backoff_seconds = initial_backoff_second
backoff = websocket.Backoff(
max_backoff_seconds,
initial_backoff_second=initial_backoff_second)
self.assertTrue(backoff.ready())
def test_backoff_object_error_not_ready(self):
initial_backoff_second = 300.0
max_backoff_seconds = initial_backoff_second
backoff = websocket.Backoff(
max_backoff_seconds,
initial_backoff_second=initial_backoff_second)
backoff.error()
self.assertFalse(backoff.ready())
def test_backoff_object_error_wait_ready(self):
initial_backoff_second = 0.05
max_backoff_seconds = initial_backoff_second
backoff = websocket.Backoff(
max_backoff_seconds,
initial_backoff_second=initial_backoff_second)
backoff.error()
time.sleep(initial_backoff_second)
self.assertTrue(backoff.ready())
def test_backoff_object_ready_after_many_errors_than_activity(self):
initial_backoff_second = 0.05
max_backoff_seconds = initial_backoff_second
backoff = websocket.Backoff(
max_backoff_seconds,
initial_backoff_second=initial_backoff_second)
backoff.error()
backoff.error()
backoff.error()
backoff.error()
backoff.error()
backoff.error()
self.assertFalse(backoff.ready())
backoff.activity()
self.assertTrue(backoff.ready())<|fim▁end|> | dcm_events.poll(timeblock=remaining.total_seconds()) |
<|file_name|>WebRtcProducer.ts<|end_file_name|><|fim▁begin|>///<reference path="../../../lib/RTCPeerConnection.d.ts"/>
///<reference path="WebRtcCommons.ts"/>
"use strict";
class WebRtcProducer {
private _id : string;
private _debugMode : boolean = false;
private _successCalled : boolean = false;
private connection: RTCPeerConnection = null;
private channel: RTCDataChannel = null;
private _onPassDataToPeer : IWebRtcConnectionDataCallback = null;
private _onConnectionSucces : () => void = null;
private _onConnectionError : (error: Object) => void = null;
private _config: any = null;
/**
* constructor
*/
constructor(servers: RTCIceServer[], _id?: string, _debugMode?: boolean) {
this._id = _id;
this._debugMode = _debugMode||false;
if (servers != null)
this._config = { "iceServers": servers };
}
/**
* setCallbacks
*/
setCallbacks(onPassDataToPeer: IWebRtcConnectionDataCallback,
onConnectionSucces: () => void, onConnectionError: (error: Object) => void): void {
this._onPassDataToPeer = onPassDataToPeer;
this._onConnectionSucces = onConnectionSucces;
this._onConnectionError = onConnectionError;
}
/**
* isConnected
*/
isConnected(): boolean {
return this.connection != null
&& (this.connection.iceConnectionState === 'completed' //RTCIceConnectionState.completed
|| this.connection.iceConnectionState === 'connected')
&& this.channel != null
&& this.channel.readyState === 'open' ; //RTCDataChannelState.open
}
/**
* configure
*/
configure(data: IWebRtcConnectionData): void {
var self = this;
// step 1
if (data === null) {
if (this._debugMode)
self.log('configure - Step1', data);
this.connection.createOffer(
function(sdp: RTCSessionDescription): void {
if (self._debugMode)
self.log('onOfferCreated', sdp);
self.connection.setLocalDescription(sdp, null);
self._onPassDataToPeer({'RTCSessionDescription': sdp});
},
function (errorInformation: DOMError): void {
console.error('onOfferError', errorInformation);
});
} else
// step 2
if (data['RTCSessionDescription'] != undefined) {
if (this._debugMode)
this.log('configure - Step2', data);
this.connection.setRemoteDescription(data['RTCSessionDescription']);
} else
// step 3
if (data['RTCIceCandidate'] != undefined) {
if (this._debugMode)
this.log('configure - Step3', data);
this.connection.addIceCandidate(data['RTCIceCandidate'],
function(): void {
if (self._debugMode)
self.log('onAddIceCandidateSuccess');
},
function (error): void {
if (self._debugMode)
self.log('onAddIceCandidateError');
});
}
}
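    // A hedged sketch of the signalling round-trip configure() implements.
    // The transport that carries these payloads between peers is up to the
    // caller; the names below are illustrative, not part of this class:
    //
    //   producer.configure(null);                               // step 1: create + send offer
    //   // ...peer answers via the signalling channel...
    //   producer.configure({'RTCSessionDescription': answer});  // step 2: remote description
    //   producer.configure({'RTCIceCandidate': candidate});     // step 3: ICE candidate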
/**
     * sendMessage
*/
sendMessage(msg: string): void {
if (this._debugMode)
this.log('Sending message: "' +msg +'"');
if (!this.isConnected())
throw new WebRtcConnectionNotInitializedError('');
this.channel.send(msg);
}
/**
* open
*/
open(): void{
if (this._debugMode)
this.log('Creating new; iceServers: ' +JSON.stringify(this._config));
if (typeof webkitRTCPeerConnection === 'function') {
this.connection = new webkitRTCPeerConnection( this._config );
} else if (typeof mozRTCPeerConnection === 'function') {
throw new Error('Not implemented yet.');
//this.connection = new mozRTCPeerConnection( this._config );
} else
throw new Error('unknown implementation of RTCPeerConnection');
this.internalInit();
this._successCalled = false;
}
/**
* close
*/
close(): void{
this._successCalled = false;
if (this.channel != null)
this.channel.close();
if (this.connection != null)
this.connection.close();
}
/**
* internalInit
*/
private internalInit(): void {
this.channel = this.connection.createDataChannel('label', null);
this.channel.onopen = this.onReceiveChannelStateChange;
this.channel.onclose = this.onReceiveChannelStateChange;
this.connection.onicecandidate =
function(event: RTCIceCandidateEvent): void {
if (event.candidate) {
if (this._debugMode)
this.log('onIceCandidate', event.candidate);
this._onPassDataToPeer({'RTCIceCandidate': event.candidate});
}
this.tryCallSuccess();
}.bind(this);
this.connection.oniceconnectionstatechange =
function(event: Event): void {
if (this._debugMode)
this.log('onIceConnectionStateChange: ' +this.connection.iceConnectionState, event);
this.tryCallSuccess();
}.bind(this);
}
/**
* onReceiveChannelStateChange
*/
private onReceiveChannelStateChange = function(event: Event): void {
if (this._debugMode)
this.log('onReceiveChannelStateChange', event);
this.tryCallSuccess();
}.bind(this);
/**
* tryCallSuccess
*/
private tryCallSuccess = function(): void {
if (!this._successCalled && this.isConnected()) {
if (this._debugMode)
this.log('triggering onConnectionSucces callback');
this._successCalled = true;
this._onConnectionSucces();
}
}.bind(this);
/**
* log
*/
private log(msg: string, ...optionalParams: Object[]) {
if (!this._debugMode)
throw new Error('Debug mode is disabled.');
var arr: Object[] = new Array<Object>().concat(this.dbgId() + ' ' + msg).concat(optionalParams);
console.log.apply(console, arr);
document.writeln(this.dbgId() +' ' +msg +' ' +this.connectionState() +'<br>');
}
/**
* connectionState
*/
private connectionState(): string {
return '<b>[connected: '+this.isConnected() +']</b> '
+'connection.iceConnectionState: '+ (this.connection === null ? 'null' : this.connection.iceConnectionState) +'; '
+'connection.iceGatheringState: '+ (this.connection === null ? 'null' : this.connection.iceGatheringState) +'; '
+'connection.signalingState: '+ (this.connection === null ? 'null' : this.connection.signalingState) +'; '
+'channel.readyState: '+ (this.channel === null ? 'null' : this.channel.readyState);
}
/**
* dbgId
*/ <|fim▁hole|> }
}<|fim▁end|> | private dbgId(): string{
return '[' +(this._id != '' ? this._id +' ' : '') +'producer]'; |
<|file_name|>_references.js<|end_file_name|><|fim▁begin|>/// <reference path="jquery-ui-1.10.3.js" />
/// <reference path="jquery-2.0.3.js" />
/// <reference path="jquery.validate.js" />
/// <reference path="jquery.validate.unobtrusive.js" />
<|fim▁hole|>/// <reference path="bootstrap.js"/><|fim▁end|> | /// <reference path="knockout-2.1.0.debug.js" />
/// <reference path="modernizr-2.5.3.js" />
|
<|file_name|>myconfig.py<|end_file_name|><|fim▁begin|>#coding:utf-8
import os
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(filename)s:%(lineno)s - %(funcName)20s() - %(name)s - %(levelname)s - %(message)s')
logging.warning('load module:%s', __name__)
user_path = os.path.expanduser("~/")
dir_path = os.path.join(user_path, '.mywunder/')
try:
os.mkdir(dir_path)
except OSError:
pass
config_txt = os.path.join(dir_path, 'config.txt')
db_path = os.path.join(dir_path, "mywunder.db")<|fim▁hole|>CLIENT_ID = 'ce310d4e732dc98c6a07'<|fim▁end|> | |
<|file_name|>webgl_conformance_test.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import json
from telemetry.page import page_set
from telemetry.page import page_test
from telemetry.core import util
src_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')
conformance_path = os.path.join(src_path, 'third_party', 'webgl_conformance')
conformance_harness_script = r"""
var testHarness = {};
testHarness._allTestSucceeded = true;
testHarness._messages = '';
testHarness._failures = 0;
testHarness._finished = false;
testHarness.reportResults = function(success, msg) {
testHarness._allTestSucceeded = testHarness._allTestSucceeded && !!success;
if(!success) {
testHarness._failures++;
if(msg) {
testHarness._messages += msg + "\n";
}
}
};
testHarness.notifyFinished = function() {
testHarness._finished = true;
};
testHarness.navigateToPage = function(src) {
var testFrame = document.getElementById("test-frame");
testFrame.src = src;
};
window.webglTestHarness = testHarness;
window.parent.webglTestHarness = testHarness;
console.log("Harness injected.");
"""
def _DidWebGLTestSucceed(tab):
return tab.EvaluateJavaScript('webglTestHarness._allTestSucceeded')
def _WebGLTestMessages(tab):
return tab.EvaluateJavaScript('webglTestHarness._messages')
class WebGLConformanceTest(page_test.PageTest):
def __init__(self):
super(WebGLConformanceTest, self).__init__('ValidatePage')
def CreatePageSet(self, options):
tests = WebGLConformanceTest._ParseTests('00_test_list.txt', '1.0.1')
page_set_dict = {
'description': 'Executes WebGL conformance tests',<|fim▁hole|> 'serving_dirs': [
'../../../../third_party/webgl_conformance'
],
'pages': []
}
pages = page_set_dict['pages']
for test in tests:
pages.append({
'url': 'file:///../../../../third_party/webgl_conformance/' + test,
'script_to_evaluate_on_commit': conformance_harness_script,
'wait_for_javascript_expression': 'webglTestHarness._finished'
})
return page_set.PageSet.FromDict(page_set_dict, __file__)
def ValidatePage(self, page, tab, results):
if _DidWebGLTestSucceed(tab):
results.AddSuccess(page)
else:
results.AddFailureMessage(page, _WebGLTestMessages(tab))
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArg('--enable-webgl')
@staticmethod
def _ParseTests(path, version = None):
test_paths = []
current_dir = os.path.dirname(path)
full_path = os.path.normpath(os.path.join(conformance_path, path))
if not os.path.exists(full_path):
raise Exception('The WebGL conformance test path specified ' +
'does not exist: ' + full_path)
with open(full_path, 'r') as f:
for line in f:
line = line.strip()
if not line:
continue
if line.startswith('//') or line.startswith('#'):
continue
line_tokens = line.split(' ')
i = 0
min_version = None
while i < len(line_tokens):
token = line_tokens[i]
if token == '--min-version':
i += 1
min_version = line_tokens[i]
i += 1
if version and min_version and version < min_version:
continue
test_name = line_tokens[-1]
if '.txt' in test_name:
include_path = os.path.join(current_dir, test_name)
test_paths += WebGLConformanceTest._ParseTests(
include_path, version)
else:
test = os.path.join(current_dir, test_name)
test_paths.append(test)
return test_paths<|fim▁end|> | 'user_agent_type': 'desktop', |
<|file_name|>GiftEntry.java<|end_file_name|><|fim▁begin|>/**
* The GiftEntry class describes a gift in terms of three values:
* the gift name, the gift receipient, and whether the gift
* has been bought yet.
*
* @author Richard White
* @version 2013-12-05
*/
public class GiftEntry
{
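    // NOTE: the original file ships only javadoc stubs, so everything
    // inserted below is a hedged sketch: the field names (giftName,
    // recipient, purchased) and the method bodies placed after each comment
    // are assumptions read off those javadocs, not the assignment's answer.
    private String giftName;
    private String recipient;
    private boolean purchased;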
/**
* Constructor for known recipient with blank Gift
* @param recipient The person who will receive an as yet unnamed gift
*/
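    public GiftEntry(String recipient)
    {
        this(recipient, "");
    }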
/**
* Constructor for known recipient with known Gift
* @param recipient The person who will receive the gift
* @param gift The gift this person will receive
*/
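    public GiftEntry(String recipient, String gift)
    {
        this.recipient = recipient;
        this.giftName = gift;
        this.purchased = false;
    }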
/**
* setName establishes the giftName for a person's gift
* @param theGiftName the name of the gift
*/
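    public void setName(String theGiftName)
    {
        giftName = theGiftName;
    }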
<|fim▁hole|> * @param theRecipient the name of the gift's receiver
*/
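    public void setRecipient(String theRecipient)
    {
        recipient = theRecipient;
    }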
/**
* setAsPurchased checks the gift off as purchased
*/
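    public void setAsPurchased()
    {
        purchased = true;
    }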
/**
* getName returns giftName for a gift
* @return the name of the gift
*/
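    public String getName()
    {
        return giftName;
    }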
/**
* getRecipient identifies the recipient of a gift
* @return gift's receiver
*/
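    public String getRecipient()
    {
        return recipient;
    }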
/**
* isPurchased identifies whether the gift has been purchased
* @return the value true if purchased, false if not yet purchased
*/
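    public boolean isPurchased()
    {
        return purchased;
    }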
}<|fim▁end|> | /**
* setRecipient establishes the recipient of a gift |
<|file_name|>db.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2019 The Bitcoin Core developers
// Copyright (c) 2014-2019 The DigiByte Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <wallet/db.h>
#include <addrman.h>
#include <hash.h>
#include <protocol.h>
#include <utilstrencodings.h>
#include <wallet/walletutil.h>
#include <stdint.h>
#ifndef WIN32
#include <sys/stat.h>
#endif
#include <boost/thread.hpp>
namespace {
//! Make sure database has a unique fileid within the environment. If it
//! doesn't, throw an error. BDB caches do not work properly when more than one
//! open database has the same fileid (values written to one database may show
//! up in reads to other databases).
//!
//! BerkeleyDB generates unique fileids by default
//! (https://docs.oracle.com/cd/E17275_01/html/programmer_reference/program_copy.html),
//! so digibyte should never create different databases with the same fileid, but
//! this error can be triggered if users manually copy database files.
void CheckUniqueFileid(const BerkeleyEnvironment& env, const std::string& filename, Db& db)
{
if (env.IsMock()) return;
u_int8_t fileid[DB_FILE_ID_LEN];
int ret = db.get_mpf()->get_fileid(fileid);
if (ret != 0) {
throw std::runtime_error(strprintf("BerkeleyBatch: Can't open database %s (get_fileid failed with %d)", filename, ret));
}
for (const auto& item : env.mapDb) {
u_int8_t item_fileid[DB_FILE_ID_LEN];
if (item.second && item.second->get_mpf()->get_fileid(item_fileid) == 0 &&
memcmp(fileid, item_fileid, sizeof(fileid)) == 0) {
const char* item_filename = nullptr;
item.second->get_dbname(&item_filename, nullptr);
throw std::runtime_error(strprintf("BerkeleyBatch: Can't open database %s (duplicates fileid %s from %s)", filename,
HexStr(std::begin(item_fileid), std::end(item_fileid)),
item_filename ? item_filename : "(unknown database)"));
}
}
}
CCriticalSection cs_db;
std::map<std::string, BerkeleyEnvironment> g_dbenvs GUARDED_BY(cs_db); //!< Map from directory name to open db environment.
} // namespace
BerkeleyEnvironment* GetWalletEnv(const fs::path& wallet_path, std::string& database_filename)
{
fs::path env_directory;
if (fs::is_regular_file(wallet_path)) {
// Special case for backwards compatibility: if wallet path points to an
// existing file, treat it as the path to a BDB data file in a parent
// directory that also contains BDB log files.
env_directory = wallet_path.parent_path();
database_filename = wallet_path.filename().string();
} else {
// Normal case: Interpret wallet path as a directory path containing
// data and log files.
env_directory = wallet_path;
database_filename = "wallet.dat";
}
LOCK(cs_db);
    // Note: An unused temporary BerkeleyEnvironment object may be created inside the
// emplace function if the key already exists. This is a little inefficient,
// but not a big concern since the map will be changed in the future to hold
// pointers instead of objects, anyway.
return &g_dbenvs.emplace(std::piecewise_construct, std::forward_as_tuple(env_directory.string()), std::forward_as_tuple(env_directory)).first->second;
}
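// Hedged examples of the two path interpretations above (paths illustrative):
//   GetWalletEnv("/wallets/w1.dat", name); // existing file: env at "/wallets", name "w1.dat"
//   GetWalletEnv("/wallets/w2", name);     // directory: env at "/wallets/w2", name "wallet.dat"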
//
// BerkeleyBatch
//
void BerkeleyEnvironment::Close()
{
if (!fDbEnvInit)
return;
fDbEnvInit = false;
for (auto& db : mapDb) {
auto count = mapFileUseCount.find(db.first);
assert(count == mapFileUseCount.end() || count->second == 0);
if (db.second) {
db.second->close(0);
delete db.second;
db.second = nullptr;
}
}
int ret = dbenv->close(0);
if (ret != 0)
LogPrintf("BerkeleyEnvironment::Close: Error %d closing database environment: %s\n", ret, DbEnv::strerror(ret));
if (!fMockDb)
DbEnv((u_int32_t)0).remove(strPath.c_str(), 0);
}
void BerkeleyEnvironment::Reset()
{
dbenv.reset(new DbEnv(DB_CXX_NO_EXCEPTIONS));
fDbEnvInit = false;
fMockDb = false;
}
BerkeleyEnvironment::BerkeleyEnvironment(const fs::path& dir_path) : strPath(dir_path.string())
{
Reset();
}
BerkeleyEnvironment::~BerkeleyEnvironment()
{
Close();
}
bool BerkeleyEnvironment::Open(bool retry)
{
if (fDbEnvInit)
return true;
boost::this_thread::interruption_point();
fs::path pathIn = strPath;
TryCreateDirectories(pathIn);
if (!LockDirectory(pathIn, ".walletlock")) {
LogPrintf("Cannot obtain a lock on wallet directory %s. Another instance of digibyte may be using it.\n", strPath);
return false;
}
fs::path pathLogDir = pathIn / "database";
TryCreateDirectories(pathLogDir);
fs::path pathErrorFile = pathIn / "db.log";
LogPrintf("BerkeleyEnvironment::Open: LogDir=%s ErrorFile=%s\n", pathLogDir.string(), pathErrorFile.string());
unsigned int nEnvFlags = 0;
if (gArgs.GetBoolArg("-privdb", DEFAULT_WALLET_PRIVDB))
nEnvFlags |= DB_PRIVATE;
dbenv->set_lg_dir(pathLogDir.string().c_str());
dbenv->set_cachesize(0, 0x100000, 1); // 1 MiB should be enough for just the wallet
dbenv->set_lg_bsize(0x10000);
dbenv->set_lg_max(1048576);
dbenv->set_lk_max_locks(40000);
dbenv->set_lk_max_objects(40000);
dbenv->set_errfile(fsbridge::fopen(pathErrorFile, "a")); /// debug
dbenv->set_flags(DB_AUTO_COMMIT, 1);
dbenv->set_flags(DB_TXN_WRITE_NOSYNC, 1);
dbenv->log_set_config(DB_LOG_AUTO_REMOVE, 1);
int ret = dbenv->open(strPath.c_str(),
DB_CREATE |
DB_INIT_LOCK |
DB_INIT_LOG |
DB_INIT_MPOOL |
DB_INIT_TXN |
DB_THREAD |
DB_RECOVER |
nEnvFlags,
S_IRUSR | S_IWUSR);
if (ret != 0) {
LogPrintf("BerkeleyEnvironment::Open: Error %d opening database environment: %s\n", ret, DbEnv::strerror(ret));
int ret2 = dbenv->close(0);
if (ret2 != 0) {
LogPrintf("BerkeleyEnvironment::Open: Error %d closing failed database environment: %s\n", ret2, DbEnv::strerror(ret2));
}
Reset();
if (retry) {
// try moving the database env out of the way
fs::path pathDatabaseBak = pathIn / strprintf("database.%d.bak", GetTime());
try {
fs::rename(pathLogDir, pathDatabaseBak);
LogPrintf("Moved old %s to %s. Retrying.\n", pathLogDir.string(), pathDatabaseBak.string());
} catch (const fs::filesystem_error&) {
// failure is ok (well, not really, but it's not worse than what we started with)
}
// try opening it again one more time
if (!Open(false /* retry */)) {
// if it still fails, it probably means we can't even create the database env
return false;
}
} else {
return false;
}
}
fDbEnvInit = true;
fMockDb = false;
return true;
}
void BerkeleyEnvironment::MakeMock()
{
if (fDbEnvInit)
throw std::runtime_error("BerkeleyEnvironment::MakeMock: Already initialized");
boost::this_thread::interruption_point();
LogPrint(BCLog::DB, "BerkeleyEnvironment::MakeMock\n");
dbenv->set_cachesize(1, 0, 1);
dbenv->set_lg_bsize(10485760 * 4);
dbenv->set_lg_max(10485760);
dbenv->set_lk_max_locks(10000);
dbenv->set_lk_max_objects(10000);
dbenv->set_flags(DB_AUTO_COMMIT, 1);
dbenv->log_set_config(DB_LOG_IN_MEMORY, 1);
int ret = dbenv->open(nullptr,
DB_CREATE |
DB_INIT_LOCK |
DB_INIT_LOG |
DB_INIT_MPOOL |
DB_INIT_TXN |
DB_THREAD |
DB_PRIVATE,
S_IRUSR | S_IWUSR);
if (ret > 0)
throw std::runtime_error(strprintf("BerkeleyEnvironment::MakeMock: Error %d opening database environment.", ret));
fDbEnvInit = true;
fMockDb = true;
}
BerkeleyEnvironment::VerifyResult BerkeleyEnvironment::Verify(const std::string& strFile, recoverFunc_type recoverFunc, std::string& out_backup_filename)
{
LOCK(cs_db);
assert(mapFileUseCount.count(strFile) == 0);
Db db(dbenv.get(), 0);
int result = db.verify(strFile.c_str(), nullptr, nullptr, 0);
if (result == 0)
return VerifyResult::VERIFY_OK;
else if (recoverFunc == nullptr)
return VerifyResult::RECOVER_FAIL;
// Try to recover:
bool fRecovered = (*recoverFunc)(fs::path(strPath) / strFile, out_backup_filename);
return (fRecovered ? VerifyResult::RECOVER_OK : VerifyResult::RECOVER_FAIL);
}
bool BerkeleyBatch::Recover(const fs::path& file_path, void *callbackDataIn, bool (*recoverKVcallback)(void* callbackData, CDataStream ssKey, CDataStream ssValue), std::string& newFilename)
{
std::string filename;
BerkeleyEnvironment* env = GetWalletEnv(file_path, filename);
// Recovery procedure:
// move wallet file to walletfilename.timestamp.bak
// Call Salvage with fAggressive=true to
// get as much data as possible.
// Rewrite salvaged data to fresh wallet file
// Set -rescan so any missing transactions will be
// found.
int64_t now = GetTime();
newFilename = strprintf("%s.%d.bak", filename, now);
int result = env->dbenv->dbrename(nullptr, filename.c_str(), nullptr,
newFilename.c_str(), DB_AUTO_COMMIT);
if (result == 0)
LogPrintf("Renamed %s to %s\n", filename, newFilename);
else
{
LogPrintf("Failed to rename %s to %s\n", filename, newFilename);
return false;
}
std::vector<BerkeleyEnvironment::KeyValPair> salvagedData;
bool fSuccess = env->Salvage(newFilename, true, salvagedData);
if (salvagedData.empty())
{
LogPrintf("Salvage(aggressive) found no records in %s.\n", newFilename);
return false;
}
LogPrintf("Salvage(aggressive) found %u records\n", salvagedData.size());
std::unique_ptr<Db> pdbCopy = MakeUnique<Db>(env->dbenv.get(), 0);
int ret = pdbCopy->open(nullptr, // Txn pointer
filename.c_str(), // Filename
"main", // Logical db name
DB_BTREE, // Database type
DB_CREATE, // Flags
0);
if (ret > 0) {
LogPrintf("Cannot create database file %s\n", filename);
pdbCopy->close(0);
return false;
}
DbTxn* ptxn = env->TxnBegin();
for (BerkeleyEnvironment::KeyValPair& row : salvagedData)
{
if (recoverKVcallback)
{
CDataStream ssKey(row.first, SER_DISK, CLIENT_VERSION);
CDataStream ssValue(row.second, SER_DISK, CLIENT_VERSION);
if (!(*recoverKVcallback)(callbackDataIn, ssKey, ssValue))
continue;
}
Dbt datKey(&row.first[0], row.first.size());
Dbt datValue(&row.second[0], row.second.size());
int ret2 = pdbCopy->put(ptxn, &datKey, &datValue, DB_NOOVERWRITE);
if (ret2 > 0)
fSuccess = false;
}
ptxn->commit(0);
pdbCopy->close(0);
return fSuccess;<|fim▁hole|>}
bool BerkeleyBatch::VerifyEnvironment(const fs::path& file_path, std::string& errorStr)
{
std::string walletFile;
BerkeleyEnvironment* env = GetWalletEnv(file_path, walletFile);
fs::path walletDir = env->Directory();
LogPrintf("Using BerkeleyDB version %s\n", DbEnv::version(0, 0, 0));
LogPrintf("Using wallet %s\n", walletFile);
// Wallet file must be a plain filename without a directory
if (walletFile != fs::basename(walletFile) + fs::extension(walletFile))
{
errorStr = strprintf(_("Wallet %s resides outside wallet directory %s"), walletFile, walletDir.string());
return false;
}
if (!env->Open(true /* retry */)) {
errorStr = strprintf(_("Error initializing wallet database environment %s!"), walletDir);
return false;
}
return true;
}
bool BerkeleyBatch::VerifyDatabaseFile(const fs::path& file_path, std::string& warningStr, std::string& errorStr, BerkeleyEnvironment::recoverFunc_type recoverFunc)
{
std::string walletFile;
BerkeleyEnvironment* env = GetWalletEnv(file_path, walletFile);
fs::path walletDir = env->Directory();
if (fs::exists(walletDir / walletFile))
{
std::string backup_filename;
BerkeleyEnvironment::VerifyResult r = env->Verify(walletFile, recoverFunc, backup_filename);
if (r == BerkeleyEnvironment::VerifyResult::RECOVER_OK)
{
warningStr = strprintf(_("Warning: Wallet file corrupt, data salvaged!"
" Original %s saved as %s in %s; if"
" your balance or transactions are incorrect you should"
" restore from a backup."),
walletFile, backup_filename, walletDir);
}
if (r == BerkeleyEnvironment::VerifyResult::RECOVER_FAIL)
{
errorStr = strprintf(_("%s corrupt, salvage failed"), walletFile);
return false;
}
}
// also return true if the file does not exist
return true;
}
/* End of headers, beginning of key/value data */
static const char *HEADER_END = "HEADER=END";
/* End of key/value data */
static const char *DATA_END = "DATA=END";
bool BerkeleyEnvironment::Salvage(const std::string& strFile, bool fAggressive, std::vector<BerkeleyEnvironment::KeyValPair>& vResult)
{
LOCK(cs_db);
assert(mapFileUseCount.count(strFile) == 0);
u_int32_t flags = DB_SALVAGE;
if (fAggressive)
flags |= DB_AGGRESSIVE;
std::stringstream strDump;
Db db(dbenv.get(), 0);
int result = db.verify(strFile.c_str(), nullptr, &strDump, flags);
if (result == DB_VERIFY_BAD) {
LogPrintf("BerkeleyEnvironment::Salvage: Database salvage found errors, all data may not be recoverable.\n");
if (!fAggressive) {
LogPrintf("BerkeleyEnvironment::Salvage: Rerun with aggressive mode to ignore errors and continue.\n");
return false;
}
}
if (result != 0 && result != DB_VERIFY_BAD) {
LogPrintf("BerkeleyEnvironment::Salvage: Database salvage failed with result %d.\n", result);
return false;
}
// Format of bdb dump is ascii lines:
// header lines...
// HEADER=END
// hexadecimal key
// hexadecimal value
// ... repeated
// DATA=END
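// For example, a minimal dump might look like (hex values illustrative only):
//   HEADER=END
//   0776657273696f6e   <- hex of the "\x07version" key
//   9c7d0100           <- hex of the serialized value
//   DATA=END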
std::string strLine;
while (!strDump.eof() && strLine != HEADER_END)
getline(strDump, strLine); // Skip past header
std::string keyHex, valueHex;
while (!strDump.eof() && keyHex != DATA_END) {
getline(strDump, keyHex);
if (keyHex != DATA_END) {
if (strDump.eof())
break;
getline(strDump, valueHex);
if (valueHex == DATA_END) {
LogPrintf("BerkeleyEnvironment::Salvage: WARNING: Number of keys in data does not match number of values.\n");
break;
}
vResult.push_back(make_pair(ParseHex(keyHex), ParseHex(valueHex)));
}
}
if (keyHex != DATA_END) {
LogPrintf("BerkeleyEnvironment::Salvage: WARNING: Unexpected end of file while reading salvage output.\n");
return false;
}
return (result == 0);
}
void BerkeleyEnvironment::CheckpointLSN(const std::string& strFile)
{
dbenv->txn_checkpoint(0, 0, 0);
if (fMockDb)
return;
dbenv->lsn_reset(strFile.c_str(), 0);
}
BerkeleyBatch::BerkeleyBatch(BerkeleyDatabase& database, const char* pszMode, bool fFlushOnCloseIn) : pdb(nullptr), activeTxn(nullptr)
{
fReadOnly = (!strchr(pszMode, '+') && !strchr(pszMode, 'w'));
fFlushOnClose = fFlushOnCloseIn;
env = database.env;
if (database.IsDummy()) {
return;
}
const std::string &strFilename = database.strFile;
bool fCreate = strchr(pszMode, 'c') != nullptr;
unsigned int nFlags = DB_THREAD;
if (fCreate)
nFlags |= DB_CREATE;
{
LOCK(cs_db);
if (!env->Open(false /* retry */))
throw std::runtime_error("BerkeleyBatch: Failed to open database environment.");
pdb = env->mapDb[strFilename];
if (pdb == nullptr) {
int ret;
std::unique_ptr<Db> pdb_temp = MakeUnique<Db>(env->dbenv.get(), 0);
bool fMockDb = env->IsMock();
if (fMockDb) {
DbMpoolFile* mpf = pdb_temp->get_mpf();
ret = mpf->set_flags(DB_MPOOL_NOFILE, 1);
if (ret != 0) {
throw std::runtime_error(strprintf("BerkeleyBatch: Failed to configure for no temp file backing for database %s", strFilename));
}
}
ret = pdb_temp->open(nullptr, // Txn pointer
fMockDb ? nullptr : strFilename.c_str(), // Filename
fMockDb ? strFilename.c_str() : "main", // Logical db name
DB_BTREE, // Database type
nFlags, // Flags
0);
if (ret != 0) {
throw std::runtime_error(strprintf("BerkeleyBatch: Error %d, can't open database %s", ret, strFilename));
}
// Call CheckUniqueFileid on the containing BDB environment to
// avoid BDB data consistency bugs that happen when different data
// files in the same environment have the same fileid.
//
// Also call CheckUniqueFileid on all the other g_dbenvs to prevent
// digibyte from opening the same data file through another
// environment when the file is referenced through equivalent but
// not obviously identical symlinked or hard linked or bind mounted
// paths. In the future a more relaxed check for equal inode and
// device ids could be done instead, which would allow opening
// different backup copies of a wallet at the same time. Maybe even
// more ideally, an exclusive lock for accessing the database could
// be implemented, so no equality checks are needed at all. (Newer
// versions of BDB have an set_lk_exclusive method for this
// purpose, but the older version we use does not.)
for (auto& env : g_dbenvs) {
CheckUniqueFileid(env.second, strFilename, *pdb_temp);
}
pdb = pdb_temp.release();
env->mapDb[strFilename] = pdb;
if (fCreate && !Exists(std::string("version"))) {
bool fTmp = fReadOnly;
fReadOnly = false;
WriteVersion(CLIENT_VERSION);
fReadOnly = fTmp;
}
}
++env->mapFileUseCount[strFilename];
strFile = strFilename;
}
}
void BerkeleyBatch::Flush()
{
if (activeTxn)
return;
// Flush database activity from memory pool to disk log
unsigned int nMinutes = 0;
if (fReadOnly)
nMinutes = 1;
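// DB_ENV->txn_checkpoint(kbyte, min, flags) only writes a checkpoint if at
// least `min` minutes have passed or `kbyte` KB of log data have accumulated
// since the last one; passing 0/0 (the read-write case) forces a checkpoint.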
env->dbenv->txn_checkpoint(nMinutes ? gArgs.GetArg("-dblogsize", DEFAULT_WALLET_DBLOGSIZE) * 1024 : 0, nMinutes, 0);
}
void BerkeleyDatabase::IncrementUpdateCounter()
{
++nUpdateCounter;
}
void BerkeleyBatch::Close()
{
if (!pdb)
return;
if (activeTxn)
activeTxn->abort();
activeTxn = nullptr;
pdb = nullptr;
if (fFlushOnClose)
Flush();
{
LOCK(cs_db);
--env->mapFileUseCount[strFile];
}
}
void BerkeleyEnvironment::CloseDb(const std::string& strFile)
{
{
LOCK(cs_db);
if (mapDb[strFile] != nullptr) {
// Close the database handle
Db* pdb = mapDb[strFile];
pdb->close(0);
delete pdb;
mapDb[strFile] = nullptr;
}
}
}
bool BerkeleyBatch::Rewrite(BerkeleyDatabase& database, const char* pszSkip)
{
if (database.IsDummy()) {
return true;
}
BerkeleyEnvironment *env = database.env;
const std::string& strFile = database.strFile;
while (true) {
{
LOCK(cs_db);
if (!env->mapFileUseCount.count(strFile) || env->mapFileUseCount[strFile] == 0) {
// Flush log data to the dat file
env->CloseDb(strFile);
env->CheckpointLSN(strFile);
env->mapFileUseCount.erase(strFile);
bool fSuccess = true;
LogPrintf("BerkeleyBatch::Rewrite: Rewriting %s...\n", strFile);
std::string strFileRes = strFile + ".rewrite";
{ // surround usage of db with extra {}
BerkeleyBatch db(database, "r");
std::unique_ptr<Db> pdbCopy = MakeUnique<Db>(env->dbenv.get(), 0);
int ret = pdbCopy->open(nullptr, // Txn pointer
strFileRes.c_str(), // Filename
"main", // Logical db name
DB_BTREE, // Database type
DB_CREATE, // Flags
0);
if (ret > 0) {
LogPrintf("BerkeleyBatch::Rewrite: Can't create database file %s\n", strFileRes);
fSuccess = false;
}
Dbc* pcursor = db.GetCursor();
if (pcursor)
while (fSuccess) {
CDataStream ssKey(SER_DISK, CLIENT_VERSION);
CDataStream ssValue(SER_DISK, CLIENT_VERSION);
int ret1 = db.ReadAtCursor(pcursor, ssKey, ssValue);
if (ret1 == DB_NOTFOUND) {
pcursor->close();
break;
} else if (ret1 != 0) {
pcursor->close();
fSuccess = false;
break;
}
if (pszSkip &&
strncmp(ssKey.data(), pszSkip, std::min(ssKey.size(), strlen(pszSkip))) == 0)
continue;
if (strncmp(ssKey.data(), "\x07version", 8) == 0) {
// Update version:
ssValue.clear();
ssValue << CLIENT_VERSION;
}
Dbt datKey(ssKey.data(), ssKey.size());
Dbt datValue(ssValue.data(), ssValue.size());
int ret2 = pdbCopy->put(nullptr, &datKey, &datValue, DB_NOOVERWRITE);
if (ret2 > 0)
fSuccess = false;
}
if (fSuccess) {
db.Close();
env->CloseDb(strFile);
if (pdbCopy->close(0))
fSuccess = false;
} else {
pdbCopy->close(0);
}
}
if (fSuccess) {
Db dbA(env->dbenv.get(), 0);
if (dbA.remove(strFile.c_str(), nullptr, 0))
fSuccess = false;
Db dbB(env->dbenv.get(), 0);
if (dbB.rename(strFileRes.c_str(), nullptr, strFile.c_str(), 0))
fSuccess = false;
}
if (!fSuccess)
LogPrintf("BerkeleyBatch::Rewrite: Failed to rewrite database file %s\n", strFileRes);
return fSuccess;
}
}
MilliSleep(100);
}
}
void BerkeleyEnvironment::Flush(bool fShutdown)
{
int64_t nStart = GetTimeMillis();
// Flush log data to the actual data file on all files that are not in use
LogPrint(BCLog::DB, "BerkeleyEnvironment::Flush: Flush(%s)%s\n", fShutdown ? "true" : "false", fDbEnvInit ? "" : " database not started");
if (!fDbEnvInit)
return;
{
LOCK(cs_db);
std::map<std::string, int>::iterator mi = mapFileUseCount.begin();
while (mi != mapFileUseCount.end()) {
std::string strFile = (*mi).first;
int nRefCount = (*mi).second;
LogPrint(BCLog::DB, "BerkeleyEnvironment::Flush: Flushing %s (refcount = %d)...\n", strFile, nRefCount);
if (nRefCount == 0) {
// Move log data to the dat file
CloseDb(strFile);
LogPrint(BCLog::DB, "BerkeleyEnvironment::Flush: %s checkpoint\n", strFile);
dbenv->txn_checkpoint(0, 0, 0);
LogPrint(BCLog::DB, "BerkeleyEnvironment::Flush: %s detach\n", strFile);
if (!fMockDb)
dbenv->lsn_reset(strFile.c_str(), 0);
LogPrint(BCLog::DB, "BerkeleyEnvironment::Flush: %s closed\n", strFile);
mapFileUseCount.erase(mi++);
} else
mi++;
}
LogPrint(BCLog::DB, "BerkeleyEnvironment::Flush: Flush(%s)%s took %15dms\n", fShutdown ? "true" : "false", fDbEnvInit ? "" : " database not started", GetTimeMillis() - nStart);
if (fShutdown) {
char** listp;
if (mapFileUseCount.empty()) {
dbenv->log_archive(&listp, DB_ARCH_REMOVE);
Close();
if (!fMockDb) {
fs::remove_all(fs::path(strPath) / "database");
}
g_dbenvs.erase(strPath);
}
}
}
}
bool BerkeleyBatch::PeriodicFlush(BerkeleyDatabase& database)
{
if (database.IsDummy()) {
return true;
}
bool ret = false;
BerkeleyEnvironment *env = database.env;
const std::string& strFile = database.strFile;
TRY_LOCK(cs_db, lockDb);
if (lockDb)
{
// Don't do this if any databases are in use
int nRefCount = 0;
std::map<std::string, int>::iterator mit = env->mapFileUseCount.begin();
while (mit != env->mapFileUseCount.end())
{
nRefCount += (*mit).second;
mit++;
}
if (nRefCount == 0)
{
boost::this_thread::interruption_point();
std::map<std::string, int>::iterator mi = env->mapFileUseCount.find(strFile);
if (mi != env->mapFileUseCount.end())
{
LogPrint(BCLog::DB, "Flushing %s\n", strFile);
int64_t nStart = GetTimeMillis();
// Flush wallet file so it's self contained
env->CloseDb(strFile);
env->CheckpointLSN(strFile);
env->mapFileUseCount.erase(mi++);
LogPrint(BCLog::DB, "Flushed %s %dms\n", strFile, GetTimeMillis() - nStart);
ret = true;
}
}
}
return ret;
}
bool BerkeleyDatabase::Rewrite(const char* pszSkip)
{
return BerkeleyBatch::Rewrite(*this, pszSkip);
}
bool BerkeleyDatabase::Backup(const std::string& strDest)
{
if (IsDummy()) {
return false;
}
while (true)
{
{
LOCK(cs_db);
if (!env->mapFileUseCount.count(strFile) || env->mapFileUseCount[strFile] == 0)
{
// Flush log data to the dat file
env->CloseDb(strFile);
env->CheckpointLSN(strFile);
env->mapFileUseCount.erase(strFile);
// Copy wallet file
fs::path pathSrc = env->Directory() / strFile;
fs::path pathDest(strDest);
if (fs::is_directory(pathDest))
pathDest /= strFile;
try {
if (fs::equivalent(pathSrc, pathDest)) {
LogPrintf("cannot backup to wallet source file %s\n", pathDest.string());
return false;
}
fs::copy_file(pathSrc, pathDest, fs::copy_option::overwrite_if_exists);
LogPrintf("copied %s to %s\n", strFile, pathDest.string());
return true;
} catch (const fs::filesystem_error& e) {
LogPrintf("error copying %s to %s - %s\n", strFile, pathDest.string(), e.what());
return false;
}
}
}
MilliSleep(100);
}
}
void BerkeleyDatabase::Flush(bool shutdown)
{
if (!IsDummy()) {
env->Flush(shutdown);
if (shutdown) env = nullptr;
}
}<|fim▁end|> | |
<|file_name|>test_context.py<|end_file_name|><|fim▁begin|>import pytest
import time
from v8py import JavaScriptTerminated, current_context, new
def test_glob(context):
context.eval('foo = "bar"')
assert context.glob.foo == 'bar'
def test_getattr(context):
context.foo = 'bar'
assert context.foo == 'bar'
assert context.glob.foo == 'bar'
assert context.eval('foo') == 'bar'
def test_getitem(context):
context['foo'] = 'bar'
assert context['foo'] == 'bar'
assert context.glob['foo'] == 'bar'
assert context.eval('foo') == 'bar'
def test_timeout(context):
with pytest.raises(JavaScriptTerminated):
context.eval('for(;;) {}', timeout=0.1)
def test_timeout_property(context_with_timeout):
assert context_with_timeout.timeout == 0.1
start = time.time()
with pytest.raises(JavaScriptTerminated):
context_with_timeout.eval('for(;;) {}')
diff = time.time() - start
assert diff >= 0.1 and diff < 0.2
context_with_timeout.timeout = 0.25
assert context_with_timeout.timeout == 0.25
start = time.time()
with pytest.raises(JavaScriptTerminated):
context_with_timeout.eval('for(;;) {}')
diff = time.time() - start
assert diff >= 0.25 and diff < 0.3
def test_timeout_context_level(context_with_timeout):
with pytest.raises(JavaScriptTerminated):
context_with_timeout.eval('for(;;) {}')
def test_timeout_new(context_with_timeout):
context_with_timeout.eval('function Freeze() { while(true); }')
with pytest.raises(JavaScriptTerminated):
new(context_with_timeout.glob.Freeze)
def test_timeout_call(context_with_timeout):
context_with_timeout.eval('function freeze() { while(true); }')
with pytest.raises(JavaScriptTerminated):
context_with_timeout.glob.freeze()
def test_timeout_proxy(context_with_timeout):
context_with_timeout.eval("""
user = {};
user.testA = 0;
user.testC = 10;
proxy = new Proxy(user, {
get(target, prop) {
if (prop == "testA") while(true);
},
set(target, prop, value) {<|fim▁hole|> if (phrase == "testC") while(true);
return false;
}
});
""")
proxy = context_with_timeout.glob.proxy
with pytest.raises(JavaScriptTerminated):
testA = proxy.testA
with pytest.raises(JavaScriptTerminated):
proxy.testB = 5
with pytest.raises(JavaScriptTerminated):
del proxy.testC
def test_expose(context):
def f(): return 'f'
def g(): return 'g'
context.expose(f, g, h=f)
assert context.eval('f()') == 'f'
assert context.eval('g()') == 'g'
assert context.eval('h()') == 'f'
def f(): pass
def test_expose_module(context):
import test_context
context.expose_module(test_context)
assert context.eval('f()') is None
def test_current_context(context):
assert current_context() is None
def f():
assert current_context() is context
context.expose(f)
context.eval('f()')<|fim▁end|> | if (prop == "testB") while(true);
return false;
},
deleteProperty(target, phrase) { |
<|file_name|>sass.js<|end_file_name|><|fim▁begin|>'use strict';
// Gulp & plugins<|fim▁hole|>
// BrowerSync
var browserSync = require('browser-sync');
// Utilities
var handleErrors = require('../util/handleErrors');
// Configs
var config = require('../config').sass;
gulp.task('sass', function () {
return gulp.src(config.src)
.pipe(sass(config.settings))
.on('error', handleErrors)
.pipe(autoprefixer({browsers: ['last 2 version']}))
.pipe(gulp.dest(config.dest))
.pipe(browserSync.reload({stream: true}));
});<|fim▁end|> | var gulp = require('gulp');
var autoprefixer = require('gulp-autoprefixer');
var sass = require('gulp-sass'); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.utils.translation import ugettext_noop as _
from geonode.notifications_helper import NotificationsAppConfigBase
class PeopleAppConfig(NotificationsAppConfigBase):
name = 'geonode.people'
NOTIFICATIONS = (("user_follow", _("User following you"), _("Another user has started following you"),),
("account_approve", _("User requested access"),
_("A new user has requested access to the site"),),
("account_active", _("Account activated"),
_("This account is now active and can log in the site"),),
)<|fim▁hole|>
default_app_config = 'geonode.people.PeopleAppConfig'<|fim▁end|> |
def ready(self):
super(PeopleAppConfig, self).ready() |
<|file_name|>test_middleware.py<|end_file_name|><|fim▁begin|>import warnings<|fim▁hole|>from honeybadger.middleware import DjangoHoneybadgerMiddleware
__all__ = ['MiddlewareTestCase']
class MiddlewareTestCase(DjangoMiddlewareTestCase):
def test_middleware_import_warning(self):
default_plugin_manager._registered = OrderedDict()
with warnings.catch_warnings(record=True) as w:
middleware = DjangoHoneybadgerMiddleware()
assert len(w) == 1
assert issubclass(w[-1].category, FutureWarning)
assert "moved" in str(w[-1].message)<|fim▁end|> | from collections import OrderedDict
from honeybadger.plugins import default_plugin_manager
from .contrib.test_django import DjangoMiddlewareTestCase |
<|file_name|>time.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::time::Duration;
#[test]
fn creation() {
assert!(Duration::from_secs(1) != Duration::from_secs(0));
assert_eq!(Duration::from_secs(1) + Duration::from_secs(2),
Duration::from_secs(3));
assert_eq!(Duration::from_millis(10) + Duration::from_secs(4),
Duration::new(4, 10 * 1_000_000));
assert_eq!(Duration::from_millis(4000), Duration::new(4, 0));
}
#[test]
fn secs() {
assert_eq!(Duration::new(0, 0).as_secs(), 0);
assert_eq!(Duration::new(0, 500_000_005).as_secs(), 0);
assert_eq!(Duration::new(0, 1_050_000_001).as_secs(), 1);
assert_eq!(Duration::from_secs(1).as_secs(), 1);
assert_eq!(Duration::from_millis(999).as_secs(), 0);
assert_eq!(Duration::from_millis(1001).as_secs(), 1);
assert_eq!(Duration::from_micros(999_999).as_secs(), 0);
assert_eq!(Duration::from_micros(1_000_001).as_secs(), 1);
assert_eq!(Duration::from_nanos(999_999_999).as_secs(), 0);
assert_eq!(Duration::from_nanos(1_000_000_001).as_secs(), 1);
}
#[test]
fn millis() {
assert_eq!(Duration::new(0, 0).subsec_millis(), 0);
assert_eq!(Duration::new(0, 500_000_005).subsec_millis(), 500);
assert_eq!(Duration::new(0, 1_050_000_001).subsec_millis(), 50);
assert_eq!(Duration::from_secs(1).subsec_millis(), 0);
assert_eq!(Duration::from_millis(999).subsec_millis(), 999);
assert_eq!(Duration::from_millis(1001).subsec_millis(), 1);
assert_eq!(Duration::from_micros(999_999).subsec_millis(), 999);
assert_eq!(Duration::from_micros(1_001_000).subsec_millis(), 1);
assert_eq!(Duration::from_nanos(999_999_999).subsec_millis(), 999);
assert_eq!(Duration::from_nanos(1_001_000_000).subsec_millis(), 1);
}
#[test]
fn micros() {
assert_eq!(Duration::new(0, 0).subsec_micros(), 0);
assert_eq!(Duration::new(0, 500_000_005).subsec_micros(), 500_000);
assert_eq!(Duration::new(0, 1_050_000_001).subsec_micros(), 50_000);
assert_eq!(Duration::from_secs(1).subsec_micros(), 0);
assert_eq!(Duration::from_millis(999).subsec_micros(), 999_000);
assert_eq!(Duration::from_millis(1001).subsec_micros(), 1_000);
assert_eq!(Duration::from_micros(999_999).subsec_micros(), 999_999);
assert_eq!(Duration::from_micros(1_000_001).subsec_micros(), 1);
assert_eq!(Duration::from_nanos(999_999_999).subsec_micros(), 999_999);
assert_eq!(Duration::from_nanos(1_000_001_000).subsec_micros(), 1);
}
#[test]
fn nanos() {
assert_eq!(Duration::new(0, 0).subsec_nanos(), 0);
assert_eq!(Duration::new(0, 5).subsec_nanos(), 5);
assert_eq!(Duration::new(0, 1_000_000_001).subsec_nanos(), 1);
assert_eq!(Duration::from_secs(1).subsec_nanos(), 0);
assert_eq!(Duration::from_millis(999).subsec_nanos(), 999_000_000);
assert_eq!(Duration::from_millis(1001).subsec_nanos(), 1_000_000);
assert_eq!(Duration::from_micros(999_999).subsec_nanos(), 999_999_000);
assert_eq!(Duration::from_micros(1_000_001).subsec_nanos(), 1000);
assert_eq!(Duration::from_nanos(999_999_999).subsec_nanos(), 999_999_999);
assert_eq!(Duration::from_nanos(1_000_000_001).subsec_nanos(), 1);
}
#[test]
fn add() {
assert_eq!(Duration::new(0, 0) + Duration::new(0, 1),
Duration::new(0, 1));
assert_eq!(Duration::new(0, 500_000_000) + Duration::new(0, 500_000_001),
Duration::new(1, 1));
}
#[test]
fn checked_add() {
assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)),
Some(Duration::new(0, 1)));
assert_eq!(Duration::new(0, 500_000_000).checked_add(Duration::new(0, 500_000_001)),
Some(Duration::new(1, 1)));
assert_eq!(Duration::new(1, 0).checked_add(Duration::new(::core::u64::MAX, 0)), None);
}
#[test]
fn sub() {
assert_eq!(Duration::new(0, 1) - Duration::new(0, 0),
Duration::new(0, 1));
assert_eq!(Duration::new(0, 500_000_001) - Duration::new(0, 500_000_000),
Duration::new(0, 1));
assert_eq!(Duration::new(1, 0) - Duration::new(0, 1),
Duration::new(0, 999_999_999));
}
#[test]
fn checked_sub() {
let zero = Duration::new(0, 0);
let one_nano = Duration::new(0, 1);
let one_sec = Duration::new(1, 0);
assert_eq!(one_nano.checked_sub(zero), Some(Duration::new(0, 1)));
assert_eq!(one_sec.checked_sub(one_nano),
Some(Duration::new(0, 999_999_999)));
assert_eq!(zero.checked_sub(one_nano), None);
assert_eq!(zero.checked_sub(one_sec), None);
}
#[test]
#[should_panic]
fn sub_bad1() {
let _ = Duration::new(0, 0) - Duration::new(0, 1);
}
#[test]
#[should_panic]
fn sub_bad2() {
let _ = Duration::new(0, 0) - Duration::new(1, 0);
}
#[test]
fn mul() {
assert_eq!(Duration::new(0, 1) * 2, Duration::new(0, 2));
assert_eq!(Duration::new(1, 1) * 3, Duration::new(3, 3));
assert_eq!(Duration::new(0, 500_000_001) * 4, Duration::new(2, 4));
assert_eq!(Duration::new(0, 500_000_001) * 4000,<|fim▁hole|>fn checked_mul() {
assert_eq!(Duration::new(0, 1).checked_mul(2), Some(Duration::new(0, 2)));
assert_eq!(Duration::new(1, 1).checked_mul(3), Some(Duration::new(3, 3)));
assert_eq!(Duration::new(0, 500_000_001).checked_mul(4), Some(Duration::new(2, 4)));
assert_eq!(Duration::new(0, 500_000_001).checked_mul(4000),
Some(Duration::new(2000, 4000)));
assert_eq!(Duration::new(::core::u64::MAX - 1, 0).checked_mul(2), None);
}
#[test]
fn div() {
assert_eq!(Duration::new(0, 1) / 2, Duration::new(0, 0));
assert_eq!(Duration::new(1, 1) / 3, Duration::new(0, 333_333_333));
assert_eq!(Duration::new(99, 999_999_000) / 100,
Duration::new(0, 999_999_990));
}
#[test]
fn checked_div() {
assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0)));
assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000)));
assert_eq!(Duration::new(2, 0).checked_div(0), None);
}
#[test]
fn correct_sum() {
let durations = [
Duration::new(1, 999_999_999),
Duration::new(2, 999_999_999),
Duration::new(0, 999_999_999),
Duration::new(0, 999_999_999),
Duration::new(0, 999_999_999),
Duration::new(5, 0),
];
let sum = durations.iter().sum::<Duration>();
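// The five 999_999_999ns values add up to 4_999_999_995ns = 4s + 999_999_995ns,
// which is where the extra `4` seconds and the `- 5` nanoseconds below come from.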
assert_eq!(sum, Duration::new(1+2+5+4, 1_000_000_000 - 5));
}
#[test]
fn debug_formatting_extreme_values() {
assert_eq!(
format!("{:?}", Duration::new(18_446_744_073_709_551_615, 123_456_789)),
"18446744073709551615.123456789s"
);
}
#[test]
fn debug_formatting_secs() {
assert_eq!(format!("{:?}", Duration::new(7, 000_000_000)), "7s");
assert_eq!(format!("{:?}", Duration::new(7, 100_000_000)), "7.1s");
assert_eq!(format!("{:?}", Duration::new(7, 000_010_000)), "7.00001s");
assert_eq!(format!("{:?}", Duration::new(7, 000_000_001)), "7.000000001s");
assert_eq!(format!("{:?}", Duration::new(7, 123_456_789)), "7.123456789s");
assert_eq!(format!("{:?}", Duration::new(88, 000_000_000)), "88s");
assert_eq!(format!("{:?}", Duration::new(88, 100_000_000)), "88.1s");
assert_eq!(format!("{:?}", Duration::new(88, 000_010_000)), "88.00001s");
assert_eq!(format!("{:?}", Duration::new(88, 000_000_001)), "88.000000001s");
assert_eq!(format!("{:?}", Duration::new(88, 123_456_789)), "88.123456789s");
assert_eq!(format!("{:?}", Duration::new(999, 000_000_000)), "999s");
assert_eq!(format!("{:?}", Duration::new(999, 100_000_000)), "999.1s");
assert_eq!(format!("{:?}", Duration::new(999, 000_010_000)), "999.00001s");
assert_eq!(format!("{:?}", Duration::new(999, 000_000_001)), "999.000000001s");
assert_eq!(format!("{:?}", Duration::new(999, 123_456_789)), "999.123456789s");
}
#[test]
fn debug_formatting_millis() {
assert_eq!(format!("{:?}", Duration::new(0, 7_000_000)), "7ms");
assert_eq!(format!("{:?}", Duration::new(0, 7_100_000)), "7.1ms");
assert_eq!(format!("{:?}", Duration::new(0, 7_000_001)), "7.000001ms");
assert_eq!(format!("{:?}", Duration::new(0, 7_123_456)), "7.123456ms");
assert_eq!(format!("{:?}", Duration::new(0, 88_000_000)), "88ms");
assert_eq!(format!("{:?}", Duration::new(0, 88_100_000)), "88.1ms");
assert_eq!(format!("{:?}", Duration::new(0, 88_000_001)), "88.000001ms");
assert_eq!(format!("{:?}", Duration::new(0, 88_123_456)), "88.123456ms");
assert_eq!(format!("{:?}", Duration::new(0, 999_000_000)), "999ms");
assert_eq!(format!("{:?}", Duration::new(0, 999_100_000)), "999.1ms");
assert_eq!(format!("{:?}", Duration::new(0, 999_000_001)), "999.000001ms");
assert_eq!(format!("{:?}", Duration::new(0, 999_123_456)), "999.123456ms");
}
#[test]
fn debug_formatting_micros() {
assert_eq!(format!("{:?}", Duration::new(0, 7_000)), "7µs");
assert_eq!(format!("{:?}", Duration::new(0, 7_100)), "7.1µs");
assert_eq!(format!("{:?}", Duration::new(0, 7_001)), "7.001µs");
assert_eq!(format!("{:?}", Duration::new(0, 7_123)), "7.123µs");
assert_eq!(format!("{:?}", Duration::new(0, 88_000)), "88µs");
assert_eq!(format!("{:?}", Duration::new(0, 88_100)), "88.1µs");
assert_eq!(format!("{:?}", Duration::new(0, 88_001)), "88.001µs");
assert_eq!(format!("{:?}", Duration::new(0, 88_123)), "88.123µs");
assert_eq!(format!("{:?}", Duration::new(0, 999_000)), "999µs");
assert_eq!(format!("{:?}", Duration::new(0, 999_100)), "999.1µs");
assert_eq!(format!("{:?}", Duration::new(0, 999_001)), "999.001µs");
assert_eq!(format!("{:?}", Duration::new(0, 999_123)), "999.123µs");
}
#[test]
fn debug_formatting_nanos() {
assert_eq!(format!("{:?}", Duration::new(0, 0)), "0ns");
assert_eq!(format!("{:?}", Duration::new(0, 1)), "1ns");
assert_eq!(format!("{:?}", Duration::new(0, 88)), "88ns");
assert_eq!(format!("{:?}", Duration::new(0, 999)), "999ns");
}
#[test]
fn debug_formatting_precision_zero() {
assert_eq!(format!("{:.0?}", Duration::new(0, 0)), "0ns");
assert_eq!(format!("{:.0?}", Duration::new(0, 123)), "123ns");
assert_eq!(format!("{:.0?}", Duration::new(0, 1_001)), "1µs");
assert_eq!(format!("{:.0?}", Duration::new(0, 1_499)), "1µs");
assert_eq!(format!("{:.0?}", Duration::new(0, 1_500)), "2µs");
assert_eq!(format!("{:.0?}", Duration::new(0, 1_999)), "2µs");
assert_eq!(format!("{:.0?}", Duration::new(0, 1_000_001)), "1ms");
assert_eq!(format!("{:.0?}", Duration::new(0, 1_499_999)), "1ms");
assert_eq!(format!("{:.0?}", Duration::new(0, 1_500_000)), "2ms");
assert_eq!(format!("{:.0?}", Duration::new(0, 1_999_999)), "2ms");
assert_eq!(format!("{:.0?}", Duration::new(1, 000_000_001)), "1s");
assert_eq!(format!("{:.0?}", Duration::new(1, 499_999_999)), "1s");
assert_eq!(format!("{:.0?}", Duration::new(1, 500_000_000)), "2s");
assert_eq!(format!("{:.0?}", Duration::new(1, 999_999_999)), "2s");
}
#[test]
fn debug_formatting_precision_two() {
assert_eq!(format!("{:.2?}", Duration::new(0, 0)), "0.00ns");
assert_eq!(format!("{:.2?}", Duration::new(0, 123)), "123.00ns");
assert_eq!(format!("{:.2?}", Duration::new(0, 1_000)), "1.00µs");
assert_eq!(format!("{:.2?}", Duration::new(0, 7_001)), "7.00µs");
assert_eq!(format!("{:.2?}", Duration::new(0, 7_100)), "7.10µs");
assert_eq!(format!("{:.2?}", Duration::new(0, 7_109)), "7.11µs");
assert_eq!(format!("{:.2?}", Duration::new(0, 7_199)), "7.20µs");
assert_eq!(format!("{:.2?}", Duration::new(0, 1_999)), "2.00µs");
assert_eq!(format!("{:.2?}", Duration::new(0, 1_000_000)), "1.00ms");
assert_eq!(format!("{:.2?}", Duration::new(0, 3_001_000)), "3.00ms");
assert_eq!(format!("{:.2?}", Duration::new(0, 3_100_000)), "3.10ms");
assert_eq!(format!("{:.2?}", Duration::new(0, 1_999_999)), "2.00ms");
assert_eq!(format!("{:.2?}", Duration::new(1, 000_000_000)), "1.00s");
assert_eq!(format!("{:.2?}", Duration::new(4, 001_000_000)), "4.00s");
assert_eq!(format!("{:.2?}", Duration::new(2, 100_000_000)), "2.10s");
assert_eq!(format!("{:.2?}", Duration::new(2, 104_990_000)), "2.10s");
assert_eq!(format!("{:.2?}", Duration::new(2, 105_000_000)), "2.11s");
assert_eq!(format!("{:.2?}", Duration::new(8, 999_999_999)), "9.00s");
}
#[test]
fn debug_formatting_precision_high() {
assert_eq!(format!("{:.5?}", Duration::new(0, 23_678)), "23.67800µs");
assert_eq!(format!("{:.9?}", Duration::new(1, 000_000_000)), "1.000000000s");
assert_eq!(format!("{:.10?}", Duration::new(4, 001_000_000)), "4.0010000000s");
assert_eq!(format!("{:.20?}", Duration::new(4, 001_000_000)), "4.00100000000000000000s");
}<|fim▁end|> | Duration::new(2000, 4000));
}
#[test] |
<|file_name|>array.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//! Facilities for working with `v8::Array`s.
/// Mutates the `out` argument provided to refer to a newly created `v8::Array`.
pub use neon_sys::Neon_Array_New as new;
/// Gets the length of an `v8::Array`.
pub use neon_sys::Neon_Array_Length as len;<|fim▁end|> | |
<|file_name|>home.component.ts<|end_file_name|><|fim▁begin|>import * as _ from 'lodash';
import { Component } from '@angular/core';
import {HeroesService} from "../../services/heroes-service";
@Component({
selector: 'home',
templateUrl: './home.component.html'
})<|fim▁hole|> constructor(private heroesService: HeroesService) {
this.heroesService.getHeroes();
}
ngOnInit() {
this.heroesService.heroes.subscribe(data => {
if (data) {
this.heroes = _.concat(this.heroes, data);
}
});
}
getHeroes() {
this.heroesService.getHeroes();
}
}<|fim▁end|> | export class HomeComponent {
public heroes = [];
|
<|file_name|>flaskr.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Flaskr
~~~~~~
A microblog example application written as Flask tutorial with
Flask and sqlite3.
:copyright: (c) 2010 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from sqlite3 import dbapi2 as sqlite3
from flask import (Flask, request, session,
redirect, url_for, abort,
render_template, flash, _app_ctx_stack)
# configuration
DATABASE = '/tmp/flaskr.db'
# debug mode
DEBUG = True
# secret key used to sign the session cookie
SECRET_KEY = 'development key'
USERNAME = 'admin'
PASSWORD = 'default'
# create our little application :)
# set the FLASKR_SETTINGS environment variable to point at a config file to preload
app = Flask(__name__)
app.config.from_object(__name__)
app.config.from_envvar('FLASKR_SETTINGS', silent=True)
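# e.g. (path illustrative): export FLASKR_SETTINGS=/path/to/settings.cfg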
def init_db():
"""Creates the database tables."""
# manually push an application context; inside the with block g is bound to app, and the context is torn down automatically afterwards
with app.app_context():
db = get_db()
# open a resource bundled with the application and read it from where it lives
# use a cursor to execute the sql script
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
def get_db():
"""Opens a new database connection if there is none yet for the
current application context.
The connection parameters come from the app config object.
"""
top = _app_ctx_stack.top
if not hasattr(top, 'sqlite_db'):
sqlite_db = sqlite3.connect(app.config['DATABASE'])
sqlite_db.row_factory = sqlite3.Row
top.sqlite_db = sqlite_db
return top.sqlite_db
@app.teardown_appcontext
def close_db_connection(exception):
"""Closes the database again at the end of the request."""
top = _app_ctx_stack.top
if hasattr(top, 'sqlite_db'):
top.sqlite_db.close()
<|fim▁hole|> db = get_db()
cur = db.execute('select title, text from entries order by id desc')
entries = cur.fetchall()
return render_template('show_entries.html', entries=entries)
@app.route('/add', methods=['POST'])
def add_entry():
if not session.get('logged_in'):
abort(401)
db = get_db()
db.execute('insert into entries (title, text) values (?, ?)',
[request.form['title'], request.form['text']])
db.commit()
# flash a message that will be shown on the next request
flash('New entry was successfully posted')
return redirect(url_for('show_entries'))
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != app.config['USERNAME']:
error = 'Invalid username'
elif request.form['password'] != app.config['PASSWORD']:
error = 'Invalid password'
else:
session['logged_in'] = True
flash('You were logged in')
return redirect(url_for('show_entries'))
# if login failed, render the page again with the error message
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
# drop the logged_in flag from the session
session.pop('logged_in', None)
flash('You were logged out')
return redirect(url_for('show_entries'))
if __name__ == '__main__':
# initialize the database
init_db()
# run this file as a standalone application
app.run(host='0.0.0.0')<|fim▁end|> |
@app.route('/')
def show_entries():
"""show_entries:显示所有的db条目""" |
<|file_name|>Actioner.java<|end_file_name|><|fim▁begin|>package eu.ehri.project.models.base;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.frames.modules.javahandler.JavaHandler;
import com.tinkerpop.frames.modules.javahandler.JavaHandlerContext;
import eu.ehri.project.definitions.Ontology;<|fim▁hole|> /**
* Fetch a list of Actions for this user in newest-first order.
*
* @return the system events for this user, newest first
*/
@JavaHandler
public Iterable<SystemEvent> getActions();
@JavaHandler
public Iterable<SystemEvent> getLatestAction();
/**
* Implementation of complex methods.
*/
abstract class Impl implements JavaHandlerContext<Vertex>, Actioner {
public Iterable<SystemEvent> getLatestAction() {
return frameVertices(gremlin()
.out(Ontology.ACTIONER_HAS_LIFECYCLE_ACTION)
.out(Ontology.ENTITY_HAS_EVENT));
}
public Iterable<SystemEvent> getActions() {
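// Walk the chain of lifecycle-action vertices hanging off this actioner,
// emitting each one, then resolve every action to its system event.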
return frameVertices(gremlin().as("n").out(Ontology.ACTIONER_HAS_LIFECYCLE_ACTION)
.loop("n", JavaHandlerUtils.noopLoopFunc, JavaHandlerUtils.noopLoopFunc)
.out(Ontology.ENTITY_HAS_EVENT));
}
}
}<|fim▁end|> | import eu.ehri.project.models.events.SystemEvent;
import eu.ehri.project.models.utils.JavaHandlerUtils;
public interface Actioner extends NamedEntity { |
<|file_name|>files.contribution.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'vs/css!./media/files.contribution';
import URI from 'vs/base/common/uri';
import { ViewletRegistry, Extensions as ViewletExtensions, ViewletDescriptor, ToggleViewletAction } from 'vs/workbench/browser/viewlet';
import nls = require('vs/nls');
import { SyncActionDescriptor } from 'vs/platform/actions/common/actions';
import { Registry } from 'vs/platform/platform';
import { IConfigurationRegistry, Extensions as ConfigurationExtensions } from 'vs/platform/configuration/common/configurationRegistry';
import { IWorkbenchActionRegistry, Extensions as ActionExtensions } from 'vs/workbench/common/actionRegistry';
import { IWorkbenchContributionsRegistry, Extensions as WorkbenchExtensions } from 'vs/workbench/common/contributions';
import { IEditorRegistry, Extensions as EditorExtensions, IEditorInputFactory, EditorInput, IFileEditorInput } from 'vs/workbench/common/editor';
import { AutoSaveConfiguration, SUPPORTED_ENCODINGS } from 'vs/platform/files/common/files';
import { EditorDescriptor } from 'vs/workbench/browser/parts/editor/baseEditor';
import { FILE_EDITOR_INPUT_ID, VIEWLET_ID } from 'vs/workbench/parts/files/common/files';
import { FileEditorTracker } from 'vs/workbench/parts/files/common/editors/fileEditorTracker';
import { SaveErrorHandler } from 'vs/workbench/parts/files/browser/saveErrorHandler';
import { FileEditorInput } from 'vs/workbench/parts/files/common/editors/fileEditorInput';
import { TextFileEditor } from 'vs/workbench/parts/files/browser/editors/textFileEditor';
import { BinaryFileEditor } from 'vs/workbench/parts/files/browser/editors/binaryFileEditor';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { SyncDescriptor, AsyncDescriptor } from 'vs/platform/instantiation/common/descriptors';
import { IKeybindings } from 'vs/platform/keybinding/common/keybinding';
import { IViewletService } from 'vs/workbench/services/viewlet/browser/viewlet';
import { IWorkbenchEditorService } from 'vs/workbench/services/editor/common/editorService';
import { KeyMod, KeyCode } from 'vs/base/common/keyCodes';
import * as platform from 'vs/base/common/platform';
// Viewlet Action
export class OpenExplorerViewletAction extends ToggleViewletAction {
public static ID = VIEWLET_ID;
public static LABEL = nls.localize('showExplorerViewlet', "Show Explorer");
constructor(
id: string,
label: string,
@IViewletService viewletService: IViewletService,
@IWorkbenchEditorService editorService: IWorkbenchEditorService
) {
super(id, label, VIEWLET_ID, viewletService, editorService);
}
}
// Register Viewlet
Registry.as<ViewletRegistry>(ViewletExtensions.Viewlets).registerViewlet(new ViewletDescriptor(
'vs/workbench/parts/files/browser/explorerViewlet',
'ExplorerViewlet',
VIEWLET_ID,
nls.localize('explore', "Explorer"),
'explore',
0
));
Registry.as<ViewletRegistry>(ViewletExtensions.Viewlets).setDefaultViewletId(VIEWLET_ID);
const openViewletKb: IKeybindings = {
primary: KeyMod.CtrlCmd | KeyMod.Shift | KeyCode.KEY_E
};
// Register Action to Open Viewlet
const registry = Registry.as<IWorkbenchActionRegistry>(ActionExtensions.WorkbenchActions);
registry.registerWorkbenchAction(
new SyncActionDescriptor(OpenExplorerViewletAction, OpenExplorerViewletAction.ID, OpenExplorerViewletAction.LABEL, openViewletKb),
'View: Show Explorer',
nls.localize('view', "View")
);
// Register file editors
Registry.as<IEditorRegistry>(EditorExtensions.Editors).registerEditor(
new EditorDescriptor(
TextFileEditor.ID, // explicit dependency because we don't want these editors lazy loaded
nls.localize('textFileEditor', "Text File Editor"),
'vs/workbench/parts/files/browser/editors/textFileEditor',
'TextFileEditor'
),
[
new SyncDescriptor<EditorInput>(FileEditorInput)
]
);
Registry.as<IEditorRegistry>(EditorExtensions.Editors).registerEditor(
new EditorDescriptor(
BinaryFileEditor.ID, // explicit dependency because we don't want these editors lazy loaded
nls.localize('binaryFileEditor', "Binary File Editor"),
'vs/workbench/parts/files/browser/editors/binaryFileEditor',
'BinaryFileEditor'
),
[
new SyncDescriptor<EditorInput>(FileEditorInput)
]
);
// Register default file input handler
// Note: because of service injection, the descriptor needs to have the exact same number
// of arguments as the FileEditorInput constructor. Otherwise, when creating an
// instance through the instantiation service, it will inject the services incorrectly!
const descriptor = new AsyncDescriptor<IFileEditorInput>('vs/workbench/parts/files/common/editors/fileEditorInput', 'FileEditorInput', /* DO NOT REMOVE */ void 0, /* DO NOT REMOVE */ void 0);
Registry.as<IEditorRegistry>(EditorExtensions.Editors).registerDefaultFileInput(descriptor);
interface ISerializedFileInput {
resource: string;
}
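// A serialized input is a small JSON blob, e.g. (path illustrative):
// {"resource":"file:///workspace/src/main.ts"}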
// Register Editor Input Factory
class FileEditorInputFactory implements IEditorInputFactory {
constructor() { }
public serialize(editorInput: EditorInput): string {
const fileEditorInput = <FileEditorInput>editorInput;
const fileInput: ISerializedFileInput = {
resource: fileEditorInput.getResource().toString()
};
return JSON.stringify(fileInput);
}
public deserialize(instantiationService: IInstantiationService, serializedEditorInput: string): EditorInput {
const fileInput: ISerializedFileInput = JSON.parse(serializedEditorInput);
return instantiationService.createInstance(FileEditorInput, URI.parse(fileInput.resource), void 0);
}
}
Registry.as<IEditorRegistry>(EditorExtensions.Editors).registerEditorInputFactory(FILE_EDITOR_INPUT_ID, FileEditorInputFactory);
// Register File Editor Tracker
Registry.as<IWorkbenchContributionsRegistry>(WorkbenchExtensions.Workbench).registerWorkbenchContribution(
FileEditorTracker
);
// Register Save Error Handler
Registry.as<IWorkbenchContributionsRegistry>(WorkbenchExtensions.Workbench).registerWorkbenchContribution(
SaveErrorHandler
);
// Configuration
const configurationRegistry = Registry.as<IConfigurationRegistry>(ConfigurationExtensions.Configuration);
configurationRegistry.registerConfiguration({
'id': 'files',
'order': 9,
'title': nls.localize('filesConfigurationTitle', "Files"),
'type': 'object',
'properties': {
'files.exclude': {
'type': 'object',
'description': nls.localize('exclude', "Configure glob patterns for excluding files and folders."),
'default': { '**/.git': true, '**/.svn': true, '**/.hg': true, '**/.DS_Store': true },
'additionalProperties': {
'anyOf': [
{
'type': 'boolean',
'description': nls.localize('files.exclude.boolean', "The glob pattern to match file paths against. Set to true or false to enable or disable the pattern."),
},
{
'type': 'object',
'properties': {
'when': {
'type': 'string', // expression ({ "**/*.js": { "when": "$(basename).js" } })
'pattern': '\\w*\\$\\(basename\\)\\w*',
'default': '$(basename).ext',
'description': nls.localize('files.exclude.when', 'Additional check on the siblings of a matching file. Use $(basename) as variable for the matching file name.')
}
}
}
]
}
},
'files.associations': {
'type': 'object',
'description': nls.localize('associations', "Configure file associations to languages (e.g. \"*.extension\": \"html\"). These have precedence over the default associations of the languages installed."),
},
'files.encoding': {
'type': 'string',
'enum': Object.keys(SUPPORTED_ENCODINGS),
'default': 'utf8',
'description': nls.localize('encoding', "The default character set encoding to use when reading and writing files."),
},
'files.eol': {
'type': 'string',
'enum': [
'\n',
'\r\n'
],
'default': (platform.isLinux || platform.isMacintosh) ? '\n' : '\r\n',
'description': nls.localize('eol', "The default end of line character."),
},
'files.trimTrailingWhitespace': {
'type': 'boolean',
'default': false,
'description': nls.localize('trimTrailingWhitespace', "When enabled, will trim trailing whitespace when saving a file.")
},
'files.insertFinalNewline': {
'type': 'boolean',
'default': false,<|fim▁hole|> 'type': 'string',
'type': 'string',
'enum': [AutoSaveConfiguration.OFF, AutoSaveConfiguration.AFTER_DELAY, AutoSaveConfiguration.ON_FOCUS_CHANGE, AutoSaveConfiguration.ON_WINDOW_CHANGE],
'default': AutoSaveConfiguration.OFF,
'description': nls.localize('autoSave', "Controls auto save of dirty files. Accepted values: \"{0}\", \"{1}\", \"{2}\" (editor loses focus), \"{3}\" (window loses focus). If set to \"{4}\", you can configure the delay in \"files.autoSaveDelay\".", AutoSaveConfiguration.OFF, AutoSaveConfiguration.AFTER_DELAY, AutoSaveConfiguration.ON_FOCUS_CHANGE, AutoSaveConfiguration.ON_WINDOW_CHANGE, AutoSaveConfiguration.AFTER_DELAY)
},
'files.autoSaveDelay': {
'type': 'number',
'default': 1000,
'description': nls.localize('autoSaveDelay', "Controls the delay in ms after which a dirty file is saved automatically. Only applies when \"files.autoSave\" is set to \"{0}\"", AutoSaveConfiguration.AFTER_DELAY)
},
'files.watcherExclude': {
'type': 'object',
'default': (platform.isLinux || platform.isMacintosh) ? { '**/.git/objects/**': true, '**/node_modules/**': true } : { '**/.git/objects/**': true },
'description': nls.localize('watcherExclude', "Configure glob patterns of file paths to exclude from file watching. Changing this setting requires a restart. When you experience Code consuming lots of cpu time on startup, you can exclude large folders to reduce the initial load.")
},
'files.hotExit': {
'type': 'boolean',
'default': true,
'description': nls.localize('hotExit', "Controls whether unsaved files are restored after relaunching. If this is enabled there will be no prompt to save when exiting the editor.")
}
}
});
configurationRegistry.registerConfiguration({
id: 'editor',
order: 5,
title: nls.localize('editorConfigurationTitle', "Editor"),
type: 'object',
properties: {
'editor.formatOnSave': {
'type': 'boolean',
'default': false,
'description': nls.localize('formatOnSave', "Format a file on save. A formatter must be available, the file must not be auto-saved, and editor must not be shutting down.")
}
}
});
configurationRegistry.registerConfiguration({
'id': 'explorer',
'order': 10,
'title': nls.localize('explorerConfigurationTitle', "File Explorer"),
'type': 'object',
'properties': {
'explorer.openEditors.visible': {
'type': 'number',
'description': nls.localize({ key: 'openEditorsVisible', comment: ['Open is an adjective'] }, "Number of editors shown in the Open Editors pane. Set it to 0 to hide the pane."),
'default': 9
},
'explorer.openEditors.dynamicHeight': {
'type': 'boolean',
'description': nls.localize({ key: 'dynamicHeight', comment: ['Open is an adjective'] }, "Controls if the height of the open editors section should adapt dynamically to the number of elements or not."),
'default': true
},
'explorer.autoReveal': {
'type': 'boolean',
'description': nls.localize('autoReveal', "Controls if the explorer should automatically reveal files when opening them."),
'default': true
},
'explorer.enableDragAndDrop': {
'type': 'boolean',
'description': nls.localize('enableDragAndDrop', "Controls if the explorer should allow to move files and folders via drag and drop."),
'default': true
}
}
});<|fim▁end|> | 'description': nls.localize('insertFinalNewline', "When enabled, insert a final new line at the end of the file when saving it.")
},
'files.autoSave': { |
<|file_name|>webpack.prod.js<|end_file_name|><|fim▁begin|>/**
* @author: @AngularClass
*/
const helpers = require('./helpers');
const webpackMerge = require('webpack-merge'); // used to merge webpack configs
const commonConfig = require('./webpack.common.js'); // the settings that are common to prod and dev
/**
* Webpack Plugins
*/
const DefinePlugin = require('webpack/lib/DefinePlugin');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const IgnorePlugin = require('webpack/lib/IgnorePlugin');
const LoaderOptionsPlugin = require('webpack/lib/LoaderOptionsPlugin');
const NormalModuleReplacementPlugin = require('webpack/lib/NormalModuleReplacementPlugin');
const ProvidePlugin = require('webpack/lib/ProvidePlugin');
const UglifyJsPlugin = require('webpack/lib/optimize/UglifyJsPlugin');
const OptimizeJsPlugin = require('optimize-js-plugin');
/**
* Webpack Constants
*/
const ENV = process.env.NODE_ENV = process.env.ENV = 'production';
const HOST = process.env.HOST || 'localhost';
const PORT = process.env.PORT || 8080;
const METADATA = webpackMerge(commonConfig({
env: ENV
}).metadata, {
host: HOST,
port: PORT,
ENV: ENV,
HMR: false
});
module.exports = function (env) {
return webpackMerge(commonConfig({
env: ENV
}), {
/**
* Developer tool to enhance debugging
*
* See: http://webpack.github.io/docs/configuration.html#devtool
* See: https://github.com/webpack/docs/wiki/build-performance#sourcemaps
*/
devtool: 'source-map',
/**
* Options affecting the output of the compilation.
*
* See: http://webpack.github.io/docs/configuration.html#output
*/
output: {
/**
* The output directory as absolute path (required).
*
* See: http://webpack.github.io/docs/configuration.html#output-path
*/
path: helpers.root('dist'),
/**
* Specifies the name of each output file on disk.
* IMPORTANT: You must not specify an absolute path here!
*
* See: http://webpack.github.io/docs/configuration.html#output-filename
*/
filename: '[name].[chunkhash].bundle.js',
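// e.g. (hash illustrative): main.9f86d081884c.bundle.js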
/**
* The filename of the SourceMaps for the JavaScript files.
* They are inside the output.path directory.
*
* See: http://webpack.github.io/docs/configuration.html#output-sourcemapfilename
*/
sourceMapFilename: '[name].[chunkhash].bundle.map',
/**
* The filename of non-entry chunks as relative path
* inside the output.path directory.
*
* See: http://webpack.github.io/docs/configuration.html#output-chunkfilename
*/
chunkFilename: '[id].[chunkhash].chunk.js'
},
module: {
rules: [
/*
* Extract CSS files from .src/styles directory to external CSS file
*/
{
test: /\.css$/,
loader: ExtractTextPlugin.extract({
fallback: 'style-loader',
use: 'css-loader'
}),
include: [helpers.root('src', 'styles')]
},
/*
* Extract and compile SCSS files from .src/styles directory to external CSS file
*/
<|fim▁hole|> use: 'css-loader!sass-loader'
}),
include: [helpers.root('src', 'styles')]
},
]
},
/**
* Add additional plugins to the compiler.
*
* See: http://webpack.github.io/docs/configuration.html#plugins
*/
plugins: [
/**
* Webpack plugin to optimize a JavaScript file for faster initial load
* by wrapping eagerly-invoked functions.
*
* See: https://github.com/vigneshshanmugam/optimize-js-plugin
*/
new OptimizeJsPlugin({
sourceMap: false
}),
/**
* Plugin: ExtractTextPlugin
* Description: Extracts imported CSS files into external stylesheet
*
* See: https://github.com/webpack/extract-text-webpack-plugin
*/
new ExtractTextPlugin('[name].[contenthash].css'),
/**
* Plugin: DefinePlugin
* Description: Define free variables.
* Useful for having development builds with debug logging or adding global constants.
*
* Environment helpers
*
* See: https://webpack.github.io/docs/list-of-plugins.html#defineplugin
*/
// NOTE: when adding more properties make sure you include them in custom-typings.d.ts
new DefinePlugin({
'ENV': JSON.stringify(METADATA.ENV),
'HMR': METADATA.HMR,
'process.env': {
'ENV': JSON.stringify(METADATA.ENV),
'NODE_ENV': JSON.stringify(METADATA.ENV),
'HMR': METADATA.HMR,
}
}),
/**
* Plugin: UglifyJsPlugin
* Description: Minimize all JavaScript output of chunks.
* Loaders are switched into minimizing mode.
*
* See: https://webpack.github.io/docs/list-of-plugins.html#uglifyjsplugin
*/
// NOTE: To debug prod builds uncomment //debug lines and comment //prod lines
new UglifyJsPlugin({
// beautify: true, //debug
// mangle: false, //debug
// dead_code: false, //debug
// unused: false, //debug
// deadCode: false, //debug
// compress: {
// screw_ie8: true,
// keep_fnames: true,
// drop_debugger: false,
// dead_code: false,
// unused: false
// }, // debug
// comments: true, //debug
beautify: false, //prod
output: {
comments: false
}, //prod
mangle: {
screw_ie8: true
}, //prod
compress: {
screw_ie8: true,
warnings: false,
conditionals: true,
unused: true,
comparisons: true,
sequences: true,
dead_code: true,
evaluate: true,
if_return: true,
join_vars: true,
negate_iife: false // we need this for lazy v8
},
}),
/**
* Plugin: NormalModuleReplacementPlugin
* Description: Replace resources that matches resourceRegExp with newResource
*
* See: http://webpack.github.io/docs/list-of-plugins.html#normalmodulereplacementplugin
*/
new NormalModuleReplacementPlugin(
/angular2-hmr/,
helpers.root('config/empty.js')
),
new NormalModuleReplacementPlugin(
/zone\.js(\\|\/)dist(\\|\/)long-stack-trace-zone/,
helpers.root('config/empty.js')
),
// AoT
// new NormalModuleReplacementPlugin(
// /@angular(\\|\/)upgrade/,
// helpers.root('config/empty.js')
// ),
// new NormalModuleReplacementPlugin(
// /@angular(\\|\/)compiler/,
// helpers.root('config/empty.js')
// ),
// new NormalModuleReplacementPlugin(
// /@angular(\\|\/)platform-browser-dynamic/,
// helpers.root('config/empty.js')
// ),
// new NormalModuleReplacementPlugin(
// /dom(\\|\/)debug(\\|\/)ng_probe/,
// helpers.root('config/empty.js')
// ),
// new NormalModuleReplacementPlugin(
// /dom(\\|\/)debug(\\|\/)by/,
// helpers.root('config/empty.js')
// ),
// new NormalModuleReplacementPlugin(
// /src(\\|\/)debug(\\|\/)debug_node/,
// helpers.root('config/empty.js')
// ),
// new NormalModuleReplacementPlugin(
// /src(\\|\/)debug(\\|\/)debug_renderer/,
// helpers.root('config/empty.js')
// ),
/**
* Plugin: CompressionPlugin
* Description: Prepares compressed versions of assets to serve
* them with Content-Encoding
*
* See: https://github.com/webpack/compression-webpack-plugin
*/
// install compression-webpack-plugin
// new CompressionPlugin({
// regExp: /\.css$|\.html$|\.js$|\.map$/,
// threshold: 2 * 1024
// })
/**
* Plugin LoaderOptionsPlugin (experimental)
*
* See: https://gist.github.com/sokra/27b24881210b56bbaff7
*/
new LoaderOptionsPlugin({
minimize: true,
debug: false,
options: {
/**
* Html loader advanced options
*
* See: https://github.com/webpack/html-loader#advanced-options
*/
          // TODO: Need to work around Angular 2's html syntax => #id [bind] (event) *ngFor
htmlLoader: {
minimize: false,
removeAttributeQuotes: false,
caseSensitive: true,
customAttrSurround: [
[/#/, /(?:)/],
[/\*/, /(?:)/],
[/\[?\(?/, /(?:)/]
],
customAttrAssign: [/\)?\]?=/]
},
}
}),
/**
* Plugin: BundleAnalyzerPlugin
* Description: Webpack plugin and CLI utility that represents
     * bundle content as a convenient interactive zoomable treemap
*
* `npm run build:prod -- --env.analyze` to use
*
* See: https://github.com/th0r/webpack-bundle-analyzer
*/
],
/*
* Include polyfills or mocks for various node stuff
* Description: Node configuration
*
* See: https://webpack.github.io/docs/configuration.html#node
*/
node: {
global: true,
crypto: 'empty',
process: false,
module: false,
clearImmediate: false,
setImmediate: false
}
});
}<|fim▁end|> | {
test: /\.scss$/,
loader: ExtractTextPlugin.extract({
fallback: 'style-loader',
|
<|file_name|>content-view-versions.controller.test.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 Red Hat, Inc.
*
* This software is licensed to you under the GNU General Public
* License as published by the Free Software Foundation; either version
* 2 of the License (GPLv2) or (at your option) any later version.
* There is NO WARRANTY for this software, express or implied,
* including the implied warranties of MERCHANTABILITY,
* NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
* have received a copy of GPLv2 along with this software; if not, see
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
**/
describe('Controller: ContentViewVersionsController', function() {
    var $scope;
beforeEach(module('Bastion.content-views', 'Bastion.test-mocks'));
beforeEach(inject(function($injector) {
var $controller = $injector.get('$controller'),
ContentView = $injector.get('MockResource').$new(),
translate = function (string) {
return string;
};
$scope = $injector.get('$rootScope').$new();
$scope.contentView = ContentView.get({id: 1});
$scope.reloadVersions = function () {};
$scope.taskTypes = {
promotion: "promotion",
publish: "publish"
};
spyOn($scope, 'reloadVersions');
$controller('ContentViewVersionsController', {
$scope: $scope,
translate: translate
});
}));
it("puts an empty table object on the scope", function() {
expect($scope.table).toBeDefined();
});
it("correctly hide a version's progress", function() {
var version = {active_history: [], task: {state: 'running', progressbar: {type: 'success'}}};
expect($scope.hideProgress(version)).toBe(true);
version = {active_history: [{}], task: {state: 'running', progressbar: {type: 'success'}}};
expect($scope.hideProgress(version)).toBe(false);
version = {active_history: [], task: {state: 'stopped', progressbar: {type: 'success'}}};
expect($scope.hideProgress(version)).toBe(true);
version = {active_history: [{}], task: {state: 'stopped', progressbar: {type: 'error'}}};
expect($scope.hideProgress(version)).toBe(false);
});
it("determines what history text to display", function() {
var version = {active_history: [],
last_event: {environment: {name: 'test'},
task: {label: $scope.taskTypes.promotion}
}};
expect($scope.historyText(version)).toBe("Promoted to test");
version.last_event.task.label = $scope.taskTypes.publish;
expect($scope.historyText(version)).toBe("Published");<|fim▁hole|>});<|fim▁end|> | }); |
<|file_name|>FFTSceneManager.py<|end_file_name|><|fim▁begin|>import numpy as np
from PyQt5.QtGui import QPainterPath, QPen
from PyQt5.QtWidgets import QGraphicsPathItem
from urh import settings
from urh.cythonext import path_creator<|fim▁hole|>from urh.ui.painting.SceneManager import SceneManager
class FFTSceneManager(SceneManager):
def __init__(self, parent, graphic_view=None):
self.peak = []
super().__init__(parent)
self.scene = GridScene(parent=graphic_view)
self.scene.setBackgroundBrush(settings.BGCOLOR)
self.peak_item = self.scene.addPath(QPainterPath(), QPen(settings.PEAK_COLOR, 0)) # type: QGraphicsPathItem
def show_scene_section(self, x1: float, x2: float, subpath_ranges=None, colors=None):
start = int(x1) if x1 > 0 else 0
end = int(x2) if x2 < self.num_samples else self.num_samples
paths = path_creator.create_path(np.log10(self.plot_data), start, end)
self.set_path(paths, colors=None)
try:
if len(self.peak) > 0:
peak_path = path_creator.create_path(np.log10(self.peak), start, end)[0]
self.peak_item.setPath(peak_path)
except RuntimeWarning:
pass
def init_scene(self, draw_grid=True):
self.scene.draw_grid = draw_grid
self.peak = self.plot_data if len(self.peak) < self.num_samples else np.maximum(self.peak, self.plot_data)
self.scene.setSceneRect(0, -5, self.num_samples, 10)
def clear_path(self):
for item in self.scene.items():
if isinstance(item, QGraphicsPathItem) and item != self.peak_item:
self.scene.removeItem(item)
item.setParentItem(None)
del item
def clear_peak(self):
self.peak = []
if self.peak_item:
self.peak_item.setPath(QPainterPath())
def eliminate(self):
super().eliminate()
self.peak = None
self.peak_item = None<|fim▁end|> | from urh.ui.painting.GridScene import GridScene |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* The Furnace namespace
*
* @module furnace
* @class Furnace
* @static
*/
import Validation from 'furnace/packages/furnace-validation';<|fim▁hole|>import I18n from 'furnace/packages/furnace-i18n';
import Forms from 'furnace/packages/furnace-forms';
export default {
/**
*
* @property Forms
* @type Furnace.Forms
*/
Forms : Forms,
/**
* @property I18n
* @type Furnace.I18n
*/
I18n : I18n,
/**
* @property Validation
* @type Furnace.Validation
*/
Validation: Validation
};<|fim▁end|> | |
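// Hypothetical consumption of the namespace (the sub-package APIs are not
// shown in this file):
//   import Furnace from 'furnace';
//   Furnace.Forms, Furnace.I18n and Furnace.Validation expose the bundled packages.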
<|file_name|>comfixed.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, absolute_import
from .script_interface import ScriptInterfaceHelper, script_interface_register
@script_interface_register
class ComFixed(ScriptInterfaceHelper):<|fim▁hole|> the particles after each force calculation. This
keeps the center of mass of the type fixed iff
the total momentum of the type is zero.
Parameters
----------
types : array_like
List of types of which the center of mass
should be fixed.
"""
_so_name = "ComFixed"
_so_creation_policy = "GLOBAL"<|fim▁end|> | """Fix the center of mass of specific types.
Subtracts mass-weighted fraction of the total
force action on all particles of the type from |
<|file_name|>EarlyStoppingLSTM.py<|end_file_name|><|fim▁begin|>from semeval import helper as helper
from semeval.lstms.LSTMModel import LSTMModel
import numpy
from keras.models import Sequential
from keras.layers import Dense, Activation, Bidirectional, LSTM, Dropout
from keras.callbacks import EarlyStopping
class EarlyStoppingLSTM(LSTMModel):
'''Model that can train an LSTM and apply the trainned model to unseen
data. Inherits from LSTMModel.
Instance Arguments:
self._word2vec_model - gensim.models.Word2Vec required as an argument to __init__
self._max_length = 0
self._model = None
public methods:
    fit - trains a Bi-directional LSTM with dropout and early stopping on
    the texts and sentiment values given.
    test - applies the trained model saved at self._model and returns a list
    of sentiment values for the texts passed as the argument of the method.
'''
def __init__(self, word2vec_model):
super().__init__(word2vec_model)
def fit(self, train_texts, sentiment_values):
'''Given a list of Strings and a list of floats (sentiments) or numpy
array of floats. It will return a trained LSTM model and `save` the model to
self._model for future use using self.test(texts).
The model converts the list of strings into list of numpy matrixs
which has the following dimensions:
length of the longest train text broken down into tokens
by
the vector size of the word2vec model given in the constructor
        e.g. 21, 300 if the word2vec model vector size is 300 and the length of
the longest train text in tokens is 21.
        For more details on the layers, read the source or, after training,
        visualise the model using the visualise_model function.
'''
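        # Hypothetical usage (assumes `w2v` is a trained gensim Word2Vec model):
        #   lstm = EarlyStoppingLSTM(w2v)
        #   lstm.fit(['great movie', 'awful plot'], [0.9, -0.7])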
super().fit()
max_length = self._set_max_length(train_texts)
vector_length = self._word2vec_model.vector_size
train_vectors = self._text2vector(train_texts)
model = Sequential()
model.add(Dropout(0.5, input_shape=(max_length, vector_length)))
# Output of this layer is of max_length by max_length * 2 dimension
# instead of max_length, vector_length<|fim▁hole|> model.add(Bidirectional(LSTM(max_length, activation='softsign')))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('linear'))
model.compile(loss='mse',
optimizer='rmsprop',
metrics=['cosine_proximity'],
clipvalue=5)
early_stopping = EarlyStopping(monitor='val_loss', patience=10)
model.fit(train_vectors, sentiment_values, validation_split=0.1,
                  callbacks=[early_stopping], nb_epoch=100)
return self._set_model(model)<|fim▁end|> | model.add(Bidirectional(LSTM(max_length, activation='softsign',
return_sequences=True)))
model.add(Dropout(0.5)) |
<|file_name|>endpoint.ts<|end_file_name|><|fim▁begin|>import * as _ from 'lodash';
import * as rTracer from 'cls-rtracer';
import { Logger } from 'winston';
import { createServiceConfig } from '@restorecommerce/service-config';
const middlewareClsTracer = rTracer.koaMiddleware({
useHeader: true,
headerName: 'x-request-id'
});
const cfg = createServiceConfig(process.cwd());
const oneOfFieldsConfig = cfg.get('oneOfFields');
/**
* calls each middleware
* @param middleware
*/
export const chainMiddleware = (middleware: any): any => {
return async (request, next: any): Promise<any> => {
let n = next;
if (next) {
for (let i = middleware.length - 1; i >= 0; i -= 1) {
const reqClone = _.clone(request);
Object.assign(request, { req: reqClone }, { res: reqClone });
const result = await middleware[i](request, async () => {
const grpcRequest = { request: request.request };
delete grpcRequest.request.headers;
return await next(grpcRequest);
});
if (i == 0) {
return result;
}
}
} else {
n = request;
for (let i = middleware.length - 1; i >= 1; i -= 1) {
n = await middleware[i](n);
}
}
return await middleware[0](n);
};
};
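// Hypothetical composition (names are illustrative): each middleware receives
// the request and a `next` continuation, mirroring the chaining logic above.
//   const chained = chainMiddleware([logRequest, checkAccess]);
//   const result = await chained(request, service.read.bind(service));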
// iterate iterates an object recursively
// and deletes an object's property if
// it matches the oneOfNotUsed field
const iterate = (obj, oneOfNotUsed) => {
Object.keys(obj).forEach(key => {
if (key === oneOfNotUsed) {
delete (obj[key]);
}
if (typeof obj[key] === 'object' && !_.isNil(obj[key])) {
iterate(obj[key], oneOfNotUsed);
}
});
};
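// For example (hypothetical data):
//   iterate({ a: { residential_address: { city: 'X' } } }, 'residential_address')
// deletes every 'residential_address' key at any depth, mutating the object in place.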
const removeBufferFields = (object, ctx) => {
// Check if the cfg file contains any bufferFields and remove them
if (!object) {
object = {};
}
if (ctx.config && ctx.config.services) {
const service = ctx.config.services;
const servicesKeys = Object.keys(ctx.config.services);
for (let key of servicesKeys) {
// bufferFields
if (service[key] && service[key][ctx.method] && service[key][ctx.method].bufferFields) {
let bufferFields = service[key][ctx.method].bufferFields;
const bufferKeys = Object.keys(bufferFields);
for (let key of bufferKeys) {
const bufferField = bufferFields[key];
// if any bufferField is found
// delete it from the cloned object
if (object[bufferField]) {
delete object[bufferField];
}
// delete it from the test case
if (object.items && object.items[0]
&& object.items[0].data) {
delete object.items[0].data;
}
}
}
// maskFields
if (service[key] && service[key][ctx.method] && service[key][ctx.method].maskFields) {
let maskFields = service[key][ctx.method].maskFields;
for (let maskField of maskFields) {
// if any maskField is configured, mask it
if (object[maskField]) {
const maskLength = object[maskField].length;
object[maskField] = '*'.repeat(maskLength);
}
// delete it from the test case
if (object.items && object.items[0]
&& object.items[0].data) {
delete object.items[0].data;
}
}
}
}
}
return object;
};
/**
* Calls middleware and business logic.
* @param middleware
* @param service
* @param transportName
* @param methodName
* @param logger
* @param cfg
*/
export const makeEndpoint = (middleware: any[], service: any, transportName: string,
methodName: string, logger: Logger, cfg?: any): any => {
return async (request: any, context: any): Promise<any> => {
const ctx = context || {};
ctx.transport = transportName;
ctx.method = methodName;
ctx.logger = logger;
ctx.config = cfg;
let e;
let rid = '';
let middlewareChain = [];
if (middleware && middleware.length > 0) {
middlewareChain.push(middleware);
}
    // Check the configuration to see whether oneOf fields are configured for a
    // resource, and then remove the unused oneOf fields from the request items
    // to avoid gRPC protobuf errors.
    // To avoid accidental removal it is important not to have regular fields
    // whose names collide with any of the oneOf fields.
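    // A hypothetical `oneOfFields` entry, matching the shape the code below
    // expects (resource name -> oneOf type field -> list of exclusive fields):
    //   { "address": { "address_type": ["business_address", "residential_address"] } }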
if (oneOfFieldsConfig && !_.isEmpty(oneOfFieldsConfig)) {
if (ctx.method) {
if (
ctx.method === 'create' ||
ctx.method === 'update' ||
ctx.method === 'upsert'
) {
// Read configuration for requested resource and make typeToFieldsMap
// oneOfType => oneOfFields[]
let oneOfFields = [];
let typeToFieldsMap = new Map<string, string[]>();
if (service && service.name) {
let name = service.name;
if (name in oneOfFieldsConfig) {
oneOfFields = oneOfFieldsConfig[name];
let oneOfFieldsKeys = Object.keys(oneOfFields);
for (let oneOfFieldsKey of oneOfFieldsKeys) {
typeToFieldsMap.set(oneOfFieldsKey, oneOfFields[oneOfFieldsKey]);
}
}
}
// Iterate through all the items and for each item check which of the
// oneOf fields is set (can be multiple oneOf fields).
// Then push the ones not being used in a list.
// Finally based on this list remove fields which are not used
// (recursively) from each item.
if (!_.isEmpty(typeToFieldsMap)) {
if (request && request.request && request.request.items) {
for (let item of request.request.items) {
let oneOfNotUsedList = [];
let itemKeys = Object.keys(item);
for (let itemKey of itemKeys) {<|fim▁hole|> if (field !== oneOfUsed) {
oneOfNotUsedList.push(field);
}
}
}
}
for (let oneOfNotUsed of oneOfNotUsedList) {
iterate(item, oneOfNotUsed);
}
}
}
}
}
}
}
/*
bufferFields are defined in the config under each service's method as:
"bufferFields": {
"Request": "context"
}
As described in the proto file of each service,
Request is the type of message and context is the type of data being sent.
*/
// deep clone the request
const deepClone = _.cloneDeep(request);
let Request = deepClone.request;
try {
if (Request && Request.request) {
Request = Request.request;
}
      Request = removeBufferFields(Request, ctx);
logger.debug('invoking endpoint with request:', Request);
if (request && request.request && request.request.headers
&& request.request.headers['x-request-id']) {
rid = request.request.headers['x-request-id'];
}
if (rid) {
middlewareChain.push(middlewareClsTracer);
}
if (middlewareChain.length > 0) {
logger.verbose(`[rid: ${rid}] received request to method ${ctx.method} over transport ${ctx.transport}`, Request);
const chain = chainMiddleware(middlewareChain);
const result = await chain(request, service[methodName].bind(service));
let response = _.cloneDeep(result);
        response = removeBufferFields(response, ctx);
logger.verbose(`[rid: ${rid}] request to method ${ctx.method} over transport ${ctx.transport} response`, { Request, response });
return result;
} else {
e = service[methodName].bind(service);
}
logger.verbose(`received request to method ${ctx.method} over transport ${ctx.transport}`,
Request);
const result = await e(request, ctx);
let response = _.cloneDeep(result);
      response = removeBufferFields(response, ctx);
logger.verbose(`request to method ${ctx.method} over transport ${ctx.transport} response`,
{ Request, response });
return result;
} catch (err) {
if (rid) {
rid = `[rid: ${rid}]`;
}
if (err instanceof SyntaxError || err instanceof RangeError ||
err instanceof ReferenceError || err instanceof TypeError) {
logger.error(`${rid} request to method ${ctx.method} over transport ${ctx.transport} error`,
{
Request,
err: err.stack
});
} else {
logger.info(`${rid} request to method ${ctx.method} over transport ${ctx.transport} error`,
{ Request, err });
}
throw err;
}
};
};<|fim▁end|> | if (typeToFieldsMap.has(itemKey)) {
let oneOfUsed = item[itemKey];
let fieldsArr = typeToFieldsMap.get(itemKey);
for (let field of fieldsArr) { |
<|file_name|>densenet.py<|end_file_name|><|fim▁begin|>"""Pytorch Densenet implementation w/ tweaks
This file is a copy of https://github.com/pytorch/vision 'densenet.py' (BSD-3-Clause) with
fixed kwargs passthrough and addition of dynamic global avg/max pool.
"""
import re
from collections import OrderedDict
from functools import partial
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint as cp
from torch.jit.annotations import List
from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
from .helpers import build_model_with_cfg
from .layers import BatchNormAct2d, create_norm_act, BlurPool2d, create_classifier<|fim▁hole|>__all__ = ['DenseNet']
def _cfg(url=''):
return {
'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
'crop_pct': 0.875, 'interpolation': 'bicubic',
'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
'first_conv': 'features.conv0', 'classifier': 'classifier',
}
default_cfgs = {
'densenet121': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/densenet121_ra-50efcf5c.pth'),
'densenet121d': _cfg(url=''),
'densenetblur121d': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/densenetblur121d_ra-100dcfbc.pth'),
'densenet169': _cfg(url='https://download.pytorch.org/models/densenet169-b2777c0a.pth'),
'densenet201': _cfg(url='https://download.pytorch.org/models/densenet201-c1103571.pth'),
'densenet161': _cfg(url='https://download.pytorch.org/models/densenet161-8d451a50.pth'),
'densenet264': _cfg(url=''),
'densenet264d_iabn': _cfg(url=''),
'tv_densenet121': _cfg(url='https://download.pytorch.org/models/densenet121-a639ec97.pth'),
}
class DenseLayer(nn.Module):
def __init__(self, num_input_features, growth_rate, bn_size, norm_layer=BatchNormAct2d,
drop_rate=0., memory_efficient=False):
super(DenseLayer, self).__init__()
self.add_module('norm1', norm_layer(num_input_features)),
self.add_module('conv1', nn.Conv2d(
num_input_features, bn_size * growth_rate, kernel_size=1, stride=1, bias=False)),
self.add_module('norm2', norm_layer(bn_size * growth_rate)),
self.add_module('conv2', nn.Conv2d(
bn_size * growth_rate, growth_rate, kernel_size=3, stride=1, padding=1, bias=False)),
self.drop_rate = float(drop_rate)
self.memory_efficient = memory_efficient
def bottleneck_fn(self, xs):
# type: (List[torch.Tensor]) -> torch.Tensor
concated_features = torch.cat(xs, 1)
bottleneck_output = self.conv1(self.norm1(concated_features)) # noqa: T484
return bottleneck_output
# todo: rewrite when torchscript supports any
def any_requires_grad(self, x):
# type: (List[torch.Tensor]) -> bool
for tensor in x:
if tensor.requires_grad:
return True
return False
@torch.jit.unused # noqa: T484
def call_checkpoint_bottleneck(self, x):
# type: (List[torch.Tensor]) -> torch.Tensor
def closure(*xs):
return self.bottleneck_fn(xs)
return cp.checkpoint(closure, *x)
@torch.jit._overload_method # noqa: F811
def forward(self, x):
# type: (List[torch.Tensor]) -> (torch.Tensor)
pass
@torch.jit._overload_method # noqa: F811
def forward(self, x):
# type: (torch.Tensor) -> (torch.Tensor)
pass
# torchscript does not yet support *args, so we overload method
# allowing it to take either a List[Tensor] or single Tensor
def forward(self, x): # noqa: F811
if isinstance(x, torch.Tensor):
prev_features = [x]
else:
prev_features = x
if self.memory_efficient and self.any_requires_grad(prev_features):
if torch.jit.is_scripting():
raise Exception("Memory Efficient not supported in JIT")
bottleneck_output = self.call_checkpoint_bottleneck(prev_features)
else:
bottleneck_output = self.bottleneck_fn(prev_features)
new_features = self.conv2(self.norm2(bottleneck_output))
if self.drop_rate > 0:
new_features = F.dropout(new_features, p=self.drop_rate, training=self.training)
return new_features
class DenseBlock(nn.ModuleDict):
_version = 2
def __init__(self, num_layers, num_input_features, bn_size, growth_rate, norm_layer=nn.ReLU,
drop_rate=0., memory_efficient=False):
super(DenseBlock, self).__init__()
for i in range(num_layers):
layer = DenseLayer(
num_input_features + i * growth_rate,
growth_rate=growth_rate,
bn_size=bn_size,
norm_layer=norm_layer,
drop_rate=drop_rate,
memory_efficient=memory_efficient,
)
self.add_module('denselayer%d' % (i + 1), layer)
def forward(self, init_features):
features = [init_features]
for name, layer in self.items():
new_features = layer(features)
features.append(new_features)
return torch.cat(features, 1)
class DenseTransition(nn.Sequential):
def __init__(self, num_input_features, num_output_features, norm_layer=nn.BatchNorm2d, aa_layer=None):
super(DenseTransition, self).__init__()
self.add_module('norm', norm_layer(num_input_features))
self.add_module('conv', nn.Conv2d(
num_input_features, num_output_features, kernel_size=1, stride=1, bias=False))
if aa_layer is not None:
self.add_module('pool', aa_layer(num_output_features, stride=2))
else:
self.add_module('pool', nn.AvgPool2d(kernel_size=2, stride=2))
class DenseNet(nn.Module):
r"""Densenet-BC model class, based on
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_
Args:
growth_rate (int) - how many filters to add each layer (`k` in paper)
block_config (list of 4 ints) - how many layers in each pooling block
        bn_size (int) - multiplicative factor for number of bottleneck layers
(i.e. bn_size * k features in the bottleneck layer)
drop_rate (float) - dropout rate after each dense layer
num_classes (int) - number of classification classes
memory_efficient (bool) - If True, uses checkpointing. Much more memory efficient,
but slower. Default: *False*. See `"paper" <https://arxiv.org/pdf/1707.06990.pdf>`_
"""
def __init__(self, growth_rate=32, block_config=(6, 12, 24, 16), bn_size=4, stem_type='',
num_classes=1000, in_chans=3, global_pool='avg',
norm_layer=BatchNormAct2d, aa_layer=None, drop_rate=0, memory_efficient=False,
aa_stem_only=True):
self.num_classes = num_classes
self.drop_rate = drop_rate
super(DenseNet, self).__init__()
# Stem
deep_stem = 'deep' in stem_type # 3x3 deep stem
num_init_features = growth_rate * 2
if aa_layer is None:
stem_pool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
else:
stem_pool = nn.Sequential(*[
nn.MaxPool2d(kernel_size=3, stride=1, padding=1),
aa_layer(channels=num_init_features, stride=2)])
if deep_stem:
stem_chs_1 = stem_chs_2 = growth_rate
if 'tiered' in stem_type:
stem_chs_1 = 3 * (growth_rate // 4)
stem_chs_2 = num_init_features if 'narrow' in stem_type else 6 * (growth_rate // 4)
self.features = nn.Sequential(OrderedDict([
('conv0', nn.Conv2d(in_chans, stem_chs_1, 3, stride=2, padding=1, bias=False)),
('norm0', norm_layer(stem_chs_1)),
('conv1', nn.Conv2d(stem_chs_1, stem_chs_2, 3, stride=1, padding=1, bias=False)),
('norm1', norm_layer(stem_chs_2)),
('conv2', nn.Conv2d(stem_chs_2, num_init_features, 3, stride=1, padding=1, bias=False)),
('norm2', norm_layer(num_init_features)),
('pool0', stem_pool),
]))
else:
self.features = nn.Sequential(OrderedDict([
('conv0', nn.Conv2d(in_chans, num_init_features, kernel_size=7, stride=2, padding=3, bias=False)),
('norm0', norm_layer(num_init_features)),
('pool0', stem_pool),
]))
self.feature_info = [
dict(num_chs=num_init_features, reduction=2, module=f'features.norm{2 if deep_stem else 0}')]
current_stride = 4
# DenseBlocks
num_features = num_init_features
for i, num_layers in enumerate(block_config):
block = DenseBlock(
num_layers=num_layers,
num_input_features=num_features,
bn_size=bn_size,
growth_rate=growth_rate,
norm_layer=norm_layer,
drop_rate=drop_rate,
memory_efficient=memory_efficient
)
module_name = f'denseblock{(i + 1)}'
self.features.add_module(module_name, block)
num_features = num_features + num_layers * growth_rate
transition_aa_layer = None if aa_stem_only else aa_layer
if i != len(block_config) - 1:
self.feature_info += [
dict(num_chs=num_features, reduction=current_stride, module='features.' + module_name)]
current_stride *= 2
trans = DenseTransition(
num_input_features=num_features, num_output_features=num_features // 2,
norm_layer=norm_layer, aa_layer=transition_aa_layer)
self.features.add_module(f'transition{i + 1}', trans)
num_features = num_features // 2
# Final batch norm
self.features.add_module('norm5', norm_layer(num_features))
self.feature_info += [dict(num_chs=num_features, reduction=current_stride, module='features.norm5')]
self.num_features = num_features
# Linear layer
self.global_pool, self.classifier = create_classifier(
self.num_features, self.num_classes, pool_type=global_pool)
# Official init from torch repo.
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.constant_(m.bias, 0)
def get_classifier(self):
return self.classifier
def reset_classifier(self, num_classes, global_pool='avg'):
self.num_classes = num_classes
self.global_pool, self.classifier = create_classifier(
self.num_features, self.num_classes, pool_type=global_pool)
def forward_features(self, x):
return self.features(x)
def forward(self, x):
x = self.forward_features(x)
x = self.global_pool(x)
# both classifier and block drop?
# if self.drop_rate > 0.:
# x = F.dropout(x, p=self.drop_rate, training=self.training)
x = self.classifier(x)
return x
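# A minimal sketch of direct construction (the registered factory functions
# below are the usual entry point):
#   model = DenseNet(growth_rate=32, block_config=(6, 12, 24, 16), num_classes=10)
#   logits = model(torch.randn(1, 3, 224, 224))  # -> tensor of shape (1, 10)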
def _filter_torchvision_pretrained(state_dict):
pattern = re.compile(
r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
for key in list(state_dict.keys()):
res = pattern.match(key)
if res:
new_key = res.group(1) + res.group(2)
state_dict[new_key] = state_dict[key]
del state_dict[key]
return state_dict
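# e.g. 'features.denseblock1.denselayer1.norm.1.weight'
#   -> 'features.denseblock1.denselayer1.norm1.weight'
# (old torchvision checkpoints nest norm/conv parameters under numeric sub-keys)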
def _create_densenet(variant, growth_rate, block_config, pretrained, **kwargs):
kwargs['growth_rate'] = growth_rate
kwargs['block_config'] = block_config
return build_model_with_cfg(
DenseNet, variant, pretrained,
default_cfg=default_cfgs[variant],
feature_cfg=dict(flatten_sequential=True), pretrained_filter_fn=_filter_torchvision_pretrained,
**kwargs)
@register_model
def densenet121(pretrained=False, **kwargs):
r"""Densenet-121 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'densenet121', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, **kwargs)
return model
@register_model
def densenetblur121d(pretrained=False, **kwargs):
r"""Densenet-121 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'densenetblur121d', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, stem_type='deep',
aa_layer=BlurPool2d, **kwargs)
return model
@register_model
def densenet121d(pretrained=False, **kwargs):
r"""Densenet-121 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'densenet121d', growth_rate=32, block_config=(6, 12, 24, 16), stem_type='deep',
pretrained=pretrained, **kwargs)
return model
@register_model
def densenet169(pretrained=False, **kwargs):
r"""Densenet-169 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'densenet169', growth_rate=32, block_config=(6, 12, 32, 32), pretrained=pretrained, **kwargs)
return model
@register_model
def densenet201(pretrained=False, **kwargs):
r"""Densenet-201 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'densenet201', growth_rate=32, block_config=(6, 12, 48, 32), pretrained=pretrained, **kwargs)
return model
@register_model
def densenet161(pretrained=False, **kwargs):
r"""Densenet-161 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'densenet161', growth_rate=48, block_config=(6, 12, 36, 24), pretrained=pretrained, **kwargs)
return model
@register_model
def densenet264(pretrained=False, **kwargs):
r"""Densenet-264 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'densenet264', growth_rate=48, block_config=(6, 12, 64, 48), pretrained=pretrained, **kwargs)
return model
@register_model
def densenet264d_iabn(pretrained=False, **kwargs):
r"""Densenet-264 model with deep stem and Inplace-ABN
"""
def norm_act_fn(num_features, **kwargs):
return create_norm_act('iabn', num_features, **kwargs)
model = _create_densenet(
'densenet264d_iabn', growth_rate=48, block_config=(6, 12, 64, 48), stem_type='deep',
norm_layer=norm_act_fn, pretrained=pretrained, **kwargs)
return model
@register_model
def tv_densenet121(pretrained=False, **kwargs):
r"""Densenet-121 model with original Torchvision weights, from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'tv_densenet121', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, **kwargs)
return model<|fim▁end|> | from .registry import register_model
|
<|file_name|>Factory.hpp<|end_file_name|><|fim▁begin|>/* ************************************************************************ */
/* Georgiev Lab (c) 2015 */
/* ************************************************************************ */
/* Department of Cybernetics */
/* Faculty of Applied Sciences */
/* University of West Bohemia in Pilsen */
/* ************************************************************************ */
/* */
/* This file is part of CeCe. */
/* */
/* CeCe is free software: you can redistribute it and/or modify */
/* it under the terms of the GNU General Public License as published by */
/* the Free Software Foundation, either version 3 of the License, or */
/* (at your option) any later version. */
/* */
/* CeCe is distributed in the hope that it will be useful, */
/* but WITHOUT ANY WARRANTY; without even the implied warranty of */
/* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
/* GNU General Public License for more details. */
/* */
/* You should have received a copy of the GNU General Public License */
/* along with CeCe. If not, see <http://www.gnu.org/licenses/>. */
/* */
/* ************************************************************************ */
#pragma once
/* ************************************************************************ */
// CeCe
#include "cece/core/Factory.hpp"
#include "cece/init/Initializer.hpp"
/* ************************************************************************ */
namespace cece {
namespace init {
/* ************************************************************************ */
/**
* @brief Initializer factory interface.
*/
using Factory = Factory<Initializer>;
/* ************************************************************************ */
/**
* @brief Initializer factory for specific module.
*
* @tparam InitializerType
*/
template<typename InitializerType>
using FactoryTyped = FactoryTyped<core::Factory, InitializerType, Initializer>;
/* ************************************************************************ */
/**
* @brief Initializer factory with callable backend.
*
* @tparam Callable
*/
template<typename Callable>
using FactoryCallable = FactoryCallable<core::Factory, Callable, Initializer>;
/* ************************************************************************ */
/**
* @brief Make callable module factory.
*
* @param callable Callable object.
*
* @return Callable module factory.
*/
template<typename Callable>
FactoryCallable<Callable> makeCallableFactory(Callable callable) noexcept<|fim▁hole|>{
return FactoryCallable<Callable>{std::move(callable)};
}
/* ************************************************************************ */
}
}
/* ************************************************************************ */<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import absolute_import
<|fim▁hole|>from .pod_response import PodResponse # noqa<|fim▁end|> | from .build_response import BuildResponse # noqa |
<|file_name|>chevron-left.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';<|fim▁hole|><|fim▁end|> | import { IconBaseProps } from 'react-icon-base';
export default class IoChevronLeft extends React.Component<IconBaseProps, any> { } |
<|file_name|>StarContentActivity.java<|end_file_name|><|fim▁begin|>package cn.edu.ustc.appseed.clubseed.activity;
/*
* Show the detail content of the event which you select.
* Why to use a custom toolbar instead of the default toolbar in ActionBarActivity?
* Because the custom toolbar is very convenient to edit it and good to unify the GUI.
*/
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.alibaba.fastjson.JSON;
import cn.edu.ustc.appseed.clubseed.R;
import cn.edu.ustc.appseed.clubseed.data.Event;
import cn.edu.ustc.appseed.clubseed.data.ViewActivityPhp;
import cn.edu.ustc.appseed.clubseed.fragment.NoticeFragment;
import cn.edu.ustc.appseed.clubseed.fragment.StarFragment;
import cn.edu.ustc.appseed.clubseed.utils.AppUtils;
public class StarContentActivity extends ActionBarActivity {
private Toolbar toolbar;
private TextView mTextView;
private ImageView mImageView;
private TextView nullTextView;
private Event mEvent;
int ID;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_event_content);
toolbar = (Toolbar) findViewById(R.id.toolbar);<|fim▁hole|> toolbar.setNavigationIcon(getResources().getDrawable(R.drawable.ic_arrow_back));
setSupportActionBar(toolbar);
toolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onBackPressed();
}
});
}
ID = getIntent().getIntExtra(StarFragment.EXTRA_ID, 0);
mEvent = AppUtils.savedEvents.get(ID);
setTitle(mEvent.getTitle());
mTextView.setText(mEvent.getContent());
mImageView.setImageBitmap(mEvent.getBitmap());
}
}<|fim▁end|> | mTextView = (TextView) findViewById(R.id.textViewEventContent);
mImageView = (ImageView) findViewById(R.id.imgContent);
if (toolbar != null) { |
<|file_name|>config.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"fmt"
"time"
v1 "k8s.io/api/core/v1"
discovery "k8s.io/api/discovery/v1"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
coreinformers "k8s.io/client-go/informers/core/v1"
discoveryinformers "k8s.io/client-go/informers/discovery/v1"
"k8s.io/client-go/tools/cache"
"k8s.io/klog/v2"
)
// ServiceHandler is an abstract interface of objects which receive
// notifications about service object changes.
type ServiceHandler interface {
// OnServiceAdd is called whenever creation of new service object
// is observed.
OnServiceAdd(service *v1.Service)
// OnServiceUpdate is called whenever modification of an existing
// service object is observed.
OnServiceUpdate(oldService, service *v1.Service)
// OnServiceDelete is called whenever deletion of an existing service
// object is observed.
OnServiceDelete(service *v1.Service)
// OnServiceSynced is called once all the initial event handlers were
// called and the state is fully propagated to local cache.
OnServiceSynced()
}
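// A hypothetical wiring of the types above (informer construction omitted):
//
//	svcConfig := NewServiceConfig(informerFactory.Core().V1().Services(), resyncPeriod)
//	svcConfig.RegisterEventHandler(proxier) // proxier implements ServiceHandler
//	go svcConfig.Run(stopCh)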
// EndpointsHandler is an abstract interface of objects which receive
// notifications about endpoints object changes.
type EndpointsHandler interface {
// OnEndpointsAdd is called whenever creation of new endpoints object
// is observed.
OnEndpointsAdd(endpoints *v1.Endpoints)
// OnEndpointsUpdate is called whenever modification of an existing
// endpoints object is observed.
OnEndpointsUpdate(oldEndpoints, endpoints *v1.Endpoints)
// OnEndpointsDelete is called whenever deletion of an existing endpoints
// object is observed.
OnEndpointsDelete(endpoints *v1.Endpoints)
// OnEndpointsSynced is called once all the initial event handlers were
// called and the state is fully propagated to local cache.
OnEndpointsSynced()
}
// EndpointSliceHandler is an abstract interface of objects which receive
// notifications about endpoint slice object changes.
type EndpointSliceHandler interface {
// OnEndpointSliceAdd is called whenever creation of new endpoint slice
// object is observed.
OnEndpointSliceAdd(endpointSlice *discovery.EndpointSlice)
// OnEndpointSliceUpdate is called whenever modification of an existing
// endpoint slice object is observed.
OnEndpointSliceUpdate(oldEndpointSlice, newEndpointSlice *discovery.EndpointSlice)
// OnEndpointSliceDelete is called whenever deletion of an existing
// endpoint slice object is observed.
OnEndpointSliceDelete(endpointSlice *discovery.EndpointSlice)
// OnEndpointSlicesSynced is called once all the initial event handlers were
// called and the state is fully propagated to local cache.
OnEndpointSlicesSynced()
}
// NoopEndpointSliceHandler is a noop handler for proxiers that have not yet
// implemented a full EndpointSliceHandler.
type NoopEndpointSliceHandler struct{}
// OnEndpointSliceAdd is a noop handler for EndpointSlice creates.
func (*NoopEndpointSliceHandler) OnEndpointSliceAdd(endpointSlice *discovery.EndpointSlice) {}
// OnEndpointSliceUpdate is a noop handler for EndpointSlice updates.
func (*NoopEndpointSliceHandler) OnEndpointSliceUpdate(oldEndpointSlice, newEndpointSlice *discovery.EndpointSlice) {
}
// OnEndpointSliceDelete is a noop handler for EndpointSlice deletes.
func (*NoopEndpointSliceHandler) OnEndpointSliceDelete(endpointSlice *discovery.EndpointSlice) {}
// OnEndpointSlicesSynced is a noop handler for EndpointSlice syncs.
func (*NoopEndpointSliceHandler) OnEndpointSlicesSynced() {}
var _ EndpointSliceHandler = &NoopEndpointSliceHandler{}
// EndpointsConfig tracks a set of endpoints configurations.
type EndpointsConfig struct {
listerSynced cache.InformerSynced
eventHandlers []EndpointsHandler
}
// NewEndpointsConfig creates a new EndpointsConfig.
func NewEndpointsConfig(endpointsInformer coreinformers.EndpointsInformer, resyncPeriod time.Duration) *EndpointsConfig {
result := &EndpointsConfig{
listerSynced: endpointsInformer.Informer().HasSynced,
}
endpointsInformer.Informer().AddEventHandlerWithResyncPeriod(
cache.ResourceEventHandlerFuncs{
AddFunc: result.handleAddEndpoints,
UpdateFunc: result.handleUpdateEndpoints,
DeleteFunc: result.handleDeleteEndpoints,
},
resyncPeriod,
)
return result
}
// RegisterEventHandler registers a handler which is called on every endpoints change.
func (c *EndpointsConfig) RegisterEventHandler(handler EndpointsHandler) {
c.eventHandlers = append(c.eventHandlers, handler)
}
// Run waits for cache synced and invokes handlers after syncing.
func (c *EndpointsConfig) Run(stopCh <-chan struct{}) {
klog.Info("Starting endpoints config controller")
if !cache.WaitForNamedCacheSync("endpoints config", stopCh, c.listerSynced) {
return
}
for i := range c.eventHandlers {
klog.V(3).Infof("Calling handler.OnEndpointsSynced()")
c.eventHandlers[i].OnEndpointsSynced()
}
}
func (c *EndpointsConfig) handleAddEndpoints(obj interface{}) {
endpoints, ok := obj.(*v1.Endpoints)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
for i := range c.eventHandlers {
klog.V(4).Infof("Calling handler.OnEndpointsAdd")
c.eventHandlers[i].OnEndpointsAdd(endpoints)
}
}
func (c *EndpointsConfig) handleUpdateEndpoints(oldObj, newObj interface{}) {
oldEndpoints, ok := oldObj.(*v1.Endpoints)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", oldObj))
return
}
endpoints, ok := newObj.(*v1.Endpoints)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", newObj))
return
}
for i := range c.eventHandlers {
klog.V(4).Infof("Calling handler.OnEndpointsUpdate")
c.eventHandlers[i].OnEndpointsUpdate(oldEndpoints, endpoints)
}
}
func (c *EndpointsConfig) handleDeleteEndpoints(obj interface{}) {
endpoints, ok := obj.(*v1.Endpoints)
if !ok {
tombstone, ok := obj.(cache.DeletedFinalStateUnknown)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
if endpoints, ok = tombstone.Obj.(*v1.Endpoints); !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
}
for i := range c.eventHandlers {
klog.V(4).Infof("Calling handler.OnEndpointsDelete")
c.eventHandlers[i].OnEndpointsDelete(endpoints)
}
}
// EndpointSliceConfig tracks a set of endpoints configurations.
type EndpointSliceConfig struct {
listerSynced cache.InformerSynced
eventHandlers []EndpointSliceHandler
}
// NewEndpointSliceConfig creates a new EndpointSliceConfig.
func NewEndpointSliceConfig(endpointSliceInformer discoveryinformers.EndpointSliceInformer, resyncPeriod time.Duration) *EndpointSliceConfig {
result := &EndpointSliceConfig{
listerSynced: endpointSliceInformer.Informer().HasSynced,
}
endpointSliceInformer.Informer().AddEventHandlerWithResyncPeriod(
cache.ResourceEventHandlerFuncs{
AddFunc: result.handleAddEndpointSlice,
UpdateFunc: result.handleUpdateEndpointSlice,
DeleteFunc: result.handleDeleteEndpointSlice,
},
resyncPeriod,
)
return result
}
// RegisterEventHandler registers a handler which is called on every endpoint slice change.
func (c *EndpointSliceConfig) RegisterEventHandler(handler EndpointSliceHandler) {
c.eventHandlers = append(c.eventHandlers, handler)
}
// Run waits for cache synced and invokes handlers after syncing.
func (c *EndpointSliceConfig) Run(stopCh <-chan struct{}) {
klog.Info("Starting endpoint slice config controller")
<|fim▁hole|> return
}
for _, h := range c.eventHandlers {
klog.V(3).Infof("Calling handler.OnEndpointSlicesSynced()")
h.OnEndpointSlicesSynced()
}
}
func (c *EndpointSliceConfig) handleAddEndpointSlice(obj interface{}) {
endpointSlice, ok := obj.(*discovery.EndpointSlice)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %T", obj))
return
}
for _, h := range c.eventHandlers {
klog.V(4).Infof("Calling handler.OnEndpointSliceAdd %+v", endpointSlice)
h.OnEndpointSliceAdd(endpointSlice)
}
}
func (c *EndpointSliceConfig) handleUpdateEndpointSlice(oldObj, newObj interface{}) {
oldEndpointSlice, ok := oldObj.(*discovery.EndpointSlice)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %T", newObj))
return
}
newEndpointSlice, ok := newObj.(*discovery.EndpointSlice)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %T", newObj))
return
}
for _, h := range c.eventHandlers {
klog.V(4).Infof("Calling handler.OnEndpointSliceUpdate")
h.OnEndpointSliceUpdate(oldEndpointSlice, newEndpointSlice)
}
}
func (c *EndpointSliceConfig) handleDeleteEndpointSlice(obj interface{}) {
endpointSlice, ok := obj.(*discovery.EndpointSlice)
if !ok {
tombstone, ok := obj.(cache.DeletedFinalStateUnknown)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %T", obj))
return
}
if endpointSlice, ok = tombstone.Obj.(*discovery.EndpointSlice); !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %T", obj))
return
}
}
for _, h := range c.eventHandlers {
klog.V(4).Infof("Calling handler.OnEndpointsDelete")
h.OnEndpointSliceDelete(endpointSlice)
}
}
// ServiceConfig tracks a set of service configurations.
type ServiceConfig struct {
listerSynced cache.InformerSynced
eventHandlers []ServiceHandler
}
// NewServiceConfig creates a new ServiceConfig.
func NewServiceConfig(serviceInformer coreinformers.ServiceInformer, resyncPeriod time.Duration) *ServiceConfig {
result := &ServiceConfig{
listerSynced: serviceInformer.Informer().HasSynced,
}
serviceInformer.Informer().AddEventHandlerWithResyncPeriod(
cache.ResourceEventHandlerFuncs{
AddFunc: result.handleAddService,
UpdateFunc: result.handleUpdateService,
DeleteFunc: result.handleDeleteService,
},
resyncPeriod,
)
return result
}
// RegisterEventHandler registers a handler which is called on every service change.
func (c *ServiceConfig) RegisterEventHandler(handler ServiceHandler) {
c.eventHandlers = append(c.eventHandlers, handler)
}
// Run waits for cache synced and invokes handlers after syncing.
func (c *ServiceConfig) Run(stopCh <-chan struct{}) {
klog.Info("Starting service config controller")
if !cache.WaitForNamedCacheSync("service config", stopCh, c.listerSynced) {
return
}
for i := range c.eventHandlers {
klog.V(3).Info("Calling handler.OnServiceSynced()")
c.eventHandlers[i].OnServiceSynced()
}
}
func (c *ServiceConfig) handleAddService(obj interface{}) {
service, ok := obj.(*v1.Service)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
for i := range c.eventHandlers {
klog.V(4).Info("Calling handler.OnServiceAdd")
c.eventHandlers[i].OnServiceAdd(service)
}
}
func (c *ServiceConfig) handleUpdateService(oldObj, newObj interface{}) {
oldService, ok := oldObj.(*v1.Service)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", oldObj))
return
}
service, ok := newObj.(*v1.Service)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", newObj))
return
}
for i := range c.eventHandlers {
klog.V(4).Info("Calling handler.OnServiceUpdate")
c.eventHandlers[i].OnServiceUpdate(oldService, service)
}
}
func (c *ServiceConfig) handleDeleteService(obj interface{}) {
service, ok := obj.(*v1.Service)
if !ok {
tombstone, ok := obj.(cache.DeletedFinalStateUnknown)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
if service, ok = tombstone.Obj.(*v1.Service); !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
}
for i := range c.eventHandlers {
klog.V(4).Info("Calling handler.OnServiceDelete")
c.eventHandlers[i].OnServiceDelete(service)
}
}
// NodeHandler is an abstract interface of objects which receive
// notifications about node object changes.
type NodeHandler interface {
// OnNodeAdd is called whenever creation of new node object
// is observed.
OnNodeAdd(node *v1.Node)
// OnNodeUpdate is called whenever modification of an existing
// node object is observed.
OnNodeUpdate(oldNode, node *v1.Node)
// OnNodeDelete is called whenever deletion of an existing node
// object is observed.
OnNodeDelete(node *v1.Node)
// OnNodeSynced is called once all the initial event handlers were
// called and the state is fully propagated to local cache.
OnNodeSynced()
}
// NoopNodeHandler is a noop handler for proxiers that have not yet
// implemented a full NodeHandler.
type NoopNodeHandler struct{}
// OnNodeAdd is a noop handler for Node creates.
func (*NoopNodeHandler) OnNodeAdd(node *v1.Node) {}
// OnNodeUpdate is a noop handler for Node updates.
func (*NoopNodeHandler) OnNodeUpdate(oldNode, node *v1.Node) {}
// OnNodeDelete is a noop handler for Node deletes.
func (*NoopNodeHandler) OnNodeDelete(node *v1.Node) {}
// OnNodeSynced is a noop handler for Node syncs.
func (*NoopNodeHandler) OnNodeSynced() {}
var _ NodeHandler = &NoopNodeHandler{}
// NodeConfig tracks a set of node configurations.
// It accepts "set", "add" and "remove" operations of node via channels, and invokes registered handlers on change.
type NodeConfig struct {
listerSynced cache.InformerSynced
eventHandlers []NodeHandler
}
// NewNodeConfig creates a new NodeConfig.
func NewNodeConfig(nodeInformer coreinformers.NodeInformer, resyncPeriod time.Duration) *NodeConfig {
result := &NodeConfig{
listerSynced: nodeInformer.Informer().HasSynced,
}
nodeInformer.Informer().AddEventHandlerWithResyncPeriod(
cache.ResourceEventHandlerFuncs{
AddFunc: result.handleAddNode,
UpdateFunc: result.handleUpdateNode,
DeleteFunc: result.handleDeleteNode,
},
resyncPeriod,
)
return result
}
// RegisterEventHandler registers a handler which is called on every node change.
func (c *NodeConfig) RegisterEventHandler(handler NodeHandler) {
c.eventHandlers = append(c.eventHandlers, handler)
}
// Run starts the goroutine responsible for calling registered handlers.
func (c *NodeConfig) Run(stopCh <-chan struct{}) {
klog.Info("Starting node config controller")
if !cache.WaitForNamedCacheSync("node config", stopCh, c.listerSynced) {
return
}
for i := range c.eventHandlers {
klog.V(3).Infof("Calling handler.OnNodeSynced()")
c.eventHandlers[i].OnNodeSynced()
}
}
func (c *NodeConfig) handleAddNode(obj interface{}) {
node, ok := obj.(*v1.Node)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
for i := range c.eventHandlers {
klog.V(4).Infof("Calling handler.OnNodeAdd")
c.eventHandlers[i].OnNodeAdd(node)
}
}
func (c *NodeConfig) handleUpdateNode(oldObj, newObj interface{}) {
oldNode, ok := oldObj.(*v1.Node)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", oldObj))
return
}
node, ok := newObj.(*v1.Node)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", newObj))
return
}
for i := range c.eventHandlers {
klog.V(5).Infof("Calling handler.OnNodeUpdate")
c.eventHandlers[i].OnNodeUpdate(oldNode, node)
}
}
func (c *NodeConfig) handleDeleteNode(obj interface{}) {
node, ok := obj.(*v1.Node)
if !ok {
tombstone, ok := obj.(cache.DeletedFinalStateUnknown)
if !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
if node, ok = tombstone.Obj.(*v1.Node); !ok {
utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj))
return
}
}
for i := range c.eventHandlers {
klog.V(4).Infof("Calling handler.OnNodeDelete")
c.eventHandlers[i].OnNodeDelete(node)
}
}<|fim▁end|> | if !cache.WaitForNamedCacheSync("endpoint slice config", stopCh, c.listerSynced) { |
<|file_name|>Get_string_no_quote.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 Waratek Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.waratek.spiracle.sql.servlet.oracle;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;<|fim▁hole|>import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.waratek.spiracle.sql.servlet.util.ParameterNullFix;
import com.waratek.spiracle.sql.util.SelectUtil;
/**
* Servlet implementation class Get_string_no_quote
*/
@WebServlet({"/Get_string_no_quote", "/MsSql_Get_string_no_quote", "/MySql_Get_string_no_quote"})
public class Get_string_no_quote extends HttpServlet {
private static final long serialVersionUID = 1L;
/**
* @see HttpServlet#HttpServlet()
*/
public Get_string_no_quote() {
super();
// TODO Auto-generated constructor stub
}
/**
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
executeRequest(request, response);
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
executeRequest(request, response);
}
private void executeRequest(HttpServletRequest request, HttpServletResponse response) throws IOException {
ServletContext application = this.getServletConfig().getServletContext();
List<String> queryStringList = new ArrayList<String>();
queryStringList.add("name");
Map<String, String> nullSanitizedMap = ParameterNullFix.sanitizeNull(queryStringList, request);
String name = nullSanitizedMap.get("name");
String sql = "SELECT * FROM users WHERE name = " + name;
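		// e.g. name = "1 OR 1=1" yields "SELECT * FROM users WHERE name = 1 OR 1=1",
		// which returns every row -- the unquoted-parameter SQL injection this
		// deliberately vulnerable endpoint demonstrates.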
Boolean showErrors = true;
Boolean allResults = true;
Boolean showOutput = true;
SelectUtil.executeQuery(sql, application, request, response, showErrors, allResults, showOutput);
}
}<|fim▁end|> |
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet; |
<|file_name|>example.js<|end_file_name|><|fim▁begin|>'use strict';
/**<|fim▁hole|> * Module dependencies.
*/
var mongoose = require('mongoose'),
Example = mongoose.model('Example'),
_ = require('lodash'),
upload = require('./upload');
/**
* Find example by id
*/
exports.example = function(req, res, next, id) {
Example.load(id, function(err, example) {
if (err) return next(err);
if (!example) return next(new Error('Failed to load example ' + id));
req.example = example;
next();
});
};
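/**
 * Hypothetical route wiring for the param loader above (names assumed):
 *   app.param('exampleId', examples.example);
 *   app.route('/examples/:exampleId').get(examples.show).put(examples.update);
 */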
/**
* Create a example
*/
exports.create = function(req, res) {
var example = new Example(req.body);
example.user = req.user;
example.save(function(err) {
if (err) {
return res.send('/login', {
errors: err.errors,
example: example
});
} else {
res.jsonp(example);
}
});
};
/**
* Update a example
*/
exports.update = function(req, res) {
var example = req.example;
example = _.extend(example, req.body);
example.save(function(err) {
if (err) {
console.log("Error -" + err);
return res.send('/login', {
errors: err,
example: example
});
} else {
console.log("Example Saved - " + example);
res.jsonp(example);
}
});
};
/**
* Delete an example
*/
exports.destroy = function(req, res) {
var example = req.example;
example.remove(function(err) {
if (err) {
return res.send('/login', {
errors: err.errors,
example: example
});
} else {
res.jsonp(example);
}
});
};
/**
* Show an example
*/
exports.show = function(req, res) {
res.jsonp(req.example);
};
/**
* List of Examples
*/
exports.all = function(req, res) {
Example.find().sort('-created').populate('user', 'name username').exec(function(err, examples) {
if (err) {
res.render('error', {
status: 500
});
} else {
res.jsonp(examples);
}
});
};<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.views.generic.base import TemplateView
from sarGraphs.lib.sar import get_cpu, get_load
from sarGraphs.lib.sar import get_swap, get_memory
class HomeView(TemplateView):<|fim▁hole|> def get_context_data(self, **kwargs):
context = {}
context['cpu'] = get_cpu('%idle')
context['iowait'] = get_cpu('%iowait')
context['swap'] = get_swap('%swpused')
context['mem'] = get_memory()
context['load'] = get_load()
return context<|fim▁end|> | 'Home Page View'
template_name = "home.html" |
<|file_name|>functools_lru_cache.py<|end_file_name|><|fim▁begin|># backports.functools_lru_cache v1.5
# https://github.com/jaraco/backports.functools_lru_cache
# Copyright (c) 2014-2018 Jason R. Coombs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Backport of functools.lru_cache from Python 3.3 as published at ActiveState"""
from __future__ import absolute_import
import functools
from collections import namedtuple
from threading import RLock
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
@functools.wraps(functools.update_wrapper)
def update_wrapper(wrapper,
wrapped,
assigned = functools.WRAPPER_ASSIGNMENTS,
updated = functools.WRAPPER_UPDATES):
"""
Patch two bugs in functools.update_wrapper.
"""
# workaround for http://bugs.python.org/issue3445
assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
# workaround for https://bugs.python.org/issue17482
wrapper.__wrapped__ = wrapped
return wrapper
class _HashedSeq(list):
__slots__ = 'hashvalue'
def __init__(self, tup, hash=hash):
self[:] = tup
self.hashvalue = hash(tup)
def __hash__(self):
return self.hashvalue
def _make_key(args, kwds, typed,
kwd_mark=(object(),),
fasttypes=set([int, str, frozenset, type(None)])):
"""Make a cache key from optionally typed positional and keyword arguments"""
key = args
if kwds:
sorted_items = sorted(kwds.items())
key += kwd_mark
for item in sorted_items:
key += item
if typed:
key += tuple(type(v) for v in args)
if kwds:
key += tuple(type(v) for k, v in sorted_items)
elif len(key) == 1 and type(key[0]) in fasttypes:
return key[0]
return _HashedSeq(key)
def lru_cache(maxsize=100, typed=False):
"""Least-recently-used cache decorator.
If *maxsize* is set to None, the LRU features are disabled and the cache
can grow without bound.
If *typed* is True, arguments of different types will be cached separately.
For example, f(3.0) and f(3) will be treated as distinct calls with
distinct results.
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, maxsize, currsize) with
f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
"""
# Users should only access the lru_cache through its public API:
# cache_info, cache_clear, and f.__wrapped__
# The internals of the lru_cache are encapsulated for thread safety and
# to allow the implementation to change (including a possible C version).
def decorating_function(user_function):
cache = dict()
stats = [0, 0] # make statistics updateable non-locally
HITS, MISSES = 0, 1 # names for the stats fields
make_key = _make_key
cache_get = cache.get # bound method to lookup key or return None
_len = len # localize the global len() function
lock = RLock() # because linkedlist updates aren't threadsafe
root = [] # root of the circular doubly linked list
root[:] = [root, root, None, None] # initialize by pointing to self
nonlocal_root = [root] # make updateable non-locally
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
if maxsize == 0:
def wrapper(*args, **kwds):
# no caching, just do a statistics update after a successful call
result = user_function(*args, **kwds)
stats[MISSES] += 1
return result
elif maxsize is None:
def wrapper(*args, **kwds):
# simple caching without ordering or size limit<|fim▁hole|> if result is not root:
stats[HITS] += 1
return result
result = user_function(*args, **kwds)
cache[key] = result
stats[MISSES] += 1
return result
else:
def wrapper(*args, **kwds):
# size limited caching that tracks accesses by recency
key = make_key(args, kwds, typed) if kwds or typed else args
with lock:
link = cache_get(key)
if link is not None:
# record recent use of the key by moving it to the front of the list
root, = nonlocal_root
link_prev, link_next, key, result = link
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
last = root[PREV]
last[NEXT] = root[PREV] = link
link[PREV] = last
link[NEXT] = root
stats[HITS] += 1
return result
result = user_function(*args, **kwds)
with lock:
root, = nonlocal_root
if key in cache:
# getting here means that this same key was added to the
# cache while the lock was released. since the link
# update is already done, we need only return the
# computed result and update the count of misses.
pass
elif _len(cache) >= maxsize:
# use the old root to store the new key and result
oldroot = root
oldroot[KEY] = key
oldroot[RESULT] = result
# empty the oldest link and make it the new root
root = nonlocal_root[0] = oldroot[NEXT]
oldkey = root[KEY]
root[KEY] = root[RESULT] = None
# now update the cache dictionary for the new links
del cache[oldkey]
cache[key] = oldroot
else:
# put result in a new link at the front of the list
last = root[PREV]
link = [last, root, key, result]
last[NEXT] = root[PREV] = cache[key] = link
stats[MISSES] += 1
return result
def cache_info():
"""Report cache statistics"""
with lock:
return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
def cache_clear():
"""Clear the cache and cache statistics"""
with lock:
cache.clear()
root = nonlocal_root[0]
root[:] = [root, root, None, None]
stats[:] = [0, 0]
wrapper.__wrapped__ = user_function
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return update_wrapper(wrapper, user_function)
return decorating_function<|fim▁end|> | key = make_key(args, kwds, typed)
result = cache_get(key, root) # root used here as a unique not-found sentinel |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
fn main() {
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
let different_scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();<|fim▁hole|> let team_name = String::from("Blue");
let score = different_scores.get(&team_name);
for (key, value) in &different_scores {
println!("{}: {}", key, value);
}
    // insert() overwrites things by default; entry().or_insert() only fills a vacant key, so "Blue" stays 10
scores.entry(String::from("Blue")).or_insert(100);
scores.entry(String::from("Red")).or_insert(200);
println!("{:?}", scores);
let text = "hello wonderful world world";
let mut map = HashMap::new();
for word in text.split_whitespace() {
        let count = map.entry(word).or_insert(0);
*count += 1;
}
println!("{:?}", map);
// It's possible to use different hasher than default one
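    // A sketch of the `with_hasher` API (RandomState is actually the std
    // default hasher; shown here only to illustrate the call shape):
    //
    //     use std::collections::hash_map::RandomState;
    //     let custom: HashMap<i32, i32, RandomState> =
    //         HashMap::with_hasher(RandomState::new());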
}<|fim▁end|> |
// Hash map becomes the owner of values used during inserting
|
<|file_name|>pthread.rs<|end_file_name|><|fim▁begin|>use libc::{self, pthread_t};
pub type Pthread = pthread_t;
/// Obtain ID of the calling thread (see
/// [pthread_self(3)](http://pubs.opengroup.org/onlinepubs/9699919799/functions/pthread_self.html)
///
/// The thread ID returned by pthread_self() is not the same thing as
/// the kernel thread ID returned by a call to gettid(2).<|fim▁hole|>#[inline]
pub fn pthread_self() -> Pthread {
unsafe { libc::pthread_self() }
}<|fim▁end|> | |
<|file_name|>module.ts<|end_file_name|><|fim▁begin|>import {NgModule} from '@angular/core';
import {CommonModule} from '@angular/common';
import {MdcListDivider} from './list-divider';
import {
MdcList,
MdcListGroup,
MdcListGroupSubheader
} from './list';
import {
MdcListItem,
MdcListItemGraphic,
MdcListItemMeta,
MdcListItemSecondary,
MdcListItemText
} from './list-item';
<|fim▁hole|> MdcListGroup,
MdcListGroupSubheader,
MdcListItem,
MdcListItemGraphic,
MdcListItemMeta,
MdcListItemSecondary,
MdcListItemText
];
@NgModule({
imports: [CommonModule],
exports: LIST_DECLARATIONS,
declarations: LIST_DECLARATIONS,
})
export class MdcListModule { }<|fim▁end|> | const LIST_DECLARATIONS = [
MdcList,
MdcListDivider, |
<|file_name|>l3agentscheduler.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 OpenStack Foundation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import webob.exc
from neutron.api import extensions
from neutron.api.v2 import base
from neutron.api.v2 import resource
from neutron.common import constants
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.extensions import agent
from neutron.i18n import _LE
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as service_constants
from neutron import policy
from neutron import wsgi
LOG = logging.getLogger(__name__)
L3_ROUTER = 'l3-router'
L3_ROUTERS = L3_ROUTER + 's'
L3_AGENT = 'l3-agent'
L3_AGENTS = L3_AGENT + 's'
class RouterSchedulerController(wsgi.Controller):
def get_plugin(self):
plugin = manager.NeutronManager.get_service_plugins().get(
service_constants.L3_ROUTER_NAT)
if not plugin:
LOG.error(_LE('No plugin for L3 routing registered to handle '
'router scheduling'))
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
return plugin
def index(self, request, **kwargs):
plugin = self.get_plugin()
policy.enforce(request.context,
"get_%s" % L3_ROUTERS,
{})
return plugin.list_routers_on_l3_agent(
request.context, kwargs['agent_id'])
def create(self, request, body, **kwargs):
plugin = self.get_plugin()
policy.enforce(request.context,
"create_%s" % L3_ROUTER,
{})
agent_id = kwargs['agent_id']
router_id = body['router_id']
result = plugin.add_router_to_l3_agent(request.context, agent_id,
router_id)
notify(request.context, 'l3_agent.router.add', router_id, agent_id)
return result
def delete(self, request, id, **kwargs):
plugin = self.get_plugin()
policy.enforce(request.context,
"delete_%s" % L3_ROUTER,
{})
agent_id = kwargs['agent_id']
result = plugin.remove_router_from_l3_agent(request.context, agent_id,
id)
notify(request.context, 'l3_agent.router.remove', id, agent_id)
return result
class L3AgentsHostingRouterController(wsgi.Controller):
def get_plugin(self):
plugin = manager.NeutronManager.get_service_plugins().get(
service_constants.L3_ROUTER_NAT)
if not plugin:
LOG.error(_LE('No plugin for L3 routing registered to handle '
'router scheduling'))
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
return plugin
def index(self, request, **kwargs):
plugin = self.get_plugin()
policy.enforce(request.context,
"get_%s" % L3_AGENTS,
{})
return plugin.list_l3_agents_hosting_router(
request.context, kwargs['router_id'])
class L3agentscheduler(extensions.ExtensionDescriptor):
"""Extension class supporting l3 agent scheduler.
"""
@classmethod
def get_name(cls):
return "L3 Agent Scheduler"
@classmethod
def get_alias(cls):
return constants.L3_AGENT_SCHEDULER_EXT_ALIAS
@classmethod
def get_description(cls):
return "Schedule routers among l3 agents"
@classmethod
def get_namespace(cls):
return "http://docs.openstack.org/ext/l3_agent_scheduler/api/v1.0"
@classmethod
def get_updated(cls):
return "2013-02-07T10:00:00-00:00"<|fim▁hole|> def get_resources(cls):
"""Returns Ext Resources."""
exts = []
parent = dict(member_name="agent",
collection_name="agents")
controller = resource.Resource(RouterSchedulerController(),
base.FAULT_MAP)
exts.append(extensions.ResourceExtension(
L3_ROUTERS, controller, parent))
parent = dict(member_name="router",
collection_name="routers")
controller = resource.Resource(L3AgentsHostingRouterController(),
base.FAULT_MAP)
exts.append(extensions.ResourceExtension(
L3_AGENTS, controller, parent))
return exts
def get_extended_resources(self, version):
return {}
class InvalidL3Agent(agent.AgentNotFound):
message = _("Agent %(id)s is not a L3 Agent or has been disabled")
class RouterHostedByL3Agent(exceptions.Conflict):
message = _("The router %(router_id)s has been already hosted"
" by the L3 Agent %(agent_id)s.")
class RouterSchedulingFailed(exceptions.Conflict):
message = _("Failed scheduling router %(router_id)s to"
" the L3 Agent %(agent_id)s.")
class RouterReschedulingFailed(exceptions.Conflict):
message = _("Failed rescheduling router %(router_id)s: "
"no eligible l3 agent found.")
class RouterNotHostedByL3Agent(exceptions.Conflict):
message = _("The router %(router_id)s is not hosted"
" by L3 agent %(agent_id)s.")
class RouterL3AgentMismatch(exceptions.Conflict):
message = _("Cannot host %(router_type)s router %(router_id)s "
"on %(agent_mode)s L3 agent %(agent_id)s.")
class DVRL3CannotAssignToDvrAgent(exceptions.Conflict):
message = _("Not allowed to manually assign a %(router_type)s "
"router %(router_id)s from an existing DVR node "
"to another L3 agent %(agent_id)s.")
class L3AgentSchedulerPluginBase(object):
"""REST API to operate the l3 agent scheduler.
All of method must be in an admin context.
"""
@abc.abstractmethod
def add_router_to_l3_agent(self, context, id, router_id):
pass
@abc.abstractmethod
def remove_router_from_l3_agent(self, context, id, router_id):
pass
@abc.abstractmethod
def list_routers_on_l3_agent(self, context, id):
pass
@abc.abstractmethod
def list_l3_agents_hosting_router(self, context, router_id):
pass
def notify(context, action, router_id, agent_id):
info = {'id': agent_id, 'router_id': router_id}
notifier = n_rpc.get_notifier('router')
notifier.info(context, action, {'agent': info})<|fim▁end|> |
@classmethod |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import os.path
import requests
import wtforms
from wtforms import validators
from ..forms import TextDatasetForm
from origae import utils
from origae.utils.forms import validate_required_iff, validate_greater_than
class TextClassificationDatasetForm(TextDatasetForm):
"""
Defines the form used to create a new TextClassificationDatasetJob
"""
backend = wtforms.SelectField('DB backend',
choices=[
('lmdb', 'LMDB'),
('hdf5', 'HDF5')
],
default='lmdb',
)
def validate_backend(form, field):
if field.data == 'lmdb':
form.compression.data = 'none'
elif field.data == 'tfrecords':
form.compression.data = 'none'
elif field.data == 'hdf5':
form.encoding.data = 'none'
compression = utils.forms.SelectField(
'DB compression',
choices=[
('none', 'None'),
('gzip', 'GZIP'),
],
default='none',
tooltip=('Compressing the dataset may significantly decrease the size '
'of your database files, but it may increase read and write times.'),
)
# Use a SelectField instead of a HiddenField so that the default value
# is used when nothing is provided (through the REST API)
method = wtforms.SelectField(u'Dataset type',
choices=[
('folder', 'Folder'),
('textfile', 'Textfiles'),
],
default='folder',
)
def validate_folder_path(form, field):
if not field.data:
pass
elif utils.is_url(field.data):
# make sure the URL exists
try:
r = requests.get(field.data,
allow_redirects=False,
timeout=utils.HTTP_TIMEOUT)
if r.status_code not in [requests.codes.ok, requests.codes.moved, requests.codes.found]:
raise validators.ValidationError('URL not found')
except Exception as e:
raise validators.ValidationError('Caught %s while checking URL: %s' % (type(e).__name__, e))
else:
return True
else:
# make sure the filesystem path exists
# and make sure the filesystem path is absolute
if not os.path.exists(field.data) or not os.path.isdir(field.data):
raise validators.ValidationError('Folder does not exist')
elif not os.path.isabs(field.data):
raise validators.ValidationError('Filesystem path is not absolute')
else:
return True
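    # Illustrative inputs for the validator above (a sketch):
    #   'http://example.com/images/' is verified with an HTTP request;
    #   '/data/train' must exist, be a directory, and be absolute;
    #   relative paths are rejected even when they exist.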
#
# Method - folder
#
folder_train = utils.forms.StringField(
u'Training Images',
validators=[
validate_required_iff(method='folder'),
validate_folder_path,
],
tooltip=('Indicate a folder which holds subfolders full of images. '
'Each subfolder should be named according to the desired label for the images that it holds. '
'Can also be a URL for an apache/nginx auto-indexed folder.'),
)
folder_pct_val = utils.forms.IntegerField(
u'% for validation',
default=25,
validators=[
validate_required_iff(method='folder'),
validators.NumberRange(min=0, max=100)
],
tooltip=('You can choose to set apart a certain percentage of images '
'from the training images for the validation set.'),
)
folder_pct_test = utils.forms.IntegerField(
u'% for testing',
default=0,
validators=[
validate_required_iff(method='folder'),
validators.NumberRange(min=0, max=100)
],
tooltip=('You can choose to set apart a certain percentage of images '
'from the training images for the test set.'),
)
folder_train_min_per_class = utils.forms.IntegerField(
u'Minimum samples per class',
default=2,
validators=[
validators.Optional(),
validators.NumberRange(min=1),
],
tooltip=('You can choose to specify a minimum number of samples per class. '
'If a class has fewer samples than the specified amount it will be ignored. '
'Leave blank to ignore this feature.'),
)
folder_train_max_per_class = utils.forms.IntegerField(
u'Maximum samples per class',
validators=[<|fim▁hole|> validators.NumberRange(min=1),
validate_greater_than('folder_train_min_per_class'),
],
tooltip=('You can choose to specify a maximum number of samples per class. '
'If a class has more samples than the specified amount extra samples will be ignored. '
'Leave blank to ignore this feature.'),
)
has_val_folder = wtforms.BooleanField(
'Separate validation images folder',
default=False,
validators=[
validate_required_iff(method='folder')
]
)
folder_val = wtforms.StringField(
u'Validation Images',
validators=[
validate_required_iff(
method='folder',
has_val_folder=True),
]
)
folder_val_min_per_class = utils.forms.IntegerField(
u'Minimum samples per class',
default=2,
validators=[
validators.Optional(),
validators.NumberRange(min=1),
],
tooltip=('You can choose to specify a minimum number of samples per class. '
'If a class has fewer samples than the specified amount it will be ignored. '
'Leave blank to ignore this feature.'),
)
folder_val_max_per_class = utils.forms.IntegerField(
u'Maximum samples per class',
validators=[
validators.Optional(),
validators.NumberRange(min=1),
validate_greater_than('folder_val_min_per_class'),
],
tooltip=('You can choose to specify a maximum number of samples per class. '
'If a class has more samples than the specified amount extra samples will be ignored. '
'Leave blank to ignore this feature.'),
)
has_test_folder = wtforms.BooleanField(
'Separate test images folder',
default=False,
validators=[
validate_required_iff(method='folder')
]
)
folder_test = wtforms.StringField(
u'Test Images',
validators=[
validate_required_iff(
method='folder',
has_test_folder=True),
validate_folder_path,
]
)
folder_test_min_per_class = utils.forms.IntegerField(
u'Minimum samples per class',
default=2,
validators=[
validators.Optional(),
validators.NumberRange(min=1)
],
tooltip=('You can choose to specify a minimum number of samples per class. '
'If a class has fewer samples than the specified amount it will be ignored. '
'Leave blank to ignore this feature.'),
)
folder_test_max_per_class = utils.forms.IntegerField(
u'Maximum samples per class',
validators=[
validators.Optional(),
validators.NumberRange(min=1),
validate_greater_than('folder_test_min_per_class'),
],
tooltip=('You can choose to specify a maximum number of samples per class. '
'If a class has more samples than the specified amount extra samples will be ignored. '
'Leave blank to ignore this feature.'),
)
#
# Method - textfile
#
textfile_use_local_files = wtforms.BooleanField(
u'Use local files',
default=False,
)
textfile_train_images = utils.forms.FileField(
u'Training images',
validators=[
validate_required_iff(method='textfile',
textfile_use_local_files=False)
]
)
textfile_local_train_images = wtforms.StringField(
u'Training images',
validators=[
validate_required_iff(method='textfile',
textfile_use_local_files=True)
]
)
textfile_train_folder = wtforms.StringField(u'Training images folder')
def validate_textfile_train_folder(form, field):
if form.method.data != 'textfile':
field.errors[:] = []
raise validators.StopValidation()
if not field.data.strip():
# allow null
return True
if not os.path.exists(field.data) or not os.path.isdir(field.data):
raise validators.ValidationError('folder does not exist')
return True
textfile_use_val = wtforms.BooleanField(u'Validation set',
default=True,
validators=[
validate_required_iff(method='textfile')
]
)
textfile_val_images = utils.forms.FileField(u'Validation images',
validators=[
validate_required_iff(
method='textfile',
textfile_use_val=True,
textfile_use_local_files=False)
]
)
textfile_local_val_images = wtforms.StringField(u'Validation images',
validators=[
validate_required_iff(
method='textfile',
textfile_use_val=True,
textfile_use_local_files=True)
]
)
textfile_val_folder = wtforms.StringField(u'Validation images folder')
def validate_textfile_val_folder(form, field):
if form.method.data != 'textfile' or not form.textfile_use_val.data:
field.errors[:] = []
raise validators.StopValidation()
if not field.data.strip():
# allow null
return True
if not os.path.exists(field.data) or not os.path.isdir(field.data):
raise validators.ValidationError('folder does not exist')
return True
textfile_use_test = wtforms.BooleanField(u'Test set',
default=False,
validators=[
validate_required_iff(method='textfile')
]
)
textfile_test_images = utils.forms.FileField(u'Test images',
validators=[
validate_required_iff(
method='textfile',
textfile_use_test=True,
textfile_use_local_files=False)
]
)
textfile_local_test_images = wtforms.StringField(u'Test images',
validators=[
validate_required_iff(
method='textfile',
textfile_use_test=True,
textfile_use_local_files=True)
]
)
textfile_test_folder = wtforms.StringField(u'Test images folder')
def validate_textfile_test_folder(form, field):
if form.method.data != 'textfile' or not form.textfile_use_test.data:
field.errors[:] = []
raise validators.StopValidation()
if not field.data.strip():
# allow null
return True
if not os.path.exists(field.data) or not os.path.isdir(field.data):
raise validators.ValidationError('folder does not exist')
return True
# Can't use a BooleanField here because HTML doesn't submit anything
# for an unchecked checkbox. Since we want to use a REST API and have
# this default to True when nothing is supplied, we have to use a
# SelectField
textfile_shuffle = utils.forms.SelectField(
'Shuffle lines',
choices=[
(1, 'Yes'),
(0, 'No'),
],
coerce=int,
default=1,
tooltip="Shuffle the list[s] of images before creating the database."
)
textfile_labels_file = utils.forms.FileField(
u'Labels',
validators=[
validate_required_iff(method='textfile',
textfile_use_local_files=False)
],
tooltip=("The 'i'th line of the file should give the string label "
"associated with the '(i-1)'th numeric label. (E.g. the string label "
"for the numeric label 0 is supposed to be on line 1.)"),
)
textfile_local_labels_file = utils.forms.StringField(
u'Labels',
validators=[
validate_required_iff(method='textfile',
textfile_use_local_files=True)
],
tooltip=("The 'i'th line of the file should give the string label "
"associated with the '(i-1)'th numeric label. (E.g. the string label "
"for the numeric label 0 is supposed to be on line 1.)"),
)<|fim▁end|> | validators.Optional(), |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
setup(
name = "FreeCite",
version = "0.1",
py_modules = ['freecite'],
#install requirements
install_requires = [
'requests==1.1.0'
],
#author details
author = "James Ravenscroft",
author_email = "[email protected]",
description = "A wrapper around the FreeCite REST API",
url = "http://wwww.github.com/ravenscroftj/freecite"
)<|fim▁end|> | from setuptools import setup, find_packages |
<|file_name|>EventHandler.cpp<|end_file_name|><|fim▁begin|>// License: The Unlicense (https://unlicense.org)
#include "EventHandler.hpp"<|fim▁hole|>
int EventFilter(void *userData, SDL_Event *event);
void SetUpEvents(void) {
SDL_EventState(SDL_APP_TERMINATING, SDL_IGNORE);
SDL_EventState(SDL_APP_LOWMEMORY, SDL_IGNORE);
SDL_EventState(SDL_APP_WILLENTERBACKGROUND, SDL_IGNORE);
SDL_EventState(SDL_APP_DIDENTERBACKGROUND, SDL_IGNORE);
SDL_EventState(SDL_APP_WILLENTERFOREGROUND, SDL_IGNORE);
SDL_EventState(SDL_APP_DIDENTERFOREGROUND, SDL_IGNORE);
SDL_EventState(SDL_AUDIODEVICEADDED, SDL_IGNORE);
SDL_EventState(SDL_AUDIODEVICEREMOVED, SDL_IGNORE);
SDL_EventState(SDL_CLIPBOARDUPDATE, SDL_IGNORE);
SDL_EventState(SDL_CONTROLLERAXISMOTION, SDL_IGNORE);
SDL_EventState(SDL_CONTROLLERBUTTONDOWN, SDL_IGNORE);
SDL_EventState(SDL_CONTROLLERBUTTONUP, SDL_IGNORE);
SDL_EventState(SDL_CONTROLLERDEVICEADDED, SDL_IGNORE);
SDL_EventState(SDL_CONTROLLERDEVICEREMAPPED, SDL_IGNORE);
SDL_EventState(SDL_CONTROLLERDEVICEREMOVED, SDL_IGNORE);
SDL_EventState(SDL_DOLLARGESTURE, SDL_IGNORE);
SDL_EventState(SDL_DOLLARRECORD, SDL_IGNORE);
SDL_EventState(SDL_DROPFILE, SDL_IGNORE);
SDL_EventState(SDL_FINGERDOWN, SDL_IGNORE);
SDL_EventState(SDL_FINGERMOTION, SDL_IGNORE);
SDL_EventState(SDL_FINGERUP, SDL_IGNORE);
SDL_EventState(SDL_JOYAXISMOTION, SDL_IGNORE);
SDL_EventState(SDL_JOYBALLMOTION, SDL_IGNORE);
SDL_EventState(SDL_JOYBUTTONDOWN, SDL_IGNORE);
SDL_EventState(SDL_JOYBUTTONUP, SDL_IGNORE);
SDL_EventState(SDL_JOYDEVICEADDED, SDL_IGNORE);
SDL_EventState(SDL_JOYDEVICEREMOVED, SDL_IGNORE);
SDL_EventState(SDL_JOYHATMOTION, SDL_IGNORE);
SDL_EventState(SDL_KEYMAPCHANGED, SDL_IGNORE);
SDL_EventState(SDL_KEYUP, SDL_IGNORE);
SDL_EventState(SDL_LASTEVENT, SDL_IGNORE);
SDL_EventState(SDL_MOUSEBUTTONDOWN, SDL_IGNORE);
SDL_EventState(SDL_MOUSEBUTTONUP, SDL_IGNORE);
SDL_EventState(SDL_MOUSEMOTION, SDL_IGNORE);
SDL_EventState(SDL_MOUSEWHEEL, SDL_IGNORE);
SDL_EventState(SDL_MULTIGESTURE, SDL_IGNORE);
SDL_EventState(SDL_RENDER_TARGETS_RESET, SDL_IGNORE);
SDL_EventState(SDL_RENDER_DEVICE_RESET, SDL_IGNORE);
SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
SDL_EventState(SDL_TEXTEDITING, SDL_IGNORE);
SDL_EventState(SDL_TEXTINPUT, SDL_IGNORE);
SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
SDL_SetEventFilter(EventFilter, nullptr);
}
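// Illustrative call site (a sketch; SDL_Init and the event loop are
// assumptions, not part of this file):
//   SDL_Init(SDL_INIT_VIDEO);
//   SetUpEvents();                    // install the filter before polling
//   SDL_Event e;
//   while (SDL_WaitEvent(&e)) { /* only filtered-in events arrive here */ }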
int EventFilter(void *, SDL_Event *event) {
int result = 0;
switch (event->type) {
case SDL_QUIT:
case SDL_WINDOWEVENT:
result = 1;
break;
case SDL_KEYDOWN:
switch (event->key.keysym.sym) {
case SDLK_DOWN:
case SDLK_UP:
case SDLK_LEFT:
case SDLK_RIGHT:
result = 1;
break;
default:
// Ignore all other keys
break;
}
break;
default:
printf("Something happened!\n");
break;
}
return result;
}<|fim▁end|> |
#include <SDL2/SDL.h> |
<|file_name|>nicEdit.js<|end_file_name|><|fim▁begin|>/* NicEdit - Micro Inline WYSIWYG
* Copyright 2007-2008 Brian Kirchoff
*
* NicEdit is distributed under the terms of the MIT license
* For more information visit http://nicedit.com/
* Do not remove this copyright message
*/
var bkExtend = function(){
var args = arguments;
if (args.length == 1) args = [this, args[0]];
for (var prop in args[1]) args[0][prop] = args[1][prop];
return args[0];
};
function bkClass() { }
bkClass.prototype.construct = function() {};
bkClass.extend = function(def) {
var classDef = function() {
if (arguments[0] !== bkClass) { return this.construct.apply(this, arguments); }
};
var proto = new this(bkClass);
bkExtend(proto,def);
classDef.prototype = proto;
classDef.extend = this.extend;
return classDef;
};
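// Illustrative use of this micro class system (a sketch, not part of NicEdit):
//   var Widget = bkClass.extend({
//     construct : function(name) { this.name = name; }
//   });
//   var w = new Widget('toolbar');   // construct() runs with 'toolbar'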
var bkElement = bkClass.extend({
construct : function(elm,d) {
if(typeof(elm) == "string") {
elm = (d || document).createElement(elm);
}
elm = $BK(elm);
return elm;
},
appendTo : function(elm) {
elm.appendChild(this);
return this;
},
appendBefore : function(elm) {
elm.parentNode.insertBefore(this,elm);
return this;
},
addEvent : function(type, fn) {
bkLib.addEvent(this,type,fn);
return this;
},
setContent : function(c) {
this.innerHTML = c;
return this;
},
pos : function() {
var curleft = curtop = 0;
var o = obj = this;
if (obj.offsetParent) {
do {
curleft += obj.offsetLeft;
curtop += obj.offsetTop;
} while (obj = obj.offsetParent);
}
var b = (!window.opera) ? parseInt(this.getStyle('border-width') || this.style.border) || 0 : 0;
return [curleft+b,curtop+b+this.offsetHeight];
},
noSelect : function() {
bkLib.noSelect(this);
return this;
},
parentTag : function(t) {
var elm = this;
do {
if(elm && elm.nodeName && elm.nodeName.toUpperCase() == t) {
return elm;
}
elm = elm.parentNode;
} while(elm);
return false;
},
hasClass : function(cls) {
return this.className.match(new RegExp('(\\s|^)nicEdit-'+cls+'(\\s|$)'));
},
addClass : function(cls) {
if (!this.hasClass(cls)) { this.className += " nicEdit-"+cls };
return this;
},
<|fim▁hole|> this.className = this.className.replace(new RegExp('(\\s|^)nicEdit-'+cls+'(\\s|$)'),' ');
}
return this;
},
setStyle : function(st) {
var elmStyle = this.style;
for(var itm in st) {
switch(itm) {
case 'float':
elmStyle['cssFloat'] = elmStyle['styleFloat'] = st[itm];
break;
case 'opacity':
elmStyle.opacity = st[itm];
elmStyle.filter = "alpha(opacity=" + Math.round(st[itm]*100) + ")";
break;
case 'className':
this.className = st[itm];
break;
default:
//if(document.compatMode || itm != "cursor") { // Nasty Workaround for IE 5.5
elmStyle[itm] = st[itm];
//}
}
}
return this;
},
getStyle : function( cssRule, d ) {
var doc = (!d) ? document.defaultView : d;
if(this.nodeType == 1)
return (doc && doc.getComputedStyle) ? doc.getComputedStyle( this, null ).getPropertyValue(cssRule) : this.currentStyle[ bkLib.camelize(cssRule) ];
},
remove : function() {
this.parentNode.removeChild(this);
return this;
},
setAttributes : function(at) {
for(var itm in at) {
this[itm] = at[itm];
}
return this;
}
});
var bkLib = {
isMSIE : (navigator.appVersion.indexOf("MSIE") != -1),
addEvent : function(obj, type, fn) {
(obj.addEventListener) ? obj.addEventListener( type, fn, false ) : obj.attachEvent("on"+type, fn);
},
toArray : function(iterable) {
var length = iterable.length, results = new Array(length);
while (length--) { results[length] = iterable[length] };
return results;
},
noSelect : function(element) {
if(element.setAttribute && element.nodeName.toLowerCase() != 'input' && element.nodeName.toLowerCase() != 'textarea') {
element.setAttribute('unselectable','on');
}
for(var i=0;i<element.childNodes.length;i++) {
bkLib.noSelect(element.childNodes[i]);
}
},
camelize : function(s) {
return s.replace(/\-(.)/g, function(m, l){return l.toUpperCase()});
},
inArray : function(arr,item) {
return (bkLib.search(arr,item) != null);
},
search : function(arr,itm) {
for(var i=0; i < arr.length; i++) {
if(arr[i] == itm)
return i;
}
return null;
},
cancelEvent : function(e) {
e = e || window.event;
if(e.preventDefault && e.stopPropagation) {
e.preventDefault();
e.stopPropagation();
}
return false;
},
domLoad : [],
domLoaded : function() {
if (arguments.callee.done) return;
arguments.callee.done = true;
for (i = 0;i < bkLib.domLoad.length;i++) bkLib.domLoad[i]();
},
onDomLoaded : function(fireThis) {
this.domLoad.push(fireThis);
if (document.addEventListener) {
document.addEventListener("DOMContentLoaded", bkLib.domLoaded, null);
} else if(bkLib.isMSIE) {
document.write("<style>.nicEdit-main p { margin: 0; }</style><scr"+"ipt id=__ie_onload defer " + ((location.protocol == "https:") ? "src='javascript:void(0)'" : "src=//0") + "><\/scr"+"ipt>");
$BK("__ie_onload").onreadystatechange = function() {
if (this.readyState == "complete"){bkLib.domLoaded();}
};
}
window.onload = bkLib.domLoaded;
}
};
function $BK(elm) {
if(typeof(elm) == "string") {
elm = document.getElementById(elm);
}
return (elm && !elm.appendTo) ? bkExtend(elm,bkElement.prototype) : elm;
}
var bkEvent = {
addEvent : function(evType, evFunc) {
if(evFunc) {
this.eventList = this.eventList || {};
this.eventList[evType] = this.eventList[evType] || [];
this.eventList[evType].push(evFunc);
}
return this;
},
fireEvent : function() {
var args = bkLib.toArray(arguments), evType = args.shift();
if(this.eventList && this.eventList[evType]) {
for(var i=0;i<this.eventList[evType].length;i++) {
this.eventList[evType][i].apply(this,args);
}
}
}
};
function __(s) {
return s;
}
Function.prototype.closure = function() {
var __method = this, args = bkLib.toArray(arguments), obj = args.shift();
return function() { if(typeof(bkLib) != 'undefined') { return __method.apply(obj,args.concat(bkLib.toArray(arguments))); } };
}
Function.prototype.closureListener = function() {
var __method = this, args = bkLib.toArray(arguments), object = args.shift();
return function(e) {
e = e || window.event;
if(e.target) { var target = e.target; } else { var target = e.srcElement };
return __method.apply(object, [e,target].concat(args) );
};
}
/* START CONFIG */
var nicEditorConfig = bkClass.extend({
buttons : {
'bold' : {name : __('Click to Bold'), command : 'Bold', tags : ['B','STRONG'], css : {'font-weight' : 'bold'}, key : 'b'},
'italic' : {name : __('Click to Italic'), command : 'Italic', tags : ['EM','I'], css : {'font-style' : 'italic'}, key : 'i'},
'underline' : {name : __('Click to Underline'), command : 'Underline', tags : ['U'], css : {'text-decoration' : 'underline'}, key : 'u'},
'left' : {name : __('Left Align'), command : 'justifyleft', noActive : true},
'center' : {name : __('Center Align'), command : 'justifycenter', noActive : true},
'right' : {name : __('Right Align'), command : 'justifyright', noActive : true},
'justify' : {name : __('Justify Align'), command : 'justifyfull', noActive : true},
'ol' : {name : __('Insert Ordered List'), command : 'insertorderedlist', tags : ['OL']},
'ul' : {name : __('Insert Unordered List'), command : 'insertunorderedlist', tags : ['UL']},
'subscript' : {name : __('Click to Subscript'), command : 'subscript', tags : ['SUB']},
'superscript' : {name : __('Click to Superscript'), command : 'superscript', tags : ['SUP']},
'strikethrough' : {name : __('Click to Strike Through'), command : 'strikeThrough', css : {'text-decoration' : 'line-through'}},
'removeformat' : {name : __('Remove Formatting'), command : 'removeformat', noActive : true},
'indent' : {name : __('Indent Text'), command : 'indent', noActive : true},
'outdent' : {name : __('Remove Indent'), command : 'outdent', noActive : true},
'hr' : {name : __('Horizontal Rule'), command : 'insertHorizontalRule', noActive : true}
},
iconsPath : '../nicEditorIcons.gif',
buttonList : ['save','bold','italic','underline','left','center','right','justify','ol','ul','fontSize','fontFamily','fontFormat','indent','outdent','image','upload','link','unlink','forecolor','bgcolor'],
iconList : {"xhtml":1,"bgcolor":2,"forecolor":3,"bold":4,"center":5,"hr":6,"indent":7,"italic":8,"justify":9,"left":10,"ol":11,"outdent":12,"removeformat":13,"right":14,"save":25,"strikethrough":16,"subscript":17,"superscript":18,"ul":19,"underline":20,"image":21,"link":22,"unlink":23,"close":24,"arrow":26,"upload":27}
});
/* END CONFIG */
var nicEditors = {
nicPlugins : [],
editors : [],
registerPlugin : function(plugin,options) {
this.nicPlugins.push({p : plugin, o : options});
},
allTextAreas : function(nicOptions) {
var textareas = document.getElementsByTagName("textarea");
for(var i=0;i<textareas.length;i++) {
nicEditors.editors.push(new nicEditor(nicOptions).panelInstance(textareas[i]));
}
return nicEditors.editors;
},
findEditor : function(e) {
var editors = nicEditors.editors;
for(var i=0;i<editors.length;i++) {
if(editors[i].instanceById(e)) {
return editors[i].instanceById(e);
}
}
}
};
var nicEditor = bkClass.extend({
construct : function(o) {
this.options = new nicEditorConfig();
bkExtend(this.options,o);
this.nicInstances = new Array();
this.loadedPlugins = new Array();
var plugins = nicEditors.nicPlugins;
for(var i=0;i<plugins.length;i++) {
this.loadedPlugins.push(new plugins[i].p(this,plugins[i].o));
}
nicEditors.editors.push(this);
bkLib.addEvent(document.body,'mousedown', this.selectCheck.closureListener(this) );
},
panelInstance : function(e,o) {
e = this.checkReplace($BK(e));
var panelElm = new bkElement('DIV').setStyle({width : (parseInt(e.getStyle('width')) || e.clientWidth)+'px'}).appendBefore(e);
this.setPanel(panelElm);
return this.addInstance(e,o);
},
checkReplace : function(e) {
var r = nicEditors.findEditor(e);
if(r) {
r.removeInstance(e);
r.removePanel();
}
return e;
},
addInstance : function(e,o) {
e = this.checkReplace($BK(e));
if( e.contentEditable || !!window.opera ) {
var newInstance = new nicEditorInstance(e,o,this);
} else {
var newInstance = new nicEditorIFrameInstance(e,o,this);
}
this.nicInstances.push(newInstance);
return this;
},
removeInstance : function(e) {
e = $BK(e);
var instances = this.nicInstances;
for(var i=0;i<instances.length;i++) {
if(instances[i].e == e) {
instances[i].remove();
this.nicInstances.splice(i,1);
}
}
},
removePanel : function(e) {
if(this.nicPanel) {
this.nicPanel.remove();
this.nicPanel = null;
}
},
instanceById : function(e) {
e = $BK(e);
var instances = this.nicInstances;
for(var i=0;i<instances.length;i++) {
if(instances[i].e == e) {
return instances[i];
}
}
},
setPanel : function(e) {
this.nicPanel = new nicEditorPanel($BK(e),this.options,this);
this.fireEvent('panel',this.nicPanel);
return this;
},
nicCommand : function(cmd,args) {
if(this.selectedInstance) {
this.selectedInstance.nicCommand(cmd,args);
}
},
getIcon : function(iconName,options) {
var icon = this.options.iconList[iconName];
var file = (options.iconFiles) ? options.iconFiles[iconName] : '';
return {backgroundImage : "url('"+((icon) ? this.options.iconsPath : file)+"')", backgroundPosition : ((icon) ? ((icon-1)*-18) : 0)+'px 0px'};
},
selectCheck : function(e,t) {
var found = false;
do{
if(t.className && t.className.indexOf('nicEdit') != -1) {
return false;
}
} while(t = t.parentNode);
this.fireEvent('blur',this.selectedInstance,t);
this.lastSelectedInstance = this.selectedInstance;
this.selectedInstance = null;
return false;
}
});
nicEditor = nicEditor.extend(bkEvent);
var nicEditorInstance = bkClass.extend({
isSelected : false,
construct : function(e,options,nicEditor) {
this.ne = nicEditor;
this.elm = this.e = e;
this.options = options || {};
newX = parseInt(e.getStyle('width')) || e.clientWidth;
newY = parseInt(e.getStyle('height')) || e.clientHeight;
this.initialHeight = newY-8;
var isTextarea = (e.nodeName.toLowerCase() == "textarea");
if(isTextarea || this.options.hasPanel) {
var ie7s = (bkLib.isMSIE && !((typeof document.body.style.maxHeight != "undefined") && document.compatMode == "CSS1Compat"))
var s = {width: newX+'px', border : '1px solid #ccc', borderTop : 0, overflowY : 'auto', overflowX: 'hidden' };
s[(ie7s) ? 'height' : 'maxHeight'] = (this.ne.options.maxHeight) ? this.ne.options.maxHeight+'px' : null;
this.editorContain = new bkElement('DIV').setStyle(s).appendBefore(e);
var editorElm = new bkElement('DIV').setStyle({width : (newX-8)+'px', margin: '4px', minHeight : newY+'px'}).addClass('main').appendTo(this.editorContain);
e.setStyle({display : 'none'});
editorElm.innerHTML = e.innerHTML;
if(isTextarea) {
editorElm.setContent(e.value);
this.copyElm = e;
var f = e.parentTag('FORM');
if(f) { bkLib.addEvent( f, 'submit', this.saveContent.closure(this)); }
}
editorElm.setStyle((ie7s) ? {height : newY+'px'} : {overflow: 'hidden'});
this.elm = editorElm;
}
this.ne.addEvent('blur',this.blur.closure(this));
this.init();
this.blur();
},
init : function() {
this.elm.setAttribute('contentEditable','true');
if(this.getContent() == "") {
this.setContent('<br />');
}
this.instanceDoc = document.defaultView;
this.elm.addEvent('mousedown',this.selected.closureListener(this)).addEvent('keypress',this.keyDown.closureListener(this)).addEvent('focus',this.selected.closure(this)).addEvent('blur',this.blur.closure(this)).addEvent('keyup',this.selected.closure(this));
this.ne.fireEvent('add',this);
},
remove : function() {
this.saveContent();
if(this.copyElm || this.options.hasPanel) {
this.editorContain.remove();
this.e.setStyle({'display' : 'block'});
this.ne.removePanel();
}
this.disable();
this.ne.fireEvent('remove',this);
},
disable : function() {
this.elm.setAttribute('contentEditable','false');
},
getSel : function() {
return (window.getSelection) ? window.getSelection() : document.selection;
},
getRng : function() {
var s = this.getSel();
if(!s || s.rangeCount === 0) { return; }
return (s.rangeCount > 0) ? s.getRangeAt(0) : s.createRange();
},
selRng : function(rng,s) {
if(window.getSelection) {
s.removeAllRanges();
s.addRange(rng);
} else {
rng.select();
}
},
selElm : function() {
var r = this.getRng();
if(!r) { return; }
if(r.startContainer) {
var contain = r.startContainer;
if(r.cloneContents().childNodes.length == 1) {
for(var i=0;i<contain.childNodes.length;i++) {
var rng = contain.childNodes[i].ownerDocument.createRange();
rng.selectNode(contain.childNodes[i]);
if(r.compareBoundaryPoints(Range.START_TO_START,rng) != 1 &&
r.compareBoundaryPoints(Range.END_TO_END,rng) != -1) {
return $BK(contain.childNodes[i]);
}
}
}
return $BK(contain);
} else {
return $BK((this.getSel().type == "Control") ? r.item(0) : r.parentElement());
}
},
saveRng : function() {
this.savedRange = this.getRng();
this.savedSel = this.getSel();
},
restoreRng : function() {
if(this.savedRange) {
this.selRng(this.savedRange,this.savedSel);
}
},
keyDown : function(e,t) {
if(e.ctrlKey) {
this.ne.fireEvent('key',this,e);
}
},
selected : function(e,t) {
if(!t && !(t = this.selElm)) { t = this.selElm(); }
if(!e.ctrlKey) {
var selInstance = this.ne.selectedInstance;
if(selInstance != this) {
if(selInstance) {
this.ne.fireEvent('blur',selInstance,t);
}
this.ne.selectedInstance = this;
this.ne.fireEvent('focus',selInstance,t);
}
this.ne.fireEvent('selected',selInstance,t);
this.isFocused = true;
this.elm.addClass('selected');
}
return false;
},
blur : function() {
this.isFocused = false;
this.elm.removeClass('selected');
},
saveContent : function() {
if(this.copyElm || this.options.hasPanel) {
this.ne.fireEvent('save',this);
(this.copyElm) ? this.copyElm.value = this.getContent() : this.e.innerHTML = this.getContent();
}
},
getElm : function() {
return this.elm;
},
getContent : function() {
this.content = this.getElm().innerHTML;
this.ne.fireEvent('get',this);
return this.content;
},
setContent : function(e) {
this.content = e;
this.ne.fireEvent('set',this);
this.elm.innerHTML = this.content;
},
nicCommand : function(cmd,args) {
document.execCommand(cmd,false,args);
}
});
var nicEditorIFrameInstance = nicEditorInstance.extend({
savedStyles : [],
init : function() {
var c = this.elm.innerHTML.replace(/^\s+|\s+$/g, '');
this.elm.innerHTML = '';
(!c) ? c = "<br />" : c;
this.initialContent = c;
this.elmFrame = new bkElement('iframe').setAttributes({'src' : 'javascript:;', 'frameBorder' : 0, 'allowTransparency' : 'true', 'scrolling' : 'no'}).setStyle({height: '100px', width: '100%'}).addClass('frame').appendTo(this.elm);
if(this.copyElm) { this.elmFrame.setStyle({width : (this.elm.offsetWidth-4)+'px'}); }
var styleList = ['font-size','font-family','font-weight','color'];
for(itm in styleList) {
this.savedStyles[bkLib.camelize(itm)] = this.elm.getStyle(itm);
}
setTimeout(this.initFrame.closure(this),50);
},
disable : function() {
this.elm.innerHTML = this.getContent();
},
initFrame : function() {
var fd = $BK(this.elmFrame.contentWindow.document);
fd.designMode = "on";
fd.open();
var css = this.ne.options.externalCSS;
fd.write('<html><head>'+((css) ? '<link href="'+css+'" rel="stylesheet" type="text/css" />' : '')+'</head><body id="nicEditContent" style="margin: 0 !important; background-color: transparent !important;">'+this.initialContent+'</body></html>');
fd.close();
this.frameDoc = fd;
this.frameWin = $BK(this.elmFrame.contentWindow);
this.frameContent = $BK(this.frameWin.document.body).setStyle(this.savedStyles);
this.instanceDoc = this.frameWin.document.defaultView;
this.heightUpdate();
this.frameDoc.addEvent('mousedown', this.selected.closureListener(this)).addEvent('keyup',this.heightUpdate.closureListener(this)).addEvent('keydown',this.keyDown.closureListener(this)).addEvent('keyup',this.selected.closure(this));
this.ne.fireEvent('add',this);
},
getElm : function() {
return this.frameContent;
},
setContent : function(c) {
this.content = c;
this.ne.fireEvent('set',this);
this.frameContent.innerHTML = this.content;
this.heightUpdate();
},
getSel : function() {
return (this.frameWin) ? this.frameWin.getSelection() : this.frameDoc.selection;
},
heightUpdate : function() {
this.elmFrame.style.height = Math.max(this.frameContent.offsetHeight,this.initialHeight)+'px';
},
nicCommand : function(cmd,args) {
this.frameDoc.execCommand(cmd,false,args);
setTimeout(this.heightUpdate.closure(this),100);
}
});
var nicEditorPanel = bkClass.extend({
construct : function(e,options,nicEditor) {
this.elm = e;
this.options = options;
this.ne = nicEditor;
this.panelButtons = new Array();
this.buttonList = bkExtend([],this.ne.options.buttonList);
this.panelContain = new bkElement('DIV').setStyle({overflow : 'hidden', width : '100%', border : '1px solid #cccccc', backgroundColor : '#efefef'}).addClass('panelContain');
this.panelElm = new bkElement('DIV').setStyle({margin : '2px', marginTop : '0px', zoom : 1, overflow : 'hidden'}).addClass('panel').appendTo(this.panelContain);
this.panelContain.appendTo(e);
var opt = this.ne.options;
var buttons = opt.buttons;
for(button in buttons) {
this.addButton(button,opt,true);
}
this.reorder();
e.noSelect();
},
addButton : function(buttonName,options,noOrder) {
var button = options.buttons[buttonName];
var type = (button['type']) ? eval('(typeof('+button['type']+') == "undefined") ? null : '+button['type']+';') : nicEditorButton;
var hasButton = bkLib.inArray(this.buttonList,buttonName);
if(type && (hasButton || this.ne.options.fullPanel)) {
this.panelButtons.push(new type(this.panelElm,buttonName,options,this.ne));
if(!hasButton) {
this.buttonList.push(buttonName);
}
}
},
findButton : function(itm) {
for(var i=0;i<this.panelButtons.length;i++) {
if(this.panelButtons[i].name == itm)
return this.panelButtons[i];
}
},
reorder : function() {
var bl = this.buttonList;
for(var i=0;i<bl.length;i++) {
var button = this.findButton(bl[i]);
if(button) {
this.panelElm.appendChild(button.margin);
}
}
},
remove : function() {
this.elm.remove();
}
});
var nicEditorButton = bkClass.extend({
construct : function(e,buttonName,options,nicEditor) {
this.options = options.buttons[buttonName];
this.name = buttonName;
this.ne = nicEditor;
this.elm = e;
this.margin = new bkElement('DIV').setStyle({'float' : 'left', marginTop : '2px'}).appendTo(e);
this.contain = new bkElement('DIV').setStyle({width : '20px', height : '20px'}).addClass('buttonContain').appendTo(this.margin);
this.border = new bkElement('DIV').setStyle({backgroundColor : '#efefef', border : '1px solid #efefef'}).appendTo(this.contain);
this.button = new bkElement('DIV').setStyle({width : '18px', height : '18px', overflow : 'hidden', zoom : 1, cursor : 'pointer'}).addClass('button').setStyle(this.ne.getIcon(buttonName,options)).appendTo(this.border);
this.button.addEvent('mouseover', this.hoverOn.closure(this)).addEvent('mouseout',this.hoverOff.closure(this)).addEvent('mousedown',this.mouseClick.closure(this)).noSelect();
if(!window.opera) {
this.button.onmousedown = this.button.onclick = bkLib.cancelEvent;
}
nicEditor.addEvent('selected', this.enable.closure(this)).addEvent('blur', this.disable.closure(this)).addEvent('key',this.key.closure(this));
this.disable();
this.init();
},
init : function() { },
hide : function() {
this.contain.setStyle({display : 'none'});
},
updateState : function() {
if(this.isDisabled) { this.setBg(); }
else if(this.isHover) { this.setBg('hover'); }
else if(this.isActive) { this.setBg('active'); }
else { this.setBg(); }
},
setBg : function(state) {
switch(state) {
case 'hover':
var stateStyle = {border : '1px solid #666', backgroundColor : '#ddd'};
break;
case 'active':
var stateStyle = {border : '1px solid #666', backgroundColor : '#ccc'};
break;
default:
var stateStyle = {border : '1px solid #efefef', backgroundColor : '#efefef'};
}
this.border.setStyle(stateStyle).addClass('button-'+state);
},
checkNodes : function(e) {
var elm = e;
do {
if(this.options.tags && bkLib.inArray(this.options.tags,elm.nodeName)) {
this.activate();
return true;
}
} while(elm = elm.parentNode && elm.className != "nicEdit");
elm = $BK(e);
while(elm.nodeType == 3) {
elm = $BK(elm.parentNode);
}
if(this.options.css) {
for(itm in this.options.css) {
if(elm.getStyle(itm,this.ne.selectedInstance.instanceDoc) == this.options.css[itm]) {
this.activate();
return true;
}
}
}
this.deactivate();
return false;
},
activate : function() {
if(!this.isDisabled) {
this.isActive = true;
this.updateState();
this.ne.fireEvent('buttonActivate',this);
}
},
deactivate : function() {
this.isActive = false;
this.updateState();
if(!this.isDisabled) {
this.ne.fireEvent('buttonDeactivate',this);
}
},
enable : function(ins,t) {
this.isDisabled = false;
this.contain.setStyle({'opacity' : 1}).addClass('buttonEnabled');
this.updateState();
this.checkNodes(t);
},
disable : function(ins,t) {
this.isDisabled = true;
this.contain.setStyle({'opacity' : 0.6}).removeClass('buttonEnabled');
this.updateState();
},
toggleActive : function() {
(this.isActive) ? this.deactivate() : this.activate();
},
hoverOn : function() {
if(!this.isDisabled) {
this.isHover = true;
this.updateState();
this.ne.fireEvent("buttonOver",this);
}
},
hoverOff : function() {
this.isHover = false;
this.updateState();
this.ne.fireEvent("buttonOut",this);
},
mouseClick : function() {
if(this.options.command) {
this.ne.nicCommand(this.options.command,this.options.commandArgs);
if(!this.options.noActive) {
this.toggleActive();
}
}
this.ne.fireEvent("buttonClick",this);
},
key : function(nicInstance,e) {
if(this.options.key && e.ctrlKey && String.fromCharCode(e.keyCode || e.charCode).toLowerCase() == this.options.key) {
this.mouseClick();
if(e.preventDefault) e.preventDefault();
}
}
});
var nicPlugin = bkClass.extend({
construct : function(nicEditor,options) {
this.options = options;
this.ne = nicEditor;
this.ne.addEvent('panel',this.loadPanel.closure(this));
this.init();
},
loadPanel : function(np) {
var buttons = this.options.buttons;
for(var button in buttons) {
np.addButton(button,this.options);
}
np.reorder();
},
init : function() { }
});
/* START CONFIG */
var nicPaneOptions = { };
/* END CONFIG */
var nicEditorPane = bkClass.extend({
construct : function(elm,nicEditor,options,openButton) {
this.ne = nicEditor;
this.elm = elm;
this.pos = elm.pos();
this.contain = new bkElement('div').setStyle({zIndex : '99999', overflow : 'hidden', position : 'absolute', left : this.pos[0]+'px', top : this.pos[1]+'px'})
this.pane = new bkElement('div').setStyle({fontSize : '12px', border : '1px solid #ccc', 'overflow': 'hidden', padding : '4px', textAlign: 'left', backgroundColor : '#ffffc9'}).addClass('pane').setStyle(options).appendTo(this.contain);
if(openButton && !openButton.options.noClose) {
this.close = new bkElement('div').setStyle({'float' : 'right', height: '16px', width : '16px', cursor : 'pointer'}).setStyle(this.ne.getIcon('close',nicPaneOptions)).addEvent('mousedown',openButton.removePane.closure(this)).appendTo(this.pane);
}
this.contain.noSelect().appendTo(document.body);
this.position();
this.init();
},
init : function() { },
position : function() {
if(this.ne.nicPanel) {
var panelElm = this.ne.nicPanel.elm;
var panelPos = panelElm.pos();
var newLeft = panelPos[0]+parseInt(panelElm.getStyle('width'))-(parseInt(this.pane.getStyle('width'))+8);
if(newLeft < this.pos[0]) {
this.contain.setStyle({left : newLeft+'px'});
}
}
},
toggle : function() {
this.isVisible = !this.isVisible;
this.contain.setStyle({display : ((this.isVisible) ? 'block' : 'none')});
},
remove : function() {
if(this.contain) {
this.contain.remove();
this.contain = null;
}
},
append : function(c) {
c.appendTo(this.pane);
},
setContent : function(c) {
this.pane.setContent(c);
}
});
var nicEditorAdvancedButton = nicEditorButton.extend({
init : function() {
this.ne.addEvent('selected',this.removePane.closure(this)).addEvent('blur',this.removePane.closure(this));
},
mouseClick : function() {
if(!this.isDisabled) {
if(this.pane && this.pane.pane) {
this.removePane();
} else {
this.pane = new nicEditorPane(this.contain,this.ne,{width : (this.width || '270px'), backgroundColor : '#fff'},this);
this.addPane();
this.ne.selectedInstance.saveRng();
}
}
},
addForm : function(f,elm) {
this.form = new bkElement('form').addEvent('submit',this.submit.closureListener(this));
this.pane.append(this.form);
this.inputs = {};
for(itm in f) {
var field = f[itm];
var val = '';
if(elm) {
val = elm.getAttribute(itm);
}
if(!val) {
val = field['value'] || '';
}
var type = f[itm].type;
if(type == 'title') {
new bkElement('div').setContent(field.txt).setStyle({fontSize : '14px', fontWeight: 'bold', padding : '0px', margin : '2px 0'}).appendTo(this.form);
} else {
var contain = new bkElement('div').setStyle({overflow : 'hidden', clear : 'both'}).appendTo(this.form);
if(field.txt) {
new bkElement('label').setAttributes({'for' : itm}).setContent(field.txt).setStyle({margin : '2px 4px', fontSize : '13px', width: '50px', lineHeight : '20px', textAlign : 'right', 'float' : 'left'}).appendTo(contain);
}
switch(type) {
case 'text':
this.inputs[itm] = new bkElement('input').setAttributes({id : itm, 'value' : val, 'type' : 'text'}).setStyle({margin : '2px 0', fontSize : '13px', 'float' : 'left', height : '20px', border : '1px solid #ccc', overflow : 'hidden'}).setStyle(field.style).appendTo(contain);
break;
case 'select':
this.inputs[itm] = new bkElement('select').setAttributes({id : itm}).setStyle({border : '1px solid #ccc', 'float' : 'left', margin : '2px 0'}).appendTo(contain);
for(opt in field.options) {
var o = new bkElement('option').setAttributes({value : opt, selected : (opt == val) ? 'selected' : ''}).setContent(field.options[opt]).appendTo(this.inputs[itm]);
}
break;
case 'content':
this.inputs[itm] = new bkElement('textarea').setAttributes({id : itm}).setStyle({border : '1px solid #ccc', 'float' : 'left'}).setStyle(field.style).appendTo(contain);
this.inputs[itm].value = val;
}
}
}
new bkElement('input').setAttributes({'type' : 'submit'}).setStyle({backgroundColor : '#efefef',border : '1px solid #ccc', margin : '3px 0', 'float' : 'left', 'clear' : 'both'}).appendTo(this.form);
this.form.onsubmit = bkLib.cancelEvent;
},
submit : function() { },
findElm : function(tag,attr,val) {
var list = this.ne.selectedInstance.getElm().getElementsByTagName(tag);
for(var i=0;i<list.length;i++) {
if(list[i].getAttribute(attr) == val) {
return $BK(list[i]);
}
}
},
removePane : function() {
if(this.pane) {
this.pane.remove();
this.pane = null;
this.ne.selectedInstance.restoreRng();
}
}
});
var nicButtonTips = bkClass.extend({
construct : function(nicEditor) {
this.ne = nicEditor;
nicEditor.addEvent('buttonOver',this.show.closure(this)).addEvent('buttonOut',this.hide.closure(this));
},
show : function(button) {
this.timer = setTimeout(this.create.closure(this,button),400);
},
create : function(button) {
this.timer = null;
if(!this.pane) {
this.pane = new nicEditorPane(button.button,this.ne,{fontSize : '12px', marginTop : '5px'});
this.pane.setContent(button.options.name);
}
},
hide : function(button) {
if(this.timer) {
clearTimeout(this.timer);
}
if(this.pane) {
this.pane = this.pane.remove();
}
}
});
nicEditors.registerPlugin(nicButtonTips);
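// Illustrative bootstrap (a sketch; 'myArea' is an assumed textarea id):
//   bkLib.onDomLoaded(function() {
//     new nicEditor({fullPanel : true}).panelInstance('myArea');
//   });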
/* START CONFIG */
var nicSelectOptions = {
buttons : {
'fontSize' : {name : __('Select Font Size'), type : 'nicEditorFontSizeSelect', command : 'fontsize'},
'fontFamily' : {name : __('Select Font Family'), type : 'nicEditorFontFamilySelect', command : 'fontname'},
'fontFormat' : {name : __('Select Font Format'), type : 'nicEditorFontFormatSelect', command : 'formatBlock'}
}
};
/* END CONFIG */
var nicEditorSelect = bkClass.extend({
construct : function(e,buttonName,options,nicEditor) {
this.options = options.buttons[buttonName];
this.elm = e;
this.ne = nicEditor;
this.name = buttonName;
this.selOptions = new Array();
this.margin = new bkElement('div').setStyle({'float' : 'left', margin : '2px 1px 0 1px'}).appendTo(this.elm);
this.contain = new bkElement('div').setStyle({width: '90px', height : '20px', cursor : 'pointer', overflow: 'hidden'}).addClass('selectContain').addEvent('click',this.toggle.closure(this)).appendTo(this.margin);
this.items = new bkElement('div').setStyle({overflow : 'hidden', zoom : 1, border: '1px solid #ccc', paddingLeft : '3px', backgroundColor : '#fff'}).appendTo(this.contain);
this.control = new bkElement('div').setStyle({overflow : 'hidden', 'float' : 'right', height: '18px', width : '16px'}).addClass('selectControl').setStyle(this.ne.getIcon('arrow',options)).appendTo(this.items);
this.txt = new bkElement('div').setStyle({overflow : 'hidden', 'float' : 'left', width : '66px', height : '14px', marginTop : '1px', fontFamily : 'sans-serif', textAlign : 'center', fontSize : '12px'}).addClass('selectTxt').appendTo(this.items);
if(!window.opera) {
this.contain.onmousedown = this.control.onmousedown = this.txt.onmousedown = bkLib.cancelEvent;
}
this.margin.noSelect();
this.ne.addEvent('selected', this.enable.closure(this)).addEvent('blur', this.disable.closure(this));
this.disable();
this.init();
},
disable : function() {
this.isDisabled = true;
this.close();
this.contain.setStyle({opacity : 0.6});
},
enable : function(t) {
this.isDisabled = false;
this.close();
this.contain.setStyle({opacity : 1});
},
setDisplay : function(txt) {
this.txt.setContent(txt);
},
toggle : function() {
if(!this.isDisabled) {
(this.pane) ? this.close() : this.open();
}
},
open : function() {
this.pane = new nicEditorPane(this.items,this.ne,{width : '88px', padding: '0px', borderTop : 0, borderLeft : '1px solid #ccc', borderRight : '1px solid #ccc', borderBottom : '0px', backgroundColor : '#fff'});
for(var i=0;i<this.selOptions.length;i++) {
var opt = this.selOptions[i];
			var itmContain = new bkElement('div').setStyle({overflow : 'hidden', borderBottom : '1px solid #ccc', width: '88px', textAlign : 'left', cursor : 'pointer'});
var itm = new bkElement('div').setStyle({padding : '0px 4px'}).setContent(opt[1]).appendTo(itmContain).noSelect();
itm.addEvent('click',this.update.closure(this,opt[0])).addEvent('mouseover',this.over.closure(this,itm)).addEvent('mouseout',this.out.closure(this,itm)).setAttributes('id',opt[0]);
this.pane.append(itmContain);
if(!window.opera) {
itm.onmousedown = bkLib.cancelEvent;
}
}
},
close : function() {
if(this.pane) {
this.pane = this.pane.remove();
}
},
over : function(opt) {
opt.setStyle({backgroundColor : '#ccc'});
},
out : function(opt) {
opt.setStyle({backgroundColor : '#fff'});
},
add : function(k,v) {
this.selOptions.push(new Array(k,v));
},
update : function(elm) {
this.ne.nicCommand(this.options.command,elm);
this.close();
}
});
var nicEditorFontSizeSelect = nicEditorSelect.extend({
sel : {1 : '1 (8pt)', 2 : '2 (10pt)', 3 : '3 (12pt)', 4 : '4 (14pt)', 5 : '5 (18pt)', 6 : '6 (24pt)'},
init : function() {
this.setDisplay('Font Size...');
		for(var itm in this.sel) {
this.add(itm,'<font size="'+itm+'">'+this.sel[itm]+'</font>');
}
}
});
var nicEditorFontFamilySelect = nicEditorSelect.extend({
sel : {'arial' : 'Arial','comic sans ms' : 'Comic Sans','courier new' : 'Courier New','georgia' : 'Georgia', 'helvetica' : 'Helvetica', 'impact' : 'Impact', 'times new roman' : 'Times', 'trebuchet ms' : 'Trebuchet', 'verdana' : 'Verdana'},
init : function() {
this.setDisplay('Font Family...');
		for(var itm in this.sel) {
this.add(itm,'<font face="'+itm+'">'+this.sel[itm]+'</font>');
}
}
});
var nicEditorFontFormatSelect = nicEditorSelect.extend({
sel : {'p' : 'Paragraph', 'pre' : 'Pre', 'h6' : 'Heading 6', 'h5' : 'Heading 5', 'h4' : 'Heading 4', 'h3' : 'Heading 3', 'h2' : 'Heading 2', 'h1' : 'Heading 1'},
init : function() {
this.setDisplay('Font Format...');
		for(var itm in this.sel) {
var tag = itm.toUpperCase();
this.add('<'+tag+'>','<'+itm+' style="padding: 0px; margin: 0px;">'+this.sel[itm]+'</'+tag+'>');
}
}
});
nicEditors.registerPlugin(nicPlugin,nicSelectOptions);
/* START CONFIG */
var nicLinkOptions = {
buttons : {
'link' : {name : 'Add Link', type : 'nicLinkButton', tags : ['A']},
'unlink' : {name : 'Remove Link', command : 'unlink', noActive : true}
}
};
/* END CONFIG */
var nicLinkButton = nicEditorAdvancedButton.extend({
addPane : function() {
this.ln = this.ne.selectedInstance.selElm().parentTag('A');
this.addForm({
'' : {type : 'title', txt : 'Add/Edit Link'},
'href' : {type : 'text', txt : 'URL', value : 'http://', style : {width: '150px'}},
'title' : {type : 'text', txt : 'Title'},
'target' : {type : 'select', txt : 'Open In', options : {'' : 'Current Window', '_blank' : 'New Window'},style : {width : '100px'}}
},this.ln);
},
submit : function(e) {
var url = this.inputs['href'].value;
if(url == "http://" || url == "") {
alert("You must enter a URL to Create a Link");
return false;
}
this.removePane();
if(!this.ln) {
var tmp = 'javascript:nicTemp();';
this.ne.nicCommand("createlink",tmp);
this.ln = this.findElm('A','href',tmp);
}
if(this.ln) {
this.ln.setAttributes({
href : this.inputs['href'].value,
title : this.inputs['title'].value,
target : this.inputs['target'].options[this.inputs['target'].selectedIndex].value
});
}
}
});
nicEditors.registerPlugin(nicPlugin,nicLinkOptions);
/* START CONFIG */
var nicColorOptions = {
buttons : {
'forecolor' : {name : __('Change Text Color'), type : 'nicEditorColorButton', noClose : true},
'bgcolor' : {name : __('Change Background Color'), type : 'nicEditorBgColorButton', noClose : true}
}
};
/* END CONFIG */
var nicEditorColorButton = nicEditorAdvancedButton.extend({
addPane : function() {
var colorList = {0 : '00',1 : '33',2 : '66',3 :'99',4 : 'CC',5 : 'FF'};
var colorItems = new bkElement('DIV').setStyle({width: '270px'});
for(var r in colorList) {
for(var b in colorList) {
for(var g in colorList) {
var colorCode = '#'+colorList[r]+colorList[g]+colorList[b];
var colorSquare = new bkElement('DIV').setStyle({'cursor' : 'pointer', 'height' : '15px', 'float' : 'left'}).appendTo(colorItems);
var colorBorder = new bkElement('DIV').setStyle({border: '2px solid '+colorCode}).appendTo(colorSquare);
var colorInner = new bkElement('DIV').setStyle({backgroundColor : colorCode, overflow : 'hidden', width : '11px', height : '11px'}).addEvent('click',this.colorSelect.closure(this,colorCode)).addEvent('mouseover',this.on.closure(this,colorBorder)).addEvent('mouseout',this.off.closure(this,colorBorder,colorCode)).appendTo(colorBorder);
if(!window.opera) {
colorSquare.onmousedown = colorInner.onmousedown = bkLib.cancelEvent;
}
}
}
}
this.pane.append(colorItems.noSelect());
},
colorSelect : function(c) {
this.ne.nicCommand('foreColor',c);
this.removePane();
},
on : function(colorBorder) {
colorBorder.setStyle({border : '2px solid #000'});
},
off : function(colorBorder,colorCode) {
colorBorder.setStyle({border : '2px solid '+colorCode});
}
});
var nicEditorBgColorButton = nicEditorColorButton.extend({
colorSelect : function(c) {
this.ne.nicCommand('hiliteColor',c);
this.removePane();
}
});
nicEditors.registerPlugin(nicPlugin,nicColorOptions);
/* START CONFIG */
var nicImageOptions = {
buttons : {
'image' : {name : 'Add Image', type : 'nicImageButton', tags : ['IMG']}
}
};
/* END CONFIG */
var nicImageButton = nicEditorAdvancedButton.extend({
addPane : function() {
this.im = this.ne.selectedInstance.selElm().parentTag('IMG');
this.addForm({
'' : {type : 'title', txt : 'Add/Edit Image'},
'src' : {type : 'text', txt : 'URL', 'value' : 'http://', style : {width: '150px'}},
'alt' : {type : 'text', txt : 'Alt Text', style : {width: '100px'}},
'align' : {type : 'select', txt : 'Align', options : {none : 'Default','left' : 'Left', 'right' : 'Right'}}
},this.im);
},
submit : function(e) {
var src = this.inputs['src'].value;
if(src == "" || src == "http://") {
alert("You must enter a Image URL to insert");
return false;
}
this.removePane();
if(!this.im) {
var tmp = 'javascript:nicImTemp();';
this.ne.nicCommand("insertImage",tmp);
this.im = this.findElm('IMG','src',tmp);
}
if(this.im) {
this.im.setAttributes({
src : this.inputs['src'].value,
alt : this.inputs['alt'].value,
align : this.inputs['align'].value
});
}
}
});
nicEditors.registerPlugin(nicPlugin,nicImageOptions);
/* START CONFIG */
var nicSaveOptions = {
buttons : {
'save' : {name : __('Save this content'), type : 'nicEditorSaveButton'}
}
};
/* END CONFIG */
var nicEditorSaveButton = nicEditorButton.extend({
init : function() {
if(!this.ne.options.onSave) {
this.margin.setStyle({'display' : 'none'});
}
},
mouseClick : function() {
var onSave = this.ne.options.onSave;
var selectedInstance = this.ne.selectedInstance;
onSave(selectedInstance.getContent(), selectedInstance.elm.id, selectedInstance);
}
});
nicEditors.registerPlugin(nicPlugin,nicSaveOptions);
/* START CONFIG */
var nicUploadOptions = {
buttons : {
'upload' : {name : 'Upload Image', type : 'nicUploadButton'}
}
};
/* END CONFIG */
var nicUploadButton = nicEditorAdvancedButton.extend({
nicURI : 'http://api.imgur.com/2/upload.json',
errorText : 'Failed to upload image',
addPane : function() {
if(typeof window.FormData === "undefined") {
return this.onError("Image uploads are not supported in this browser, use Chrome, Firefox, or Safari instead.");
}
this.im = this.ne.selectedInstance.selElm().parentTag('IMG');
var container = new bkElement('div')
.setStyle({ padding: '10px' })
.appendTo(this.pane.pane);
new bkElement('div')
.setStyle({ fontSize: '14px', fontWeight : 'bold', paddingBottom: '5px' })
.setContent('Insert an Image')
.appendTo(container);
this.fileInput = new bkElement('input')
.setAttributes({ 'type' : 'file' })
.appendTo(container);
this.progress = new bkElement('progress')
.setStyle({ width : '100%', display: 'none' })
.setAttributes('max', 100)
.appendTo(container);
this.fileInput.onchange = this.uploadFile.closure(this);
},
onError : function(msg) {
this.removePane();
alert(msg || "Failed to upload image");
},
uploadFile : function() {
var file = this.fileInput.files[0];
if (!file || !file.type.match(/image.*/)) {
this.onError("Only image files can be uploaded");
return;
}
this.fileInput.setStyle({ display: 'none' });
this.setProgress(0);
var fd = new FormData(); // https://hacks.mozilla.org/2011/01/how-to-develop-a-html5-image-uploader/
fd.append("image", file);
fd.append("key", "b7ea18a4ecbda8e92203fa4968d10660");
var xhr = new XMLHttpRequest();
xhr.open("POST", this.ne.options.uploadURI || this.nicURI);
xhr.onload = function() {
try {
var res = JSON.parse(xhr.responseText);
} catch(e) {
return this.onError();
}
this.onUploaded(res.upload);
}.closure(this);
xhr.onerror = this.onError.closure(this);
xhr.upload.onprogress = function(e) {
this.setProgress(e.loaded / e.total);
}.closure(this);
xhr.send(fd);
},
setProgress : function(percent) {
this.progress.setStyle({ display: 'block' });
if(percent < .98) {
this.progress.value = percent;
} else {
this.progress.removeAttribute('value');
}
},
onUploaded : function(options) {
this.removePane();
var src = options.links.original;
if(!this.im) {
this.ne.selectedInstance.restoreRng();
var tmp = 'javascript:nicImTemp();';
this.ne.nicCommand("insertImage", src);
this.im = this.findElm('IMG','src', src);
}
var w = parseInt(this.ne.selectedInstance.elm.getStyle('width'));
if(this.im) {
this.im.setAttributes({
src : src,
width : (w && options.image.width) ? Math.min(w, options.image.width) : ''
});
}
}
});
nicEditors.registerPlugin(nicPlugin,nicUploadOptions);
var nicXHTML = bkClass.extend({
stripAttributes : ['_moz_dirty','_moz_resizing','_extended'],
noShort : ['style','title','script','textarea','a'],
cssReplace : {'font-weight:bold;' : 'strong', 'font-style:italic;' : 'em'},
sizes : {1 : 'xx-small', 2 : 'x-small', 3 : 'small', 4 : 'medium', 5 : 'large', 6 : 'x-large'},
construct : function(nicEditor) {
this.ne = nicEditor;
if(this.ne.options.xhtml) {
nicEditor.addEvent('get',this.cleanup.closure(this));
}
},
cleanup : function(ni) {
var node = ni.getElm();
var xhtml = this.toXHTML(node);
ni.content = xhtml;
},
toXHTML : function(n,r,d) {
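		// Recursively serialize node n to XHTML; r controls whether the current
		// element's own tag is emitted, d flags a recursive (deep) call.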
var txt = '';
var attrTxt = '';
var cssTxt = '';
var nType = n.nodeType;
var nName = n.nodeName.toLowerCase();
var nChild = n.hasChildNodes && n.hasChildNodes();
var extraNodes = new Array();
switch(nType) {
case 1:
var nAttributes = n.attributes;
switch(nName) {
case 'b':
nName = 'strong';
break;
case 'i':
nName = 'em';
break;
case 'font':
nName = 'span';
break;
}
if(r) {
for(var i=0;i<nAttributes.length;i++) {
var attr = nAttributes[i];
var attributeName = attr.nodeName.toLowerCase();
var attributeValue = attr.nodeValue;
if(!attr.specified || !attributeValue || bkLib.inArray(this.stripAttributes,attributeName) || typeof(attributeValue) == "function") {
continue;
}
switch(attributeName) {
case 'style':
var css = attributeValue.replace(/ /g,"");
						for(var itm in this.cssReplace) {
if(css.indexOf(itm) != -1) {
extraNodes.push(this.cssReplace[itm]);
css = css.replace(itm,'');
}
}
cssTxt += css;
attributeValue = "";
break;
case 'class':
attributeValue = attributeValue.replace("Apple-style-span","");
break;
case 'size':
cssTxt += "font-size:"+this.sizes[attributeValue]+';';
attributeValue = "";
break;
}
if(attributeValue) {
attrTxt += ' '+attributeName+'="'+attributeValue+'"';
}
}
if(cssTxt) {
attrTxt += ' style="'+cssTxt+'"';
}
for(var i=0;i<extraNodes.length;i++) {
txt += '<'+extraNodes[i]+'>';
}
if(attrTxt == "" && nName == "span") {
r = false;
}
if(r) {
txt += '<'+nName;
if(nName != 'br') {
txt += attrTxt;
}
}
}
			if(!nChild && !bkLib.inArray(this.noShort,nName)) {
if(r) {
txt += ' />';
}
} else {
if(r) {
txt += '>';
}
for(var i=0;i<n.childNodes.length;i++) {
var results = this.toXHTML(n.childNodes[i],true,true);
if(results) {
txt += results;
}
}
}
if(r && nChild) {
txt += '</'+nName+'>';
}
for(var i=0;i<extraNodes.length;i++) {
txt += '</'+extraNodes[i]+'>';
}
break;
case 3:
//if(n.nodeValue != '\n') {
txt += n.nodeValue;
//}
break;
}
return txt;
}
});
nicEditors.registerPlugin(nicXHTML);
var nicBBCode = bkClass.extend({
construct : function(nicEditor) {
this.ne = nicEditor;
if(this.ne.options.bbCode) {
nicEditor.addEvent('get',this.bbGet.closure(this));
nicEditor.addEvent('set',this.bbSet.closure(this));
var loadedPlugins = this.ne.loadedPlugins;
			for(var itm in loadedPlugins) {
if(loadedPlugins[itm].toXHTML) {
this.xhtml = loadedPlugins[itm];
}
}
}
},
bbGet : function(ni) {
var xhtml = this.xhtml.toXHTML(ni.getElm());
ni.content = this.toBBCode(xhtml);
},
bbSet : function(ni) {
ni.content = this.fromBBCode(ni.content);
},
toBBCode : function(xhtml) {
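		// rp: shorthand that applies a regex replacement to the working copy of the markup.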
function rp(r,m) {
xhtml = xhtml.replace(r,m);
}
rp(/\n/gi,"");
rp(/<strong>(.*?)<\/strong>/gi,"[b]$1[/b]");
rp(/<em>(.*?)<\/em>/gi,"[i]$1[/i]");
rp(/<span.*?style="text-decoration:underline;">(.*?)<\/span>/gi,"[u]$1[/u]");
rp(/<ul>(.*?)<\/ul>/gi,"[list]$1[/list]");
rp(/<li>(.*?)<\/li>/gi,"[*]$1[/*]");
rp(/<ol>(.*?)<\/ol>/gi,"[list=1]$1[/list]");
rp(/<img.*?src="(.*?)".*?>/gi,"[img]$1[/img]");
rp(/<a.*?href="(.*?)".*?>(.*?)<\/a>/gi,"[url=$1]$2[/url]");
rp(/<br.*?>/gi,"\n");
rp(/<.*?>.*?<\/.*?>/gi,"");
return xhtml;
},
fromBBCode : function(bbCode) {
function rp(r,m) {
bbCode = bbCode.replace(r,m);
}
rp(/\[b\](.*?)\[\/b\]/gi,"<strong>$1</strong>");
rp(/\[i\](.*?)\[\/i\]/gi,"<em>$1</em>");
rp(/\[u\](.*?)\[\/u\]/gi,"<span style=\"text-decoration:underline;\">$1</span>");
rp(/\[list\](.*?)\[\/list\]/gi,"<ul>$1</ul>");
rp(/\[list=1\](.*?)\[\/list\]/gi,"<ol>$1</ol>");
rp(/\[\*\](.*?)\[\/\*\]/gi,"<li>$1</li>");
rp(/\[img\](.*?)\[\/img\]/gi,"<img src=\"$1\" />");
rp(/\[url=(.*?)\](.*?)\[\/url\]/gi,"<a href=\"$1\">$2</a>");
rp(/\n/gi,"<br />");
//rp(/\[.*?\](.*?)\[\/.*?\]/gi,"$1");
return bbCode;
}
});
nicEditors.registerPlugin(nicBBCode);
nicEditor = nicEditor.extend({
floatingPanel : function() {
this.floating = new bkElement('DIV').setStyle({position: 'absolute', top : '-1000px'}).appendTo(document.body);
this.addEvent('focus', this.reposition.closure(this)).addEvent('blur', this.hide.closure(this));
this.setPanel(this.floating);
},
reposition : function() {
var e = this.selectedInstance.e;
this.floating.setStyle({ width : (parseInt(e.getStyle('width')) || e.clientWidth)+'px' });
var top = e.offsetTop-this.floating.offsetHeight;
if(top < 0) {
top = e.offsetTop+e.offsetHeight;
}
this.floating.setStyle({ top : top+'px', left : e.offsetLeft+'px', display : 'block' });
},
hide : function() {
this.floating.setStyle({ top : '-1000px'});
}
});
/* START CONFIG */
var nicCodeOptions = {
buttons : {
'xhtml' : {name : 'Edit HTML', type : 'nicCodeButton'}
}
};
/* END CONFIG */
var nicCodeButton = nicEditorAdvancedButton.extend({
width : '350px',
addPane : function() {
this.addForm({
'' : {type : 'title', txt : 'Edit HTML'},
'code' : {type : 'content', 'value' : this.ne.selectedInstance.getContent(), style : {width: '340px', height : '200px'}}
});
},
submit : function(e) {
var code = this.inputs['code'].value;
this.ne.selectedInstance.setContent(code);
this.removePane();
}
});
nicEditors.registerPlugin(nicPlugin,nicCodeOptions);<|fim▁end|> | removeClass : function(cls) {
if (this.hasClass(cls)) { |
<|file_name|>dashboard.js<|end_file_name|><|fim▁begin|>module.exports = {};
var app_data = {};
function initCharts()
{
$(document).ready(function() {
$.get(app_data.config.analytics_data_route, function(analytics_data) {
data = analytics_data.data;
max_val = analytics_data.highest_value;
var parseDate = d3.time.format('%Y%m%d').parse;
data.forEach(function(d) {
d.date = parseDate(d.date);
});
$('.sessions-value').html(formatAnalyticsValue((analytics_data.total_sessions).toString()));
$('.views-value').html(formatAnalyticsValue((analytics_data.total_views).toString()));
d3.select(window).on('resize', resize);
loadCharts();
}, 'json');
});
}
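// Render a responsive D3 line + area chart of the analytics data into the element
// matched by `selector`, sized to graph_width x graph_height with the y axis capped at max_val.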
function loadChart(data, max_val, selector, graph_width, graph_height)
{
var margin = { top: 20, right: 0, bottom: 30, left: 50 },
width = graph_width - margin.left - margin.right,
height = graph_height - margin.top - margin.bottom;
var x = d3.time.scale().range([0, width]);
var y = d3.scale.linear().range([height, 0]);
var color = d3.scale.category10();
var x_axis = d3.svg.axis().scale(x).orient('bottom'); //.tickFormat(d3.time.format('%m/%d/%y'));
var y_axis = d3.svg.axis().scale(y).orient('left').ticks(6);
var line = d3.svg.line()
.interpolate('cardinal')
.tension(0.8)
.x(function(d) { return x(d.date); })
.y(function(d) { return y(d.val); });
var line_gridline = d3.svg.line()
.x(function(d) { return x(d[0]); })
.y(function(d) { return y(d[1]); });
var area = d3.svg.area()
.interpolate('cardinal')
.tension(0.8)
.x(function(d) { return x(d.date); })
.y0(height)
.y1(function(d) { return y(d.val); });
d3.select(selector + ' > svg').remove();
var svg = d3.select(selector).append('svg')
.attr('viewBox', '0 0 ' + graph_width + ' ' + graph_height)
.attr('perserveAspectRatio', 'xMinYMid')
.append('g')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
color.domain([ 'sessions', 'views' ]);
var analytics = color.domain().map(function(name) {
return {
name: name,
values: data.map(function(d) {
return {date: d.date, val: +d[name]};
})
};
});
var x_extent = d3.extent(data, function(d) { return d.date; });
x.domain(x_extent);
y.domain([
d3.min(analytics, function(c) { return 0; }),
d3.max(analytics, function(c) { return max_val; /*d3.max(c.values, function(v) { return v.val; });*/ })
]);
svg.append('g')
.attr('class', 'x axis')
.attr('transform', 'translate(0,' + height + ')')
.call(x_axis);
svg.append('g')
.attr('class', 'y axis')
.call(y_axis)
.append('text')
.style('text-anchor', 'end');
var gridline_data = [];
svg.selectAll('.y.axis .tick').each(function(data) {
var tick = d3.select(this);
var transform = d3.transform(tick.attr('transform')).translate;
if (data > 0)
{
gridline_data.push({ values: [[x_extent[0], transform[1]], [x_extent[1], transform[1]]] });
}
});
gridline_data.forEach(function(data) {
svg.append('line')
.attr('class', 'gridline')
.attr('x1', x(data.values[0][0]))
.attr('x2', x(data.values[1][0]))
.attr('y1', data.values[0][1])
.attr('y2', data.values[1][1]);
});
var analytics_line = svg.selectAll('.analytics_line')
<|fim▁hole|> analytics_line.append('path')
.attr('class', 'line')
.attr('d', function(d) { return line(d.values); })
.style('stroke', function(d) { return '#f2711c'; });
analytics_line.append('path')
.attr('class', 'area')
.attr('d', function(d) { return area(d.values); })
.style('fill', function(d) { return '#f2711c'; });
/*analytics.forEach(function(category) {
category.values.forEach(function(item) {
analytics_line.append('circle')
.attr('class', 'dot')
.attr('r', 4)
.attr('cx', x(item.date))
.attr('cy', y(item.val))
.style('fill', '#f2711c');
});
});*/
}
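// Insert a space as a thousands separator every three digits, e.g. "1234567" -> "1 234 567".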
function formatAnalyticsValue(value)
{
var formatted_val = '';
var c = 1;
for (var i=value.length-1; i>=0; i--)
{
formatted_val = (c++ % 3 == 0 && i > 0 ? ' ' : '') + value.substring(i, i+1) + formatted_val;
}
return formatted_val;
}
var aspect = 4;
var chart = null;
var data = null;
var max_val = 0;
var resize_timeout = -1;
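// Debounce window resizes: redraw the chart only after resizing has been idle for one second.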
function resize()
{
if (resize_timeout != -1) clearTimeout(resize_timeout);
resize_timeout = setTimeout(function() {
resize_timeout = -1;
loadCharts();
}, 1000);
}
function loadCharts()
{
if (data == null) return;
var width = $('.analytics-graph').width();
	var height = Math.max(200, $('.analytics-graph').width()/aspect); // prevents the height from being smaller than 200px
loadChart(data, max_val, '.analytics-graph', width, height);
chart = $('.analytics-graph > svg');
}
module.exports.init = function(trans, config) {
app_data.trans = trans;
app_data.config = config;
$(document).ready(function() {
initCharts();
});
};<|fim▁end|> | .data(analytics)
.enter().append('g')
.attr('class', 'analytics_line');
|
<|file_name|>Exec.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client.coprocessor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.io.HbaseObjectWritable;
import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.ipc.Invocation;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Classes;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.lang.reflect.Method;
/**
* Represents an arbitrary method invocation against a Coprocessor
* instance. In order for a coprocessor implementation to be remotely callable
* by clients, it must define and implement a {@link CoprocessorProtocol}
* subclass. Only methods defined in the {@code CoprocessorProtocol} interface
* will be callable by clients.
*
* <p>
* This class is used internally by
* {@link org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
* to wrap the {@code CoprocessorProtocol} method invocations requested in
* RPC calls. It should not be used directly by HBase clients.
* </p>
*
* @see ExecResult
* @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
* @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
*/
public class Exec extends Invocation implements Row {
private Configuration conf = HBaseConfiguration.create();
/** Row key used as a reference for any region lookups */
private byte[] referenceRow;
private Class<? extends CoprocessorProtocol> protocol;
public Exec() {
}
public Exec(Configuration configuration,
byte[] row,
Class<? extends CoprocessorProtocol> protocol,
Method method, Object[] parameters) {
super(method, parameters);
this.conf = configuration;
this.referenceRow = row;
this.protocol = protocol;
}
public Class<? extends CoprocessorProtocol> getProtocol() {
return protocol;
}
public byte[] getRow() {
return referenceRow;
}
public int compareTo(Row row) {
return Bytes.compareTo(referenceRow, row.getRow());
}
@Override
public void write(DataOutput out) throws IOException {
// fields for Invocation
out.writeUTF(this.methodName);
out.writeInt(parameterClasses.length);
for (int i = 0; i < parameterClasses.length; i++) {
HbaseObjectWritable.writeObject(out, parameters[i],
parameters[i] != null ? parameters[i].getClass() : parameterClasses[i],
conf);
out.writeUTF(parameterClasses[i].getName());
}
// fields for Exec
Bytes.writeByteArray(out, referenceRow);
out.writeUTF(protocol.getName());
}
@Override
public void readFields(DataInput in) throws IOException {
// fields for Invocation
methodName = in.readUTF();
parameters = new Object[in.readInt()];
parameterClasses = new Class[parameters.length];
HbaseObjectWritable objectWritable = new HbaseObjectWritable();
for (int i = 0; i < parameters.length; i++) {
parameters[i] = HbaseObjectWritable.readObject(in, objectWritable,
this.conf);
String parameterClassName = in.readUTF();
try {
parameterClasses[i] = Classes.extendedForName(parameterClassName);
} catch (ClassNotFoundException e) {
throw new IOException("Couldn't find class: " + parameterClassName);
}
}
// fields for Exec
referenceRow = Bytes.readByteArray(in);
String protocolName = in.readUTF();
try {
protocol = (Class<CoprocessorProtocol>)conf.getClassByName(protocolName);
}
catch (ClassNotFoundException cnfe) {
throw new IOException("Protocol class "+protocolName+" not found", cnfe);
}<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>kirki.input.js<|end_file_name|><|fim▁begin|>/* global kirkiL10n */
var kirki = kirki || {};
kirki = jQuery.extend( kirki, {
/**
* An object containing definitions for input fields.
*
* @since 3.0.16
*/
input: {
/**
* Radio input fields.
*
* @since 3.0.17
*/
radio: {
/**
* Init the control.
*
* @since 3.0.17
* @param {Object} control - The control object.
* @param {Object} control.id - The setting.
* @returns {null}
*/
init: function( control ) {
var input = jQuery( 'input[data-id="' + control.id + '"]' );
// Save the value
input.on( 'change keyup paste click', function() {
kirki.setting.set( control.id, jQuery( this ).val() );
} );
}
},
/**
* Color input fields.
*
* @since 3.0.16
*/
color: {
/**
* Init the control.
*
* @since 3.0.16
* @param {Object} control - The control object.
* @param {Object} control.id - The setting.
* @param {Object} control.choices - Additional options for the colorpickers.
* @param {Object} control.params - Control parameters.
* @param {Object} control.params.choices - alias for control.choices.
* @returns {null}
*/
init: function( control ) {
var picker = jQuery( '.kirki-color-control[data-id="' + control.id + '"]' ),
clear;
control.choices = control.choices || {};
if ( _.isEmpty( control.choices ) && control.params.choices ) {
control.choices = control.params.choices;
}
// If we have defined any extra choices, make sure they are passed-on to Iris.
if ( ! _.isEmpty( control.choices ) ) {
picker.wpColorPicker( control.choices );
}
// Tweaks to make the "clear" buttons work.
setTimeout( function() {
clear = jQuery( '.kirki-input-container[data-id="' + control.id + '"] .wp-picker-clear' );
if ( clear.length ) {
clear.click( function() {
kirki.setting.set( control.id, '' );
} );
}
}, 200 );
// Saves our settings to the WP API
picker.wpColorPicker( {
change: function() {
// Small hack: the picker needs a small delay
setTimeout( function() {
kirki.setting.set( control.id, picker.val() );
}, 20 );
}
} );
}
},
/**
* Generic input fields.
*
* @since 3.0.17
*/
genericInput: {
/**
* Init the control.
*
* @since 3.0.17
* @param {Object} control - The control object.
* @param {Object} control.id - The setting.
* @returns {null}
*/
init: function( control ) {
var input = jQuery( 'input[data-id="' + control.id + '"]' );
// Save the value
input.on( 'change keyup paste click', function() {
kirki.setting.set( control.id, jQuery( this ).val() );
} );
}
},
/**
* Generic input fields.
*
* @since 3.0.17
*/
textarea: {
/**
* Init the control.
*
* @since 3.0.17
* @param {Object} control - The control object.
* @param {Object} control.id - The setting.
* @returns {null}
*/
init: function( control ) {
var textarea = jQuery( 'textarea[data-id="' + control.id + '"]' );
// Save the value
textarea.on( 'change keyup paste click', function() {
kirki.setting.set( control.id, jQuery( this ).val() );
} );
}
},
select: {
/**
* Init the control.
*
* @since 3.0.17
* @param {Object} control - The control object.
* @param {Object} control.id - The setting.
* @returns {null}
*/
init: function( control ) {
var element = jQuery( 'select[data-id="' + control.id + '"]' ),
multiple = parseInt( element.data( 'multiple' ), 10 ),
selectValue,
selectWooOptions = {
escapeMarkup: function( markup ) {
return markup;
}
};
if ( control.params.placeholder ) {
selectWooOptions.placeholder = control.params.placeholder;
selectWooOptions.allowClear = true;
}
if ( 1 < multiple ) {
selectWooOptions.maximumSelectionLength = multiple;
}
jQuery( element ).selectWoo( selectWooOptions ).on( 'change', function() {
selectValue = jQuery( this ).val();
selectValue = ( null === selectValue && 1 < multiple ) ? [] : selectValue;
kirki.setting.set( control.id, selectValue );
} );
}
},
/**
* Number fields.
*
* @since 3.0.26
*/
number: {
/**
* Init the control.
*
* @since 3.0.17
* @param {Object} control - The control object.
* @param {Object} control.id - The setting.
* @returns {null}
*/
init: function( control ) {
var element = jQuery( 'input[data-id="' + control.id + '"]' ),
value = control.setting._value,
up,
down;
				// Make sure we use default values if none are defined for some arguments.
control.params.choices = _.defaults( control.params.choices, {
min: 0,
max: 100,
step: 1
} );
// Make sure we have a valid value.
if ( isNaN( value ) || '' === value ) {
value = ( 0 > control.params.choices.min && 0 < control.params.choices.max ) ? 0 : control.params.choices.min;
}
value = parseFloat( value );
// If step is 'any', set to 0.001.
control.params.choices.step = ( 'any' === control.params.choices.step ) ? 0.001 : control.params.choices.step;
				// Make sure choices are properly formatted as numbers.
control.params.choices.min = parseFloat( control.params.choices.min );
control.params.choices.max = parseFloat( control.params.choices.max );
control.params.choices.step = parseFloat( control.params.choices.step );
up = jQuery( '.kirki-input-container[data-id="' + control.id + '"] .plus' );
down = jQuery( '.kirki-input-container[data-id="' + control.id + '"] .minus' );
up.click( function() {
var oldVal = parseFloat( element.val() ),
newVal;
newVal = ( oldVal >= control.params.choices.max ) ? oldVal : oldVal + control.params.choices.step;
element.val( newVal );
element.trigger( 'change' );
} );
down.click( function() {
var oldVal = parseFloat( element.val() ),
newVal;
newVal = ( oldVal <= control.params.choices.min ) ? oldVal : oldVal - control.params.choices.step;<|fim▁hole|> element.val( newVal );
element.trigger( 'change' );
} );
element.on( 'change keyup paste click', function() {
var val = jQuery( this ).val();
if ( isNaN( val ) ) {
						val = parseFloat( val );
val = ( isNaN( val ) ) ? 0 : val;
jQuery( this ).attr( 'value', val );
}
kirki.setting.set( control.id, val );
} );
}
},
/**
* Image fields.
*
* @since 3.0.34
*/
image: {
/**
* Init the control.
*
* @since 3.0.34
* @param {Object} control - The control object.
* @returns {null}
*/
init: function( control ) {
var value = kirki.setting.get( control.id ),
saveAs = ( ! _.isUndefined( control.params.choices ) && ! _.isUndefined( control.params.choices.save_as ) ) ? control.params.choices.save_as : 'url',
preview = control.container.find( '.placeholder, .thumbnail' ),
previewImage = ( 'array' === saveAs ) ? value.url : value,
removeButton = control.container.find( '.image-upload-remove-button' ),
defaultButton = control.container.find( '.image-default-button' );
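				// Depending on choices.save_as, the value is stored as a plain URL,
				// an attachment ID, or an { id, url, width, height } object.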
// Make sure value is properly formatted.
value = ( 'array' === saveAs && _.isString( value ) ) ? { url: value } : value;
// Tweaks for save_as = id.
if ( ( 'id' === saveAs || 'ID' === saveAs ) && '' !== value ) {
wp.media.attachment( value ).fetch().then( function() {
setTimeout( function() {
var url = wp.media.attachment( value ).get( 'url' );
preview.removeClass().addClass( 'thumbnail thumbnail-image' ).html( '<img src="' + url + '" alt="" />' );
}, 700 );
} );
}
// If value is not empty, hide the "default" button.
if ( ( 'url' === saveAs && '' !== value ) || ( 'array' === saveAs && ! _.isUndefined( value.url ) && '' !== value.url ) ) {
					control.container.find( '.image-default-button' ).hide();
}
// If value is empty, hide the "remove" button.
if ( ( 'url' === saveAs && '' === value ) || ( 'array' === saveAs && ( _.isUndefined( value.url ) || '' === value.url ) ) ) {
removeButton.hide();
}
// If value is default, hide the default button.
if ( value === control.params.default ) {
					control.container.find( '.image-default-button' ).hide();
}
if ( '' !== previewImage ) {
preview.removeClass().addClass( 'thumbnail thumbnail-image' ).html( '<img src="' + previewImage + '" alt="" />' );
}
control.container.on( 'click', '.image-upload-button', function( e ) {
var image = wp.media( { multiple: false } ).open().on( 'select', function() {
// This will return the selected image from the Media Uploader, the result is an object.
var uploadedImage = image.state().get( 'selection' ).first(),
jsonImg = uploadedImage.toJSON(),
previewImage = jsonImg.url;
if ( ! _.isUndefined( jsonImg.sizes ) ) {
previewImage = jsonImg.sizes.full.url;
if ( ! _.isUndefined( jsonImg.sizes.medium ) ) {
previewImage = jsonImg.sizes.medium.url;
} else if ( ! _.isUndefined( jsonImg.sizes.thumbnail ) ) {
previewImage = jsonImg.sizes.thumbnail.url;
}
}
if ( 'array' === saveAs ) {
kirki.setting.set( control.id, {
id: jsonImg.id,
url: jsonImg.sizes.full.url,
width: jsonImg.width,
height: jsonImg.height
} );
} else if ( 'id' === saveAs ) {
kirki.setting.set( control.id, jsonImg.id );
} else {
kirki.setting.set( control.id, ( ( ! _.isUndefined( jsonImg.sizes ) ) ? jsonImg.sizes.full.url : jsonImg.url ) );
}
if ( preview.length ) {
preview.removeClass().addClass( 'thumbnail thumbnail-image' ).html( '<img src="' + previewImage + '" alt="" />' );
}
if ( removeButton.length ) {
removeButton.show();
defaultButton.hide();
}
} );
e.preventDefault();
} );
control.container.on( 'click', '.image-upload-remove-button', function( e ) {
var preview,
removeButton,
defaultButton;
e.preventDefault();
kirki.setting.set( control.id, '' );
preview = control.container.find( '.placeholder, .thumbnail' );
removeButton = control.container.find( '.image-upload-remove-button' );
defaultButton = control.container.find( '.image-default-button' );
if ( preview.length ) {
preview.removeClass().addClass( 'placeholder' ).html( kirkiL10n.noFileSelected );
}
if ( removeButton.length ) {
removeButton.hide();
if ( jQuery( defaultButton ).hasClass( 'button' ) ) {
defaultButton.show();
}
}
} );
control.container.on( 'click', '.image-default-button', function( e ) {
var preview,
removeButton,
defaultButton;
e.preventDefault();
kirki.setting.set( control.id, control.params.default );
preview = control.container.find( '.placeholder, .thumbnail' );
removeButton = control.container.find( '.image-upload-remove-button' );
defaultButton = control.container.find( '.image-default-button' );
if ( preview.length ) {
preview.removeClass().addClass( 'thumbnail thumbnail-image' ).html( '<img src="' + control.params.default + '" alt="" />' );
}
if ( removeButton.length ) {
removeButton.show();
defaultButton.hide();
}
} );
}
}
}
} );<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//##################################################################################################
//! A collection of tools to emulate dice and dice rolls.
//##################################################################################################
//##################################################################################################
//************************************* MODULES AND RE-EXPORTS *************************************
//##################################################################################################
pub mod dice_collection;
pub mod value_die;
pub mod numeric_die;
//==================================================================================================
pub use self::numeric_die::NumericDie;
pub use self::value_die::ValueDie;
pub use self::dice_collection::DiceCollection;
//##################################################################################################
//********************************************** TRAITS ********************************************
//##################################################################################################
//==================================================================================================
/// An abstraction representing a die. All dice implement this trait.
//--------------------------------------------------------------------------------------------------
/// Any object implementing Die may be rolled to randomly choose a new value for the object from a
/// predetermined set of values. The value of the Die may be read at any time, including before
/// being rolled for the first time.
//==================================================================================================
pub trait Die {
//==============================================================================================
/// The type of value the die may have.
//==============================================================================================
type ValueType; <|fim▁hole|> /// Randomize the value of the die, selecting from the values specified at die construction.
///
//----------------------------------------------------------------------------------------------
/// ##### Return value
/// A mutable reference to the Die
//----------------------------------------------------------------------------------------------
/// # Examples
///
/// ```
/// # extern crate ezra;
/// # fn main() {
/// use ezra::dice::{Die,NumericDie};
///
/// // Construct a standard six sided die
/// let mut die = NumericDie::new(1, 1, 6, None);
///
/// // Randomize the value of the die
/// die.roll();
///
/// let value = die.get_value();
/// assert!(1 <= value && value <= 6);
/// # }
/// ```
//----------------------------------------------------------------------------------------------
/// Since the roll method returns a reference to itself, you may chain die methods together.
///
/// ```
/// # extern crate ezra;
/// # fn main() {
/// # use ezra::dice::{Die,NumericDie};
/// let mut die = NumericDie::new(1, 1, 6, None);
///
/// // Roll the die three times and print its value
/// println!("After three rolls the die has value {}.", die.roll().roll().roll().get_value());
/// # }
/// ```
//==============================================================================================
fn roll(&mut self) -> &mut Self;
//==============================================================================================
/// Obtain a copy of the current value of the die.
///
//----------------------------------------------------------------------------------------------
/// ##### Return value
/// A copy of the current value of the die
//----------------------------------------------------------------------------------------------
/// # Examples
/// ```
/// # extern crate ezra;
/// # fn main() {
/// use ezra::dice::{Die,NumericDie};
///
/// // Construct a standard six sided die
/// let mut die = NumericDie::new(1, 1, 6, None);
///
/// die.roll();
///
/// println!("The value of the die is {}.", die.get_value());
    /// # }
    /// ```
//==============================================================================================
fn get_value(&self) -> Self::ValueType;
//==============================================================================================
    /// Obtain a vector of all possible values the die may take.
///
//----------------------------------------------------------------------------------------------
/// ##### Return value
/// A vector of all possible values the die may have
//----------------------------------------------------------------------------------------------
/// # Examples
/// ```
/// # extern crate ezra;
/// # fn main() {
/// use ezra::dice::{Die,NumericDie};
///
/// // Construct a standard six sided die
/// let die = NumericDie::new(1, 1, 6, None);
///
/// assert_eq!(die.values(), vec![1, 2, 3, 4, 5, 6])
    /// # }
    /// ```
//==============================================================================================
fn values(&self) -> Vec<Self::ValueType>;
}<|fim▁end|> |
//============================================================================================== |
<|file_name|>dump_patient_test.js<|end_file_name|><|fim▁begin|>"use strict";
var chakram = require("chakram"),
util = require("util"),
curry = require("curry"),
Q = require("q"),
auth = require("../common/auth.js"),
common = require("./common.js"),
sharing = require("../sharing/common.js"),
habits = require("../habits/common.js"),
journal = require("../journal/common.js"),
doctors = require("../doctors/common.js"),
pharmacies = require("../pharmacies/common.js"),
medications = require("../medications/common.js"),
doses = require("../doses/common.js");
var expect = chakram.expect;
describe("Patients", function () {
describe("View Patient JSON Data Dump (GET /patients/:patientid.json)", function () {
// basic endpoint
var dump = function (patientId, accessToken) {
var url = util.format("http://localhost:5000/v1/patients/%d.json", patientId);
return chakram.get(url, auth.genAuthHeaders(accessToken));
};
var dumpPatient = function (patient) {
return dump(patient._id, patient.user.accessToken);
};
// check an authenticated user is required
common.itRequiresAuthentication(dump);
// check it requires a valid patient ID corresponding to a patient we have read
// access to
common.itRequiresValidPatientId(dump);
common.itRequiresReadAuthorization(dumpPatient);
describe("with test data set up", function () {
// setup test user
var user;
before(function () {
return auth.createTestUser(undefined, true).then(function (u) {
user = u;
});
});
        // setup test patient owned by another user, shared with this user in the
// anyone group
var patient;
before(function () {
// create patient
return auth.createTestUser(undefined, true).then(curry(common.createOtherPatient)({}, user)).then(function (p) {
patient = p;
// share patient
return Q.nbind(patient.share, patient)(user.email, "default", "anyone");
});
});
// setup test doctor
before(function () {
return Q.nbind(patient.createDoctor, patient)({
name: "test doctor"
});
});
// setup test pharmacy
before(function () {
return Q.nbind(patient.createPharmacy, patient)({
name: "test pharmacy"
});
});
// setup test medication we have access to
var shownMed;
before(function () {
return Q.nbind(patient.createMedication, patient)({
name: "test medication"
}).then(function (m) {
shownMed = m;
});
});
// setup test medication we have no access to
var hiddenMed;
before(function () {
return Q.nbind(patient.createMedication, patient)({
name: "test medication",
access_anyone: "none"
}).then(function (m) {
hiddenMed = m;
});
});
// create journal entry we have access to
var shownEntry;
before(function () {
return Q.nbind(patient.createJournalEntry, patient)({
date: {utc: (new Date()).toISOString(), timezone: "America/Los_Angeles"},
text: "example journal entry",
creator: "[email protected]",
medication_ids: [shownMed._id]
}).then(function (e) {
shownEntry = e;
});
});
// create journal entry we have no access to
before(function () {
return Q.nbind(patient.createJournalEntry, patient)({
date: {utc: (new Date()).toISOString(), timezone: "America/Los_Angeles"},
text: "example journal entry",
creator: "[email protected]",
medication_ids: [hiddenMed._id]
});
});
// create dose event we have access to
var shownDose;
before(function () {
return Q.nbind(patient.createDose, patient)({
medication_id: shownMed._id,
date: {utc: (new Date()).toISOString(), timezone: "America/Los_Angeles"},
creator: "[email protected]",
taken: true
}).then(function (d) {
shownDose = d;
});
});
// create dose event we have no access to
before(function () {
return Q.nbind(patient.createDose, patient)({
medication_id: hiddenMed._id,
date: {utc: (new Date()).toISOString(), timezone: "America/Los_Angeles"},
creator: "[email protected]",
taken: true
});
});
// get dump
var response, dump;<|fim▁hole|> response = r;
dump = r.body;
});
});
before(function (done) {
require("fs").writeFile("/tmp/test.json", JSON.stringify(dump, null, 4), done);
});
it("returns a valid dump", function () {
expect(response).to.be.a.patient.dumpSuccess;
});
it("contains the patient's details", function () {
// remove success and ignore additional properties (not just patient
// data shown here);
var patientSchema = JSON.parse(JSON.stringify(common.schema));
patientSchema.required.splice(patientSchema.required.indexOf("success"), 1);
delete patientSchema.properties.success;
patientSchema.additionalProperties = true;
expect(dump).to.have.property("patient");
expect({
body: dump.patient
}).to.have.schema(patientSchema);
});
it("contains users the patient is shared with", function () {
// success removed by genericListSuccess
expect(response).to.be.an.api.genericListSuccess("shares", sharing.schema, false);
// share for owner and share with current user
expect(dump.shares.length).to.equal(2);
});
it("contains the patient's habits", function () {
// remove success key
var habitsSchema = JSON.parse(JSON.stringify(habits.schema));
habitsSchema.required.splice(habitsSchema.required.indexOf("success"), 1);
delete habitsSchema.properties.success;
habitsSchema.additionalProperties = true;
// chakram schema validation checks the schema of obj.body
expect(dump).to.have.property("habits");
expect({
body: dump.habits
}).to.have.schema(habitsSchema);
});
it("contains the patient's journal entries", function () {
// success removed by genericListSuccess
expect(response).to.be.an.api.genericListSuccess("entries", journal.schema, false);
});
it("only shows journal entries we have access to medications for", function () {
expect(dump.entries.length).to.equal(1);
expect(dump.entries[0].id).to.equal(shownEntry._id);
});
it("contains the patient's doctors", function () {
// success removed by genericListSuccess
expect(response).to.be.an.api.genericListSuccess("doctors", doctors.schema, false);
// one doctor created
expect(dump.doctors.length).to.equal(1);
});
it("contains the patient's pharmacies", function () {
// success removed by genericListSuccess
expect(response).to.be.an.api.genericListSuccess("pharmacies", pharmacies.schema, false);
// one pharmacy created
expect(dump.pharmacies.length).to.equal(1);
});
it("contains the patient's medications", function () {
// success removed by genericListSuccess
// but additional properties (e.g., summary) are added
var medicationsSchema = JSON.parse(JSON.stringify(medications.schema));
medicationsSchema.additionalProperties = true;
expect(response).to.be.an.api.genericListSuccess("medications", medicationsSchema, false);
});
it("only shows medications the user has access to", function () {
expect(dump.medications.length).to.equal(1);
expect(dump.medications[0].id).to.equal(shownMed._id);
});
it("contains the patient's doses", function () {
// success removed by genericListSuccess
expect(response).to.be.an.api.genericListSuccess("doses", doses.schema, false);
});
it("only shows doses the user has access to", function () {
expect(dump.doses.length).to.equal(1);
expect(dump.doses[0].id).to.equal(shownDose._id);
});
});
});
});<|fim▁end|> | before(function () {
return dumpPatient(patient).then(function (r) { |
<|file_name|>func_tokenize_error.py<|end_file_name|><|fim▁begin|>"""A module that is accepted by Python but rejected by tokenize.
The problem is the trailing line continuation at the end of the line,
which produces a TokenError."""<|fim▁hole|><|fim▁end|> |
""\ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Camino top level namespace
"""
from .connectivity import Conmat
from .convert import (Image2Voxel, FSL2Scheme, VtkStreamlines, ProcStreamlines,
TractShredder, DT2NIfTI, NIfTIDT2Camino, AnalyzeHeader,
Shredder)
from .dti import (DTIFit, ModelFit, DTLUTGen, PicoPDFs, Track, TrackPICo,
TrackBayesDirac, TrackDT, TrackBallStick, TrackBootstrap,
TrackBedpostxDeter, TrackBedpostxProba,
ComputeFractionalAnisotropy, ComputeMeanDiffusivity,<|fim▁hole|><|fim▁end|> | ComputeTensorTrace, ComputeEigensystem, DTMetric)
from .calib import (SFPICOCalibData, SFLUTGen)
from .odf import (QBallMX, LinRecon, SFPeaks, MESD)
from .utils import ImageStats |
<|file_name|>pt-br.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license<|fim▁hole|> error: 'Não foi possível limpar os dados colados devido a um erro interno',
title: 'Colar do Word',
toolbar: 'Colar do Word'
});<|fim▁end|> | */
CKEDITOR.plugins.setLang( 'pastefromword', 'pt-br', {
confirmCleanup: 'O texto que você deseja colar parece ter sido copiado do Word. Você gostaria de remover a formatação antes de colar?', |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
keybump
~~~~~~~
manage your versioning like a boss .
<|fim▁hole|>"""
from __future__ import unicode_literals
__version__ = '3.0.1'<|fim▁end|> | :copyright: (c) 2015 by gregorynicholas.
:license: MIT, see LICENSE for more details. |
<|file_name|>dataset.py<|end_file_name|><|fim▁begin|>from collections import OrderedDict
class DataSet(object):
__slots__ = (
'events', # List of all events in this data set
'group', # Iterable containing groups of events<|fim▁hole|> if group_function is None:
self.group = self.events
elif callable(group_function):
self.group = OrderedDict()
for event in self.events:
# Add this event to the group-by entries
key = group_function(event)
if key not in self.group:
self.group[key] = []
self.group[key].append(event)
else:
raise ValueError("group_function is not callable")
def __pretty__(self, p, cycle):
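        # Pretty-printer hook: render a compact "<DataSet: ...>" summary of the
        # events and group keys, guarding against recursive cycles.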
p.text('<{0}: '.format(type(self).__name__))
if cycle:
p.text('...')
else:
p.pretty({
'events': self.events,
'group': self.group.keys(),
})
p.text('>')<|fim▁end|> | )
def __init__(self, query, group_function):
self.events = query.all() |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>var app = require('app'); // Module to control application life.
var BrowserWindow = require('browser-window'); // Module to create native browser window.
// Report crashes to our server.
require('crash-reporter').start();
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
var mainWindow = null;
// Quit when all windows are closed.
app.on('window-all-closed', function() {
'use strict';
// On OS X it is common for applications and their menu bar
// to stay active until the user quits explicitly with Cmd + Q
if (process.platform !== 'darwin') {
app.quit();<|fim▁hole|> }
});
// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
app.on('ready', function() {
'use strict';
// Create the browser window.
mainWindow = new BrowserWindow({
width: 1024,
height: 768,
title: 'Build & Deploy',
'auto-hide-menu-bar': true
});
// and load the index.html of the app.
mainWindow.loadUrl('file://' + __dirname + '/index.html');
// Emitted when the window is closed.
mainWindow.on('closed', function() {
// Dereference the window object, usually you would store windows
// in an array if your app supports multi windows, this is the time
// when you should delete the corresponding element.
mainWindow = null;
});
});<|fim▁end|> | |
<|file_name|>idt.rs<|end_file_name|><|fim▁begin|>use arch::segmentation::{self, SegmentSelector};
use super::bit_field::BitField;
use super::{HandlerFunc, InterruptVector};
/// Interrupt descriptor table.
pub struct Idt([Entry; 256]);
/// An entry of the interrupt descriptor table.
#[derive(Debug, Clone, Copy)]
#[repr(C, packed)]
pub struct Entry {
pointer_low: u16,
gdt_selector: SegmentSelector,
options: BitField<u16>,
pointer_middle: u16,
pointer_high: u32,
reserved: u32,
}
/// Options in an entry of IDT.
pub struct EntryOptions<'a>(&'a mut Entry);
impl<'a> EntryOptions<'a> {
/// Minimal settings of the entry.
fn minimal(entry: &'a mut Entry) -> Self {
let mut options = BitField::new(0);
options.set_range(9..12, 0b111); // 'must-be-one' bits
entry.options = options;
EntryOptions(entry)
}
/// Create a new entry with default settings.
fn new(entry: &'a mut Entry) -> Self {
Self::minimal(entry)
.set_present(true)<|fim▁hole|> /// Set the entry to be present.
pub fn set_present(self, present: bool) -> Self {
let mut options = self.0.options;
options.set_bit(15, present);
self.0.options = options;
self
}
/// Disable interrupts when using this entry.
pub fn disable_interrupts(self, disable: bool) -> Self {
let mut options = self.0.options;
options.set_bit(8, !disable);
self.0.options = options;
self
}
/// Set previlege level of this entry.
pub fn set_privilege_level(self, dpl: u16) -> Self {
let mut options = self.0.options;
options.set_range(13..15, dpl);
self.0.options = options;
self
}
/// Set stack index to use in TSS for this interrupt entry.
pub fn set_stack_index(self, index: u16) -> Self {
let mut options = self.0.options;
options.set_range(0..3, index);
self.0.options = options;
self
}
}
impl Idt {
/// Create a new IDT.
pub fn new() -> Idt {
Idt([Entry::missing(); 256])
}
/// Set an interrupt vector using a handler.
pub fn set_handler(&mut self, entry: InterruptVector, handler: HandlerFunc)
-> EntryOptions
{
self.0[entry as usize] = Entry::new(segmentation::cs(), handler);
EntryOptions(&mut self.0[entry as usize])
}
/// Load this IDT.
pub fn load(&self) {
use super::dtables::{DescriptorTablePointer, lidt};
use core::mem::size_of;
let ptr = DescriptorTablePointer {
base: self as *const _ as u64,
limit: (size_of::<Self>() - 1) as u16,
};
unsafe { lidt(&ptr) };
}
}
impl Entry {
/// Create a new entry using the handler and GDT selector.
fn new(gdt_selector: SegmentSelector, handler: HandlerFunc) -> Self {
let pointer = handler as u64;
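        // The 64-bit handler address is split across the descriptor's three
        // pointer fields: bits 0..16, 16..32 and 32..64.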
let mut entry = Entry {
gdt_selector: gdt_selector,
pointer_low: pointer as u16,
pointer_middle: (pointer >> 16) as u16,
pointer_high: (pointer >> 32) as u32,
options: BitField::new(0),
reserved: 0,
};
EntryOptions::new(&mut entry);
entry
}
/// Create a missing entry.
fn missing() -> Self {
let mut entry = Entry {
gdt_selector: SegmentSelector::new(0),
pointer_low: 0,
pointer_middle: 0,
pointer_high: 0,
options: BitField::new(0),
reserved: 0,
};
EntryOptions::minimal(&mut entry);
entry
}
}<|fim▁end|> | .disable_interrupts(true)
.set_stack_index(0x1)
}
|
<|file_name|>App.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react'
import { Layout, Wrapper, Spacer } from './components/Styles'
/*eslint-disable*/
import IconTest from './IconTest'
import { ButtonTest, ButtonTest2 } from './ButtonTest'
/*eslint-enable*/
<|fim▁hole|> <Wrapper>
<Layout>
<div>
<ButtonTest />
<Spacer space={'0.8rem'} />
<ButtonTest2 />
<Spacer space={'0.8rem'} />
{/*<IconTest />*/}
</div>
</Layout>
</Wrapper>
</div>
)
}
}
export default App<|fim▁end|> | class App extends Component {
render() {
return (
<div> |
<|file_name|>binaries.js<|end_file_name|><|fim▁begin|>const gulp = require('gulp');
const util = require('gulp-util');
const zip = require('gulp-zip');
const release = require('gulp-github-release');
const folders = require('gulp-folders');
const nwBuilder = require('gulp-nw-builder');
const fs = require('fs');
const changelog = require('conventional-changelog');
const execSync = require('child_process').execSync;
const del = require('del');
const vinylPaths = require('vinyl-paths');
const getPaths = require('./_common').getPaths;
const currentTag = require('./_common').currentTag;
const binaryPath = () => getPaths().bin.build + '/OpenChallenge';
gulp.task('clean:binaries', () => {
const paths = getPaths();
return gulp.src([paths.bin.build, paths.bin.release])
.pipe(vinylPaths(del))
.on('error', util.log);
});
gulp.task('package:binaries', ['generate:binaries'], folders(binaryPath(), (folder) => {
return gulp.src(`${binaryPath()}/${folder}/**/*`)
.pipe(zip(`${folder}.zip`))
.pipe(gulp.dest(getPaths().bin.release));
}));<|fim▁hole|> return gulp.src(`${getPaths().bin.release}/*.zip`)
.pipe(release({
repo: 'openchallenge',
owner: 'seiyria',
tag: currentTag(),
manifest: require('../package.json')
}));
});
gulp.task('generate:binaries', ['clean:binaries', 'copy:nw'], () => {
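    // Install express straight into dist/ so it gets bundled into the packaged binaries.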
execSync('npm install --prefix ./dist/ express');
const paths = getPaths();
return gulp.src(`${paths.dist}/**/*`)
.pipe(nwBuilder({
version: 'v0.12.2',
platforms: ['osx64', 'win64', 'linux64'],
appName: 'OpenChallenge',
appVersion: currentTag(),
buildDir: paths.bin.build,
cacheDir: paths.bin.cache,
macIcns: './favicon.icns',
winIco: './favicon.ico'
}));
});
gulp.task('generate:changelog', () => {
return changelog({
releaseCount: 0,
preset: 'angular'
})
.pipe(fs.createWriteStream('CHANGELOG.md'));
});<|fim▁end|> |
gulp.task('upload:binaries', ['package:binaries'], () => { |
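// Task-flow sketch for this gulpfile: clean:binaries wipes old build output,
// generate:binaries packages dist/ with nw-builder for osx64/win64/linux64,
// package:binaries zips each platform build, and upload:binaries publishes
// the zips as a GitHub release for the current tag.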
<|file_name|>0002_initial-domain.py<|end_file_name|><|fim▁begin|># Generated by Django 1.11.11 on 2018-08-27 21:51
from django.db import migrations<|fim▁hole|>
def update_domain_forward(apps, schema_editor):
"""Set site domain and name."""
Domain = apps.get_model("domains", "Domain")
Domain.objects.update_or_create(pk=1, name="fedrowanie.siecobywatelska.pl")
class Migration(migrations.Migration):
dependencies = [("domains", "0001_initial")]
operations = [migrations.RunPython(update_domain_forward)]<|fim▁end|> | |
<|file_name|>EstadoBean.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package beans;
import dao.EstadoJpaController;
import dao.exceptions.NonexistentEntityException;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.faces.application.FacesMessage;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.RequestScoped;
import javax.faces.context.FacesContext;
import modelo.Estado;
import util.JPAUtil;
/**
*
* @author denis
*/
@ManagedBean
@RequestScoped
public class EstadoBean {
private Estado estado = new Estado();
EstadoJpaController daoEstado = new EstadoJpaController(JPAUtil.factory);
private String mensagem;
public EstadoBean() {
}
public void inserir() {
FacesContext context = FacesContext.getCurrentInstance();
try {
daoEstado.create(estado);
estado = new Estado();
} catch (Exception ex) {
context.addMessage("formEstado", new FacesMessage("Estado não pode ser inserido"));
            Logger.getLogger(EstadoBean.class.getName()).log(Level.SEVERE, null, ex);
            return; // do not fall through to the success message on failure
        }
context.addMessage("formEstado", new FacesMessage("Estado foi inserido com sucesso!"));
}
public List<modelo.RelatorioEstado> pesquisarInfoDosEstados() {<|fim▁hole|> return daoEstado.pesquisarInfoDosEstados();
}
public void alterar() {
FacesContext context = FacesContext.getCurrentInstance();
try {
daoEstado.edit(estado);
estado = new Estado();
} catch (NonexistentEntityException ex) {
context.addMessage("formEstado", new FacesMessage("Estado não pode ser alterado"));
            Logger.getLogger(EstadoBean.class.getName()).log(Level.SEVERE, null, ex);
            return;
        } catch (Exception ex) {
            context.addMessage("formEstado", new FacesMessage("Estado não pode ser alterado"));
            Logger.getLogger(EstadoBean.class.getName()).log(Level.SEVERE, null, ex);
            return;
        }
        context.addMessage("formEstado", new FacesMessage("Estado foi alterado com sucesso!"));
}
public void excluir() {
FacesContext context = FacesContext.getCurrentInstance();
try {
daoEstado.destroy(estado.getId());
estado = new Estado();
} catch (Exception ex) {
context.addMessage("formEstado", new FacesMessage("Estado não pode ser excluido"));
Logger.getLogger(EstadoBean.class.getName()).log(Level.SEVERE, null, ex);
}
context.addMessage("formEstado", new FacesMessage("Estado foi inserido com sucesso"));
}
public Estado getEstado() {
return estado;
}
public void setEstado(Estado estado) {
this.estado = estado;
}
public List<Estado> getEstados() {
return daoEstado.findEstadoEntities();
}
public String getMensagem() {
return mensagem;
}
public void setMensagem(String mensagem) {
this.mensagem = mensagem;
}
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>module.exports = ({ config }, options = {}) => config.module<|fim▁hole|> .when(options.style, use => use.options(options.style))
.end()
.use('css')
.loader(require.resolve('css-loader'))
.when(options.css, use => use.options(options.css));<|fim▁end|> | .rule('style')
.test(/\.css$/)
.use('style')
.loader(require.resolve('style-loader')) |
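// Usage sketch (illustrative; `preset` is a hypothetical import name for this
// module):
//   preset({ config }, { css: { modules: true } })
// registers a webpack-chain `style` rule for .css files that chains
// style-loader and css-loader, forwarding the given loader options via .when().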
<|file_name|>gen_build_yaml.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates the appropriate JSON data for LB interop test scenarios."""
import json
import os
import yaml
all_scenarios = []
# TODO(https://github.com/grpc/grpc-go/issues/2347): enable
# client_falls_back_because_no_backends_* scenarios for Java/Go.
# TODO(https://github.com/grpc/grpc-java/issues/4887): enable
# *short_stream* scenarios for Java.
# TODO(https://github.com/grpc/grpc-java/issues/4912): enable
# Java TLS tests involving TLS to the balancer.
def server_sec(transport_sec):
if transport_sec == 'google_default_credentials':
return 'alts', 'alts', 'tls'
return transport_sec, transport_sec, transport_sec
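# For example, server_sec('google_default_credentials') returns the
# (balancer, backend, fallback) security triple ('alts', 'alts', 'tls');
# any other transport is used unchanged for all three server roles.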
def generate_no_balancer_because_lb_a_record_returns_nx_domain():
all_configs = []
for transport_sec in [
'insecure', 'alts', 'tls', 'google_default_credentials'
]:
balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
config = {
'name':
'no_balancer_because_lb_a_record_returns_nx_domain_%s' %
transport_sec,
'skip_langs': [],
'transport_sec':
transport_sec,
'balancer_configs': [],
'backend_configs': [],
'fallback_configs': [{
'transport_sec': fallback_sec,
}],
'cause_no_error_no_data_for_balancer_a_record':
False,
}
all_configs.append(config)<|fim▁hole|> return all_configs
all_scenarios += generate_no_balancer_because_lb_a_record_returns_nx_domain()
def generate_no_balancer_because_lb_a_record_returns_no_data():
all_configs = []
for transport_sec in [
'insecure', 'alts', 'tls', 'google_default_credentials'
]:
balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
config = {
'name':
'no_balancer_because_lb_a_record_returns_no_data_%s' %
transport_sec,
'skip_langs': [],
'transport_sec':
transport_sec,
'balancer_configs': [],
'backend_configs': [],
'fallback_configs': [{
'transport_sec': fallback_sec,
}],
'cause_no_error_no_data_for_balancer_a_record':
True,
}
all_configs.append(config)
return all_configs
all_scenarios += generate_no_balancer_because_lb_a_record_returns_no_data()
def generate_client_referred_to_backend():
all_configs = []
for balancer_short_stream in [True, False]:
for transport_sec in [
'insecure', 'alts', 'tls', 'google_default_credentials'
]:
balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
skip_langs = []
if transport_sec == 'tls':
skip_langs += ['java']
if balancer_short_stream:
skip_langs += ['java']
config = {
'name':
'client_referred_to_backend_%s_short_stream_%s' %
(transport_sec, balancer_short_stream),
'skip_langs':
skip_langs,
'transport_sec':
transport_sec,
'balancer_configs': [{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
}],
'backend_configs': [{
'transport_sec': backend_sec,
}],
'fallback_configs': [],
'cause_no_error_no_data_for_balancer_a_record':
False,
}
all_configs.append(config)
return all_configs
all_scenarios += generate_client_referred_to_backend()
def generate_client_referred_to_backend_fallback_broken():
all_configs = []
for balancer_short_stream in [True, False]:
for transport_sec in ['alts', 'tls', 'google_default_credentials']:
balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
skip_langs = []
if transport_sec == 'tls':
skip_langs += ['java']
if balancer_short_stream:
skip_langs += ['java']
config = {
'name':
'client_referred_to_backend_fallback_broken_%s_short_stream_%s'
% (transport_sec, balancer_short_stream),
'skip_langs':
skip_langs,
'transport_sec':
transport_sec,
'balancer_configs': [{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
}],
'backend_configs': [{
'transport_sec': backend_sec,
}],
'fallback_configs': [{
'transport_sec': 'insecure',
}],
'cause_no_error_no_data_for_balancer_a_record':
False,
}
all_configs.append(config)
return all_configs
all_scenarios += generate_client_referred_to_backend_fallback_broken()
def generate_client_referred_to_backend_multiple_backends():
all_configs = []
for balancer_short_stream in [True, False]:
for transport_sec in [
'insecure', 'alts', 'tls', 'google_default_credentials'
]:
balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
skip_langs = []
if transport_sec == 'tls':
skip_langs += ['java']
if balancer_short_stream:
skip_langs += ['java']
config = {
'name':
'client_referred_to_backend_multiple_backends_%s_short_stream_%s'
% (transport_sec, balancer_short_stream),
'skip_langs':
skip_langs,
'transport_sec':
transport_sec,
'balancer_configs': [{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
}],
'backend_configs': [{
'transport_sec': backend_sec,
}, {
'transport_sec': backend_sec,
}, {
'transport_sec': backend_sec,
}, {
'transport_sec': backend_sec,
}, {
'transport_sec': backend_sec,
}],
'fallback_configs': [],
'cause_no_error_no_data_for_balancer_a_record':
False,
}
all_configs.append(config)
return all_configs
all_scenarios += generate_client_referred_to_backend_multiple_backends()
def generate_client_falls_back_because_no_backends():
all_configs = []
for balancer_short_stream in [True, False]:
for transport_sec in [
'insecure', 'alts', 'tls', 'google_default_credentials'
]:
balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
skip_langs = ['go', 'java']
if transport_sec == 'tls':
skip_langs += ['java']
if balancer_short_stream:
skip_langs += ['java']
config = {
'name':
'client_falls_back_because_no_backends_%s_short_stream_%s' %
(transport_sec, balancer_short_stream),
'skip_langs':
skip_langs,
'transport_sec':
transport_sec,
'balancer_configs': [{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
}],
'backend_configs': [],
'fallback_configs': [{
'transport_sec': fallback_sec,
}],
'cause_no_error_no_data_for_balancer_a_record':
False,
}
all_configs.append(config)
return all_configs
all_scenarios += generate_client_falls_back_because_no_backends()
def generate_client_falls_back_because_balancer_connection_broken():
all_configs = []
for transport_sec in ['alts', 'tls', 'google_default_credentials']:
balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
skip_langs = []
if transport_sec == 'tls':
skip_langs = ['java']
config = {
'name':
'client_falls_back_because_balancer_connection_broken_%s' %
transport_sec,
'skip_langs':
skip_langs,
'transport_sec':
transport_sec,
'balancer_configs': [{
'transport_sec': 'insecure',
'short_stream': False,
}],
'backend_configs': [],
'fallback_configs': [{
'transport_sec': fallback_sec,
}],
'cause_no_error_no_data_for_balancer_a_record':
False,
}
all_configs.append(config)
return all_configs
all_scenarios += generate_client_falls_back_because_balancer_connection_broken()
def generate_client_referred_to_backend_multiple_balancers():
all_configs = []
for balancer_short_stream in [True, False]:
for transport_sec in [
'insecure', 'alts', 'tls', 'google_default_credentials'
]:
balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
skip_langs = []
if transport_sec == 'tls':
skip_langs += ['java']
if balancer_short_stream:
skip_langs += ['java']
config = {
'name':
'client_referred_to_backend_multiple_balancers_%s_short_stream_%s'
% (transport_sec, balancer_short_stream),
'skip_langs':
skip_langs,
'transport_sec':
transport_sec,
'balancer_configs': [
{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
},
{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
},
{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
},
{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
},
{
'transport_sec': balancer_sec,
'short_stream': balancer_short_stream,
},
],
'backend_configs': [{
'transport_sec': backend_sec,
},],
'fallback_configs': [],
'cause_no_error_no_data_for_balancer_a_record':
False,
}
all_configs.append(config)
return all_configs
all_scenarios += generate_client_referred_to_backend_multiple_balancers()
print((yaml.dump({
'lb_interop_test_scenarios': all_scenarios,
})))<|fim▁end|> | |
<|file_name|>macro.py<|end_file_name|><|fim▁begin|># peppy Copyright (c) 2006-2010 Rob McMullen
# Licenced under the GPLv2; see http://peppy.flipturn.org for more info
"""Simple macros created by recording actions
This plugin provides macro recording
"""
import os
import wx
from wx.lib.pubsub import Publisher
from peppy.yapsy.plugins import *
from peppy.actions import *
from peppy.actions.minibuffer import *
from peppy.major import MajorMode
from peppy.majormodematcher import MajorModeMatcherDriver
from peppy.minor import *
from peppy.sidebar import *
from peppy.lib.multikey import *
from peppy.debug import *
import peppy.vfs as vfs
from peppy.vfs.itools.vfs.memfs import MemFS, MemFile, MemDir, TempFile
class CharEvent(FakeCharEvent):
"""Fake character event used by L{RecordKeyboardAction} when generating
scripted copies of an action list.
"""
def __init__(self, key, unicode, modifiers):
self.id = -1
self.event_object = None
self.keycode = key
self.unicode = unicode
self.modifiers = modifiers
self.is_quoted = True
@classmethod
def getScripted(cls, evt):
"""Returns a string that represents the python code to instantiate
the object.
Used when serializing a L{RecordedKeyboardAction} to a python string
"""
return "%s(%d, %d, %d)" % (cls.__name__, evt.GetKeyCode(), evt.GetUnicodeKey(), evt.GetModifiers())
class RecordedKeyboardAction(RecordedAction):
"""Subclass of L{RecordedAction} for keyboard events.
"""
def __init__(self, action, evt, multiplier):
RecordedAction.__init__(self, action, multiplier)
self.evt = FakeCharEvent(evt)
# Hack to force SelfInsertCommand to process the character, because
# normally it uses the evt.Skip() to force the EVT_CHAR handler to
# insert the character.
self.evt.is_quoted = True
def __str__(self):
return "%s: %dx%s" % (self.actioncls.__name__, self.multiplier, self.evt.GetKeyCode())
def performAction(self, system_state):
action = self.actioncls(system_state.frame, mode=system_state.mode)
action.actionKeystroke(self.evt, self.multiplier)
def getScripted(self):
return "%s(frame, mode).actionKeystroke(%s, %d)" % (self.actioncls.__name__, CharEvent.getScripted(self.evt), self.multiplier)
class RecordedMenuAction(RecordedAction):
"""Subclass of L{RecordedAction} for menu events.
"""
def __init__(self, action, index, multiplier):
RecordedAction.__init__(self, action, multiplier)
self.index = index
def __str__(self):
return "%s x%d, index=%s" % (self.actioncls.__name__, self.multiplier, self.index)
def performAction(self, system_state):
action = self.actioncls(system_state.frame, mode=system_state.mode)
action.action(self.index, self.multiplier)
def getScripted(self):
return "%s(frame, mode).action(%d, %d)" % (self.actioncls.__name__, self.index, self.multiplier)
class ActionRecorder(AbstractActionRecorder, debugmixin):
"""Creates, maintains and plays back a list of actions recorded from the
user's interaction with a major mode.
"""
def __init__(self):
self.recording = []
def __str__(self):
summary = ''
count = 0
for recorded_item in self.recording:
if hasattr(recorded_item, 'text'):
summary += recorded_item.text + " "
if len(summary) > 50:
summary = summary[0:50] + "..."
count += 1
if len(summary) == 0:
summary = "untitled"
return MacroFS.escapeFileName(summary)
def details(self):
"""Get a list of actions that have been recorded.
Primarily used for debugging, there is no way to use this list to
play back the list of actions.
"""
lines = []
for recorded_item in self.recording:
lines.append(str(recorded_item))
return "\n".join(lines)
def recordKeystroke(self, action, evt, multiplier):
if action.isRecordable():
record = RecordedKeyboardAction(action, evt, multiplier)
self.appendRecord(record)
def recordMenu(self, action, index, multiplier):
if action.isRecordable():
record = RecordedMenuAction(action, index, multiplier)
self.appendRecord(record)
def appendRecord(self, record):
"""Utility method to add a recordable action to the current list
This method checks for the coalescability of the record with the
previous record, and it is merged if possible.
@param record: L{RecordedAction} instance
"""
self.dprint("adding %s" % record)
if self.recording:
last = self.recording[-1]
if last.canCoalesceActions(record):
self.recording.pop()
record = last.coalesceActions(record)
self.dprint("coalesced into %s" % record)
self.recording.append(record)
def getRecordedActions(self):
return self.recording
def playback(self, frame, mode, multiplier=1):
mode.BeginUndoAction()
state = MacroPlaybackState(frame, mode)
self.dprint(state)
SelectAction.debuglevel = 1
while multiplier > 0:
for recorded_action in self.getRecordedActions():
recorded_action.performAction(state)
multiplier -= 1
SelectAction.debuglevel = 0
mode.EndUndoAction()
class PythonScriptableMacro(MemFile):
"""A list of serialized SelectAction commands used in playing back macros.
This object contains python code in the form of text strings that
provide a way to reproduce the effects of a previously recorded macro.
Additionally, since they are in plain text, they may be carefully edited
by the user to provide additional functionality that is not possible only
using the record capability.
The generated python script looks like the following:
SelfInsertCommand(frame, mode).actionKeystroke(CharEvent(97, 97, 0), 1)
BeginningTextOfLine(frame, mode).actionKeystroke(CharEvent(65, 65, 2), 1)
SelfInsertCommand(frame, mode).actionKeystroke(CharEvent(98, 98, 0), 1)
ElectricReturn(frame, mode).actionKeystroke(CharEvent(13, 13, 0), 1)
where the actions are listed, one per line, by their python class name.
The statements are C{exec}'d in in the global namespace, but have a
constructed local namespace that includes C{frame} and C{mode} representing
the current L{BufferFrame} and L{MajorMode} instance, respectively.
"""
keyword_mapping = {
'key': 'key_binding',
}
def __init__(self, recorder=None, name=None):
"""Converts the list of recorded actions into python string form.
"""
if isinstance(recorder, str):
data = recorder
elif recorder:
name = str(recorder)
data = self.getScriptFromRecorder(recorder)
else:
data = ""
if name is None:
name = "untitled"
MemFile.__init__(self, data, name)
self.parseMacroForMetadata()
def __str__(self):
return self.name
def get_key_binding(self):
try:
return self._key_binding
except AttributeError:
self._key_binding = None
return None
def set_key_binding(self, binding):
self._key_binding = binding
key_binding = property(get_key_binding, set_key_binding)
def save(self, url):
"""Save this macro to the specified macro: url
"""
dprint("Saving to %s" % url)
self.rebuildMacroAndMetadata()
fh = vfs.open_write(url)
fh.write(self.data)
fh.close()
def rebuildMacroAndMetadata(self):
"""Reconstructs text of macro taking into account any changes in
the keybindings or other metadata
"""
lines = []
comments = []
found = {}
in_opening_comments = True
for line in self.data.splitlines():
dprint(line)
if line.startswith("#@") and in_opening_comments:
key, value = self.splitMacroComment(line)
if key in self.keyword_mapping:
attribute = self.keyword_mapping[key]
value = getattr(self, attribute)
if value is not None:
dprint("found new %s = %s" % (key ,value))
line = "#@ %s %s" % (key, value)
found[key] = value
else:
found[key] = None
comments.append(line)
else:
in_opening_comments = False
lines.append(line)
for key, attribute in self.keyword_mapping.iteritems():
if key not in found:
value = getattr(self, attribute)
if value is not None:
dprint("adding new %s = %s" % (key ,value))
line = "#@ %s %s" % (key, value)
comments.append(line)
self.data = "\n".join(comments) + "\n" + "\n".join(lines) + "\n"
def parseMacroForMetadata(self):
"""Parses the macro comments for any metadata that might be present
Included in macro metadata are key bindings, authorship, etc. They are
comment lines in the format C{#@param value} where 'param' is one of
'key', 'author', 'name', 'created'
"""
for line in self.data.splitlines():
if line.startswith("#@"):
self.parseMacroComment(line[2:])
def splitMacroComment(self, line):
"""Split a macro comment into a key, value pair
Macro comments are lines that begin with #@ as the first two
characters. After that two character indicator, the remainder of the
line is a keyword and a value separated by whitespace. The keyword
can't contain any whitespace, so everything after the first block of
whitespace is considered the value.
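        For example, the comment line "#@ key C-x C-m" splits into the
        pair ("key", "C-x C-m").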
"""
if line.startswith("#@"):
line = line[2:]
key, value = line.strip().split(" ", 1)
value = value.strip()
return key, value
def parseMacroComment(self, line):
"""Parse a single macro comment
The comment should have already been stripped of its leading delimiter.
"""
key, value = self.splitMacroComment(line)
if key in self.keyword_mapping:
attribute = self.keyword_mapping[key]
setattr(self, attribute, value)
dprint("found %s = %s" % (key ,value))
def setName(self, name):
"""Changes the name of the macro to the supplied string.
"""
self.name = name
def getScriptFromRecorder(self, recorder):
"""Converts the list of recorded actions into a python script that can
be executed by the L(playback) method.
Calls the L{RecordAction.getScripted} method of each recorded action to
generate the python script version of the action.
@returns: a multi-line string, exec-able using the L{playback} method
"""
script = ""
lines = []
for recorded_action in recorder.getRecordedActions():
lines.append(recorded_action.getScripted())
script += "\n".join(lines) + "\n"
return script
def playback(self, frame, mode, multiplier=1):
"""Plays back the list of actions.
Uses the current frame and mode as local variables for the python
scripted version of the action list.
"""
local = {'mode': mode,
'frame': frame,
}
self.addActionsToLocal(local)
if hasattr(mode, 'BeginUndoAction'):
mode.BeginUndoAction()
mode.beginProcessingMacro()
try:
while multiplier > 0:
exec self.data in globals(), local
multiplier -= 1
except Exception, e:
import traceback
error = "Error in macro %s:\n%s\n\n" % (self.name, traceback.format_exc())
Publisher().sendMessage('peppy.log.info', (frame, error))
finally:
mode.endProcessingMacro()
if hasattr(mode, 'BeginUndoAction'):
mode.EndUndoAction()
def addActionsToLocal(self, local):
"""Sets up the local environment for the exec call
All the possible actions must be placed in the local environment for
the call to exec.
"""
actions = MacroAction.getAllKnownActions()
for action in actions:
local[action.__name__] = action
actions = SelectAction.getAllKnownActions()
for action in actions:
local[action.__name__] = action
class StartRecordingMacro(SelectAction):
"""Begin recording actions"""
name = "Start Recording"
key_bindings = {'default': "S-C-9", 'mac': "^S-9", 'emacs': ["C-x S-9", "S-C-9"]}
default_menu = (("Tools/Macros", -800), 100)
def action(self, index=-1, multiplier=1):
self.frame.root_accel.startRecordingActions(ActionRecorder())
self.mode.setStatusText("Recording macro...")
class StopRecordingMixin(object):
def stopRecording(self):
if self.frame.root_accel.isRecordingActions():
recorder = self.frame.root_accel.stopRecordingActions()
self.dprint(recorder)
macro = MacroFS.addMacroFromRecording(recorder, self.mode)
RecentMacros.append(macro)
self.mode.setStatusText("Stopped recording macro.")
class StopRecordingMacro(StopRecordingMixin, SelectAction):
"""Stop recording actions"""
name = "Stop Recording"
key_bindings = {'default': "S-C-0", 'mac': "^S-0", 'emacs': ["C-x S-0", "S-C-0"]}
default_menu = ("Tools/Macros", 110)
@classmethod
def isRecordable(cls):
return False
def action(self, index=-1, multiplier=1):
self.stopRecording()
class ReplayLastMacro(StopRecordingMixin, SelectAction):
"""Play back last macro that was recorded"""
name = "Play Last Macro"
key_bindings = {'default': "S-C-8", 'mac': "^S-8", 'emacs': ["C-x e", "S-C-8"]}
default_menu = ("Tools/Macros", 120)
def isEnabled(self):
return RecentMacros.isEnabled()
@classmethod
def isRecordable(cls):
return False
def action(self, index=-1, multiplier=1):
self.stopRecording()
macro = RecentMacros.getLastMacro()
if macro:
self.dprint("Playing back %s" % macro)
wx.CallAfter(macro.playback, self.frame, self.mode, multiplier)
else:
self.dprint("No recorded macro.")
class MacroNameMixin(object):
"""Abstract mixin that provides a mapping of macro names to macro paths
This mixin is used to provide macro names to a completion minibuffer.
"""
def getMacroPathMap(self):
"""Generate list of possible names to complete.
For all the currently active actions, find all the names and
aliases under which the action could be called, and add them
to the list of possible completions.
@returns: tuple containing a list and a dict. The list contains the
precedence of paths which is used to determine which duplicates are
marked as auxiliary names. The dict contains a mapping of the path to
the macros in that path.
"""
raise NotImplementedError
def createList(self):
"""Generate list of possible macro names to complete.
Uses L{getMacroPathMap} to get the set of macro names on which to
complete. Completes on macro names, not path names, so duplicate
macro names would be possible. Gets around any possible duplication
by using the macro path order to get the hierarchy of paths, and any
duplicates are marked with the path name.
So, if we are in Python mode and there are macros "macro:Python/test"
and "macro:Fundamental/test", the Python mode macro would be marked
as simply "test", while the fundamental mode macro would be marked as
"test (Fundamental)" to mark the distinction.
"""
self.map = {}
path_order, macros = self.getMacroPathMap()
self.macro_path_hierarchy = []
for path in path_order:
for name in macros[path]:
#dprint("name = %s" % name)
macro_path = "%s/%s" % (path, name)
if name in self.map:
name = "%s (%s)" % (name, path)
self.map[name] = macro_path
self.macro_path_hierarchy.append(macro_path)
self.sorted = self.map.keys()
self.sorted.sort()
self.dprint(self.sorted)
class ModeMacroNameMixin(MacroNameMixin):
"""Concrete mixin for MacroNameMixin supplying names for macros that only
work with the action's major mode.
"""
def getMacroPathMap(self):
hierarchy = self.mode.getSubclassHierarchy()
#dprint(hierarchy)
path_map = {}
path_order = []
for modecls in hierarchy:
path, names = MacroFS.getMacroNamesFromMajorModeClass(modecls)
path_map[path] = names
path_order.append(path)
return path_order, path_map
class ExecuteMacroByName(ModeMacroNameMixin, SelectAction):
"""Execute a macro by name
Using the tab completion minibuffer, execute an action by its name. The
actions shown in the minibuffer will be limited to the actions relevant to
the current major mode.
"""
name = "&Execute Macro"
key_bindings = {'default': "S-C-7", 'emacs': "C-c e", }
default_menu = ("Tools/Macros", 130)
def action(self, index=-1, multiplier=1):
# FIXME: ignoring number right now
self.createList()
minibuffer = StaticListCompletionMinibuffer(self.mode, self,
label = "Execute Macro",
list = self.sorted,
initial = "")
self.mode.setMinibuffer(minibuffer)
def processMinibuffer(self, minibuffer, mode, text):
if text in self.map:
macro_path = self.map[text]
macro = MacroFS.getMacro(macro_path)
if macro:
wx.CallAfter(macro.playback, self.frame, self.mode)
else:
self.frame.SetStatusText("%s not a known macro" % text)
class ExecuteMacroByKeystroke(ModeMacroNameMixin, SelectAction):
"""Map keystrokes to macros
Uses hooks in the keyboard processing to map keystrokes to macros on a
per-major-mode basis.
Normally, actions provide the same keystrokes regardless of the class of
major mode. If the action is available to that major mode, it has one and
only one definition for the keystroke.
This needs to change for macros, because some macros won't be
available to certain major modes. A hook is provided for this in the
L{SelectAction.addKeyBindingToAcceleratorList} method, which is an
instance method of L{SelectAction}
"""
name = "Execute Macro By Keystroke"
def addKeyBindingToAcceleratorList(self, accel_list):
self.createList()
for macro in self.iterModeMacros():
if macro.key_binding:
self.dprint(macro.key_binding)
accel_list.addKeyBinding(macro.key_binding, self)
def iterModeMacros(self):
"""Iterate through macros available to this major mode
The macros from more specific major modes are returned first, then
up through the superclasses to the most general major mode in
the class hierarchy. I.e. PythonMode macros are returned before
FundamentalMode, etc.
"""
order = self.macro_path_hierarchy[:]
order.reverse()
for path in order:
macro = MacroFS.getMacro(path)
self.dprint(macro)
yield macro
def actionKeystroke(self, evt, multiplier=1):
"""Match the last keystroke with an active macro and play it back if
        a match is found.
All macros are matched within this action; macros don't have individual
actions (currently), which means that they can't be bound in menus
or toolbars. This may change in a future release.
"""
accel_list = self.frame.root_accel
last = accel_list.getLastKeystroke()
# Precompute the current Keystrokes so it can be directly compared
# with the result of the KeyAccelerator.split method call
last_keystrokes = last.getKeystrokeTuple()
for macro in self.iterModeMacros():
if macro.key_binding:
keystrokes = KeyAccelerator.split(macro.key_binding)
self.dprint("checking %s, %s" % (macro, keystrokes))
if keystrokes == last_keystrokes:
self.dprint("playback macro %s" % macro)
wx.CallAfter(macro.playback, self.frame, self.mode, multiplier)
break
class RecentMacros(OnDemandGlobalListAction):
"""Play a macro from the list of recently created macros
Maintains a list of the recent macros and runs the selected macro if chosen
out of the submenu.
Macros are stored as a list of L{PythonScriptableMacro}s in most-recent to
least recent order.
"""
name = "Recent Macros"
default_menu = ("Tools/Macros", -200)
inline = False
storage = []
@classmethod
def isEnabled(cls):
return bool(cls.storage)
@classmethod
def append(cls, macro):
"""Adds the macro to the list of recent macros.
"""
cls.storage[0:0] = (macro.name, )
cls.trimStorage(MacroPlugin.classprefs.list_length)
cls.calcHash()
@classmethod
def validateAll(cls):
"""Update the list to contain only valid macros.
This is used after rearranging the macro file system.
"""
valid_macros = []
for name in cls.storage:
macro = MacroFS.getMacro(name)
if macro:
valid_macros.append(name)
cls.setStorage(valid_macros)
@classmethod
def setStorage(cls, array):
cls.storage = array
cls.trimStorage(MacroPlugin.classprefs.list_length)
cls.calcHash()
@classmethod
def getLastMacroName(cls):
"""Return the pathname of the most recently added macro
@returns pathname within the macro: filesystem
"""
if cls.storage:
return cls.storage[0]
return None
@classmethod
def getLastMacro(cls):
"""Return the most recently added macro
@returns L{PythonScriptableMacro} instance, or None if no macro has yet
been added.
"""
name = cls.getLastMacroName()
if name:
return MacroFS.getMacro(name)
return None
def action(self, index=-1, multiplier=1):
name = self.storage[index]
macro = MacroFS.getMacro(name)
assert self.dprint("replaying macro %s" % macro)
wx.CallAfter(macro.playback, self.frame, self.mode, 1)
class MacroSaveData(object):
"""Data transfer object to serialize the state of the macro system"""
version = 1
def __init__(self):
self.macros = MacroFS.macros
self.recent = RecentMacros.storage
@classmethod
def load(cls, url):
import cPickle as pickle
# Note: because plugins are loaded using the execfile command, pickle
# can't find classes that are in the global namespace. Have to supply
# PythonScriptableMacro into the builtin namespace to get around this.
import __builtin__
__builtin__.PythonScriptableMacro = PythonScriptableMacro
if not vfs.exists(url):
return
fh = vfs.open(url)
bytes = fh.read()
fh.close()
if bytes:
version, data = pickle.loads(bytes)
if version == 1:
cls.unpackVersion1(data)
else:
raise RuntimeError("Unknown version of MacroSaveData in %s" % url)
@classmethod
def unpackVersion1(cls, data):
root, recent = data
if isinstance(root, MemDir):
MacroFS.root = root
#dprint(MacroFS.macros)
RecentMacros.setStorage(recent)
else:
dprint("Found prerelease version of macro filesystem; not loading")
@classmethod
def save(cls, url):
bytes = cls.packVersion1()
fh = vfs.open_write(url)
fh.write(bytes)
fh.close()
@classmethod
def packVersion1(cls):
import cPickle as pickle
# See above for the note about the builtin namespace
import __builtin__
__builtin__.PythonScriptableMacro = PythonScriptableMacro
data = (cls.version, (MacroFS.root, RecentMacros.storage))
#dprint(data)
pickled = pickle.dumps(data)
return pickled
class TempMacro(TempFile):
file_class = PythonScriptableMacro
class MacroFS(MemFS):
"""Filesystem to recognize "macro:macro_name" URLs
This simple filesystem allows URLs in the form of "macro:macro_name", and
provides the mapping from the macro name to the L{PythonScriptableMacro}
instance.
On disk, this is serialized as a pickle object of the macro class attribute.
"""
root = MemDir()
temp_file_class = TempMacro
@classmethod
def escapeFileName(cls, name):
name = name.replace("/", " ")
return name.strip()
@classmethod
def findAlternateName(cls, dirname, basename):
"""Find alternate name if the requested name already exists
If basename already exists in the directory, appends the emacs-style
counter <1>, <2>, etc. until an unused filename is found.
@returns: new filename guaranteed to be unique
"""
if dirname:
if not dirname.endswith("/"):
dirname += "/"
else:
dirname = ""
orig_basename = basename
fullpath = dirname + basename
count = 0
existing = True
while existing:
parent, existing, name = cls._find(fullpath)
if existing:
count += 1
basename = orig_basename + "<%d>" % count
fullpath = dirname + basename
return fullpath, basename
@classmethod
def addMacro(cls, macro, dirname=None):
if dirname:
if not dirname.endswith("/"):
dirname += "/"
# Make sure the directory exists
url = vfs.normalize("macro:%s" % dirname)
needs_mkdir = False
if vfs.exists(url):
if vfs.is_file(url):
# we have a macro that is the same name as the directory
# name. Rename the file and create the directory.
components = dirname.strip('/').split('/')
filename = components.pop()
parent_dirname = "/".join(components)
dum, new_filename = cls.findAlternateName(parent_dirname, filename)
#dprint("parent=%s filename=%s: New filename: %s" % (parent_dirname, filename, new_filename))
parent, existing, name = cls._find(parent_dirname)
#dprint("existing=%s" % existing)
existing[new_filename] = existing[filename]
del existing[filename]
#dprint("existing after=%s" % existing)
needs_mkdir = True
else:
needs_mkdir = True
if needs_mkdir:
#dprint("Making folder %s" % url)
vfs.make_folder(url)
else:
dirname = ""
fullpath, basename = cls.findAlternateName(dirname, macro.name)
parent, existing, name = cls._find(dirname)
#dprint("name=%s: parent=%s, existing=%s" % (basename, parent, existing))
macro.setName(fullpath)
existing[basename] = macro
@classmethod
def addMacroFromRecording(cls, recorder, mode):
"""Add a macro to the macro: filesystem.
The macro: filesystem is organized by major mode name. Any macro that
is defined on the Fundamental mode appears is valid for text modes;
otherwise, the macros are organized into directories based on mode
name.
"""
macro = PythonScriptableMacro(recorder)
path = mode.keyword
cls.addMacro(macro, path)
return macro
@classmethod
def getMacro(cls, name):
"""Get the L{PythonScriptableMacro} given the pathname of the macro
@param name: string or URL of macro
"""
try:
name = unicode(name.path)
except:
name = unicode(name)
parent, macro, name = cls._find(name)
#dprint(macro)
return macro
@classmethod
def isMacro(cls, name):
try:
name = unicode(name.path)
except:
name = unicode(name)
parent, macro, name = cls._find(name)
return bool(macro)
@classmethod
def getMacroNamesFromMajorModeClass(cls, modecls):
"""Get the list of macro names available for the specified major mode
class.
This is roughly equivalent to using C{vfs.get_names("macro:%s" %
mode.keyword)} except that it also handles the case of universal
macros linked to the abstract L{MajorMode} that are in the macro
directory "".
@param modecls: major mode class
@returns: tuple containing the path in the macro: filesystem and the
list of all macros in that path
"""
keyword = modecls.keyword
if keyword == "Abstract_Major_Mode":
path = ""
else:
path = keyword
try:
all_names = vfs.get_names("macro:%s" % path)
except OSError:
all_names = []
# Check to see that all the names are macro names and not directories
macro_names = []
for name in all_names:
url = "macro:" + path + "/" + name
if vfs.is_file(url):
macro_names.append(name)
return path, macro_names
@classmethod
def get_mimetype(cls, reference):
path = str(reference.path)
parent, existing, name = cls._find(path)
if existing:
if existing.is_file:
return "text/x-python"
return "application/x-not-regular-file"
raise OSError("[Errno 2] No such file or directory: '%s'" % reference)
class MacroTreeCtrl(wx.TreeCtrl):
"""Abstract TreeCtrl specialized to show macros
Must be subclassed and the L{addMacrosToTree} method must be defined that
populates the tree with all the macros to be displayed.
"""
def __init__(self, parent, allow_activation=True):
self.allow_activation = allow_activation
if wx.Platform == '__WXMSW__':
style = wx.TR_HAS_BUTTONS
self.has_root = True
else:
style = wx.TR_HIDE_ROOT|wx.TR_HAS_BUTTONS
self.has_root = False
wx.TreeCtrl.__init__(self, parent, -1, size=(self.classprefs.best_width, self.classprefs.best_height), style=style | wx.TR_EDIT_LABELS | wx.TR_MULTIPLE)
self.root = self.AddRoot("root item")
self.hierarchy = None
self.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.OnActivate)
self.Bind(wx.EVT_TREE_ITEM_COLLAPSING, self.OnCollapsing)
self.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self.OnBeginEdit)
self.Bind(wx.EVT_TREE_END_LABEL_EDIT, self.OnEndEdit)
self.allow_char_events = True
self.Bind(wx.EVT_CHAR, self.OnChar)
def OnChar(self, evt):
dprint(evt)
if self.allow_char_events:
evt.Skip()
def enableKeystrokeProcessing(self):
self.allow_char_events = True
def disableKeystrokeProcessing(self):
self.allow_char_events = False
def activateSpringTab(self):
"""Callback function from the SpringTab handler requesting that we
initialize ourselves.
"""<|fim▁hole|> """Rebuild the entire tree
Calls the L{addMacrosToTree} to rebuild the tree after all the items
below the root item have been deleted.
"""
self.DeleteChildren(self.root)
self.addMacrosToTree()
if self.has_root:
self.Expand(self.root)
if evt:
evt.Skip()
def addMacrosToTree(self):
"""Repopulate the macros into the tree
Upon entering this method, the tree will have been previously cleared
of everything but the root object. Any hierarchy expansion is left to
the individual implementation; it may hide or show levels as desired.
The root item will be expanded upon this method's return.
"""
keywords = self.findKeywordHierarchy()
# Getting the names of macros for a specific major mode may fail if no
# macros exist
for keyword in keywords:
self.appendAllFromMajorMode(keyword)
def findKeywordHierarchy(self):
"""Return a list of keywords representing the major mode subclassing
hierarchy of the major modes of interest.
This method must be overridden in a subclass to provide the list of
keywords to display
"""
raise NotImplementedError
def appendAllFromMajorMode(self, keyword):
"""Append all macros for a given major mode
"""
if keyword == "Abstract_Major_Mode":
keyword = "Universal Macros"
path = ""
else:
path = keyword
item = self.AppendItem(self.root, _(keyword))
try:
names = vfs.get_names("macro:%s" % path)
self.appendItems(item, path, names)
except OSError:
pass
self.Expand(item)
def appendItems(self, wxParent, path, names):
"""Append the macro names to the specified item
For the initial item highlighing, uses the current_line instance
attribute to determine the line number.
"""
names.sort()
for name in names:
if path:
fullpath = path + "/" + name
else:
fullpath = name
url = "macro:" + fullpath
if vfs.is_file(url):
text = self.getTextForMacroName(fullpath, name)
wxItem = self.AppendItem(wxParent, text)
self.SetPyData(wxItem, fullpath)
def getTextForMacroName(self, path, name):
"""Given the macro name, return the text to display in the list
This can be overridden in subclasses to provide the keystroke to which
the macro is bound, if the macro is active in the current major mode
"""
return name
def OnActivate(self, evt):
if self.allow_activation:
name = self.GetPyData(evt.GetItem())
self.dprint("Activating macro %s" % name)
if name is not None:
macro = MacroFS.getMacro(name)
wx.CallAfter(macro.playback, self.getFrame(), self.mode)
def OnCollapsing(self, evt):
item = evt.GetItem()
if item == self.root:
# Don't allow the root item to be collapsed
evt.Veto()
evt.Skip()
def OnBeginEdit(self, evt):
item = evt.GetItem()
name = self.GetPyData(item)
if name == None:
# Only actual macros are allowed to be edited. Other items in the
# tree are not macros, so we veto edit requests
evt.Veto()
def OnEndEdit(self, evt):
if evt.IsEditCancelled():
return
item = evt.GetItem()
old_path = self.GetPyData(item)
new_name = evt.GetLabel()
components = old_path.split('/')
components.pop()
dirname = '/'.join(components)
new_path = dirname + '/' + new_name
dprint("old=%s new=%s" % (old_path, new_path))
        exists = MacroFS.getMacro(new_path)
        if exists:
            evt.Veto()
            wx.CallAfter(self.frame.showErrorDialog, "Cannot rename %s:\n%s already exists." % (old_path, new_path))
else:
vfs.move("macro:%s" % old_path, "macro:%s" % new_path)
self.SetPyData(item, new_path)
RecentMacros.validateAll()
def getSelectedMacros(self):
"""Return a list of all the selected macros
@returns: a list containing the URL of the macro
"""
paths = []
for item in self.GetSelections():
path = self.GetPyData(item)
if path is not None:
paths.append(vfs.normalize("macro:%s" % path))
return paths
def getOptionsForPopupActions(self):
options = {
'minor_mode': self,
'macros': self.getSelectedMacros(),
}
return options
class MacroListMinorMode(MacroTreeCtrl, MinorMode):
"""Tree control to display list of macros available for this major mode
"""
keyword="Macros"
default_classprefs = (
IntParam('best_width', 300),
IntParam('best_height', 500),
BoolParam('springtab', True),
)
@classmethod
def worksWithMajorMode(cls, modecls):
return True
@classmethod
def showWithMajorModeInstance(cls, mode=None, **kwargs):
# It only makes sense to allow macros on modes that you can save
return mode.isMacroProcessingAvailable()
def __init__(self, parent, **kwargs):
MacroTreeCtrl.__init__(self, parent, allow_activation=True)
MinorMode.__init__(self, parent, **kwargs)
self.SetItemText(self.root, (_("Macros Compatible with %s") % self.mode.keyword))
def findKeywordHierarchy(self):
"""Return a list of keywords representing the major mode subclassing
hierarchy of the current major mode.
"""
modecls = self.mode.__class__
keywords = []
hierarchy = modecls.getSubclassHierarchy()
hierarchy.reverse()
for cls in hierarchy:
keywords.append(cls.keyword)
return keywords
def getTextForMacroName(self, path, name):
macro = MacroFS.getMacro(path)
if macro.key_binding:
return "%s (%s)" % (name, macro.key_binding)
else:
return name
def getPopupActions(self, evt, x, y):
return [EditMacro, RenameMacro, (-800, RebindMacro), (-900, DeleteMacro)]
class MacroListSidebar(MacroTreeCtrl, Sidebar):
"""Tree control to display list of macros available for this major mode
"""
keyword = "All Macros"
caption = "All Macros"
default_classprefs = (
IntParam('best_width', 300),
IntParam('best_height', 500),
BoolParam('springtab', True),
)
def __init__(self, parent, **kwargs):
MacroTreeCtrl.__init__(self, parent, allow_activation=False)
Sidebar.__init__(self, parent, **kwargs)
self.SetItemText(self.root, _("All Macros"))
def findKeywordHierarchy(self):
"""Return a list of keywords representing the major mode subclassing
hierarchy of the current major mode.
"""
mode_classes = MajorModeMatcherDriver.findActiveModes()
mode_classes.reverse()
keywords = []
# Put the major mode first
keywords.append(mode_classes.pop(0).keyword)
mode_classes.sort(cmp=lambda a,b: cmp(a.keyword, b.keyword))
for cls in mode_classes:
keywords.append(cls.keyword)
return keywords
def getPopupActions(self, evt, x, y):
return [EditMacro, RenameMacro, (-900, DeleteMacro)]
class EditMacro(SelectAction):
"""Edit the macro in a new tab.
"""
name = "Edit Macro"
def isEnabled(self):
# As long as at least one item in the list is a macro, this can be
# enabled.
macros = self.popup_options['macros']
for path in self.popup_options['macros']:
if vfs.is_file(path):
return True
return False
def action(self, index=-1, multiplier=1):
dprint(self.popup_options)
for url in self.popup_options['macros']:
self.frame.open(url)
class RenameMacro(SelectAction):
"""Rename the selected macros.
"""
name = "Rename Macro"
def isEnabled(self):
macros = self.popup_options['macros']
return len(macros) == 1 and vfs.is_file(macros[0])
def action(self, index=-1, multiplier=1):
tree = self.popup_options['minor_mode']
items = tree.GetSelections()
if items:
tree.EditLabel(items[0])
class DeleteMacro(SelectAction):
"""Delete the selected macros.
"""
name = "Delete Macro"
def isEnabled(self):
macros = self.popup_options['macros']
for path in self.popup_options['macros']:
if vfs.is_file(path):
return True
return False
def action(self, index=-1, multiplier=1):
dprint(self.popup_options)
wx.CallAfter(self.processDelete)
def processDelete(self):
tree = self.popup_options['minor_mode']
macros = tree.getSelectedMacros()
retval = self.frame.showQuestionDialog("Are you sure you want to delete:\n\n%s" % ("\n".join([str(m) for m in macros])))
if retval == wx.ID_YES:
for macro in macros:
vfs.remove(macro)
tree.update()
RecentMacros.validateAll()
class MacroKeystrokeRecorder(KeystrokeRecorder):
"""Custom subclass of KeystrokeRecorder used for new macro keybindings.
"""
def __init__(self, mode, macro_url, tree=None, trigger="RET", count=-1):
"""Constructor that starts the quoted keystroke capturing
        @param mode: major mode instance
        @param macro_url: url of the macro within the macro: filesystem
        @keyword tree: (optional) MacroTreeCtrl instance to refresh once the
        new key binding has been recorded
        @keyword trigger: (optional) trigger keystroke string that will be used
        to end a variable length key sequence
        @keyword count: (optional) exact number of keystrokes to capture
"""
self.tree = tree
self.mode = mode
self.url = macro_url
self.macro = MacroFS.getMacro(self.url)
if self.tree:
self.tree.disableKeystrokeProcessing()
KeystrokeRecorder.__init__(self, self.mode.frame.root_accel, trigger,
count, append=False,
platform="emacs",
action_name=self.macro.name)
def statusUpdateHook(self, status_text):
self.mode.setStatusText(status_text)
def finishRecordingHook(self, accelerator_text):
dprint(self.macro)
self.macro.key_binding = accelerator_text
self.macro.save(self.url)
if self.tree:
# Update the tree display to show the new keystroke
self.tree.update()
# Have to turn on keystroke processing in a CallAfter otherwise the
# RET char trigger gets processed as an action in the tree.
wx.CallAfter(self.tree.enableKeystrokeProcessing)
self.mode.regenerateKeyBindings()
class RebindMacro(SelectAction):
"""Change the key binding of the selected macro
"""
name = "New Key Binding"
def isEnabled(self):
macros = self.popup_options['macros']
return len(macros) == 1 and vfs.is_file(macros[0])
def action(self, index=-1, multiplier=1):
tree = self.popup_options['minor_mode']
items = tree.getSelectedMacros()
if items:
macro_url = items[0]
dprint(macro_url)
MacroKeystrokeRecorder(tree.mode, macro_url, tree=tree)
class RebindLastMacro(StopRecordingMixin, SelectAction):
"""Add keyboard binding for last macro that was recorded"""
name = "Add Keybinding For Last Macro"
key_bindings = {'default': "S-C-6", 'mac': "^S-6", 'emacs': ["C-x C-k", "S-C-6"]}
default_menu = ("Tools/Macros", 130)
def isEnabled(self):
return RecentMacros.isEnabled()
@classmethod
def isRecordable(cls):
return False
def action(self, index=-1, multiplier=1):
self.stopRecording()
name = RecentMacros.getLastMacroName()
if name:
MacroKeystrokeRecorder(self.mode, name)
else:
self.dprint("No recorded macro.")
class MacroPlugin(IPeppyPlugin):
"""Plugin providing the macro recording capability
"""
default_classprefs = (
StrParam('macro_file', 'macros.dat', 'File name in main peppy configuration directory used to store macro definitions'),
IntParam('list_length', 3, 'Number of macros to save in the Recent Macros list'),
)
def activateHook(self):
vfs.register_file_system('macro', MacroFS)
def initialActivation(self):
pathname = wx.GetApp().getConfigFilePath(self.classprefs.macro_file)
macro_url = vfs.normalize(pathname)
try:
MacroSaveData.load(macro_url)
except:
dprint("Failed loading macro data to %s" % macro_url)
import traceback
traceback.print_exc()
def requestedShutdown(self):
pathname = wx.GetApp().getConfigFilePath(self.classprefs.macro_file)
macro_url = vfs.normalize(pathname)
try:
MacroSaveData.save(macro_url)
except:
dprint("Failed saving macro data to %s" % macro_url)
import traceback
traceback.print_exc()
def deactivateHook(self):
vfs.deregister_file_system('macro')
def getMinorModes(self):
yield MacroListMinorMode
def getSidebars(self):
yield MacroListSidebar
def getActions(self):
return [
StartRecordingMacro, StopRecordingMacro,
ReplayLastMacro, RebindLastMacro,
RecentMacros, ExecuteMacroByName, ExecuteMacroByKeystroke,
]<|fim▁end|> | self.update()
def update(self, evt=None): |
<|file_name|>xlsx-util.ts<|end_file_name|><|fim▁begin|>import Fs from 'fs'
import Path from 'path'
import XmlParser from 'xml2js'
const Zip = require('node-zip')
/**
* This represents an entry in the zip file. If the entry comes from an existing archive previously loaded, the content will be automatically decompressed/converted first.
* @see https://stuk.github.io/jszip/documentation/api_zipobject.html
*/
export type ZipObject = {
/** the absolute path of the file. */
name: string
/** `true` if this is a directory. */
dir: boolean
/** the last modification date. */
date: Date
/** the comment for this file. */
comment: string
/** The UNIX permissions of the file, if any. 16 bits number. */
unixPermissions: number
/** The DOS permissions of the file, if any. 6 bits number. */
dosPermissions: number
/** the options of the file. The available options. */
options: {
compression: (
name: string,
data:
| string
| ArrayBuffer
| Uint8Array
| Buffer
| Blob
| Promise<any>
| WritableStream
) => void
}
/** Files. */
files: any
}
/** Sheet data. */
export type SheetData = {
/** Sheet name. */
name: string
/** Data obtained by converting the XML of the sheet to the JavaScript Object. */
sheet: any
/** Data obtained by converting the XML of the shared strings to the JavaScript Object. */
strings?: any
}
/** Sheet size. */<|fim▁hole|>export type SheetSize = {
/** Row of sheet. */
row: {
/** Minimum value of row. */
min: number
/** Maximum value of row. */
max: number
}
/** Column of sheet. */
col: {
/** Minimum value of column. */
min: number
/** Maximum value of column. */
max: number
}
}
/** It is a cell in a sheet. */
export type Cell = {
/** Row position. */
row: number
/** Column position. */
col: number
/** Type.. */
type: string
/** Value string. */
value: string
}
/** It is the position of the cell. */
type Position = {
/** Row position. */
row: number
/** Column position. */
col: number
}
/** The maximum number of sheets (Excel 97). */
const MaxSheets = 256
/** Defines the file path in the XLSX. */
const FilePaths = {
WorkBook: 'xl/workbook.xml',
SharedStrings: 'xl/sharedStrings.xml',
SheetBase: 'xl/worksheets/sheet'
}
/**
 * Create empty cells.
* @param rows Rows count.
* @param cols Columns count.
* @return Cells.
*/
export const createEmptyCells = (rows: number, cols: number): string[][] => {
const arr = []
for (let i = 0; i < rows; ++i) {
const row = []
for (let j = 0; j < cols; ++j) {
row.push('')
}
arr.push(row)
}
return arr
}
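// For example, createEmptyCells(2, 3) yields [['', '', ''], ['', '', '']].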
/**
 * Get cells from rows.
* @param rows Rows.
* @return Cells.
*/
export const getCells = (rows: any[]): Cell[] => {
const cells: Cell[] = []
rows
.filter((row) => {
return row.c && 0 < row.c.length
})
.forEach((row) => {
row.c.forEach((cell: any) => {
const position = getPosition(cell.$.r)
cells.push({
row: position.row,
col: position.col,
type: cell.$.t ? cell.$.t : '',
value: cell.v && 0 < cell.v.length ? cell.v[0] : ''
})
})
})
return cells
}
/**
* Get the coordinates of the cell.
* @param text Position text. Such as "A1" and "U109".
* @return Position.
*/
export const getPosition = (text: string): Position => {
// 'A1' -> [A, 1]
const units = text.split(/([0-9]+)/)
if (units.length < 2) {
return { row: 0, col: 0 }
}
return {
row: parseInt(units[1], 10),
col: numOfColumn(units[0])
}
}
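// For example, getPosition('C12') yields { row: 12, col: 3 }, delegating the
// column letters to numOfColumn() below.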
/**
* Get a sheet name.
* @param zip Extract data of XLSX (Zip) file.
 * @param index Index of sheet. Range is from 1 to XlsxExtractor.count.
* @returns Sheet name.
*/
const getSheetName = async (zip: ZipObject, index: number): Promise<string> => {
const root = await parseXML(zip.files[FilePaths.WorkBook].asText())
let name = ''
if (
root &&
root.workbook &&
root.workbook.sheets &&
0 < root.workbook.sheets.length &&
root.workbook.sheets[0].sheet
) {
root.workbook.sheets[0].sheet.some((sheet: any) => {
const id = Number(sheet.$.sheetId)
if (id === index) {
name = sheet.$.name || ''
return true
}
return false
})
}
return name
}
/**
 * Get sheet data.
 * @param zip Extract data of XLSX (Zip) file.
 * @param index Index of sheet. Range is from 1 to XlsxExtractor.count.
* @returns Sheet data.
*/
export const getSheetData = async (
zip: ZipObject,
index: number
): Promise<SheetData> => {
const data: SheetData = {
name: '',
sheet: {}
}
data.name = await getSheetName(zip, index)
data.sheet = await parseXML(
zip.files[FilePaths.SheetBase + index + '.xml'].asText()
)
if (zip.files[FilePaths.SharedStrings]) {
data.strings = await parseXML(zip.files[FilePaths.SharedStrings].asText())
}
return data
}
/**
* Gets the number of sheets.
* @param zip Extract data of XLSX (Zip) file.
* @returns Number of sheets
*/
export const getSheetInnerCount = (zip: ZipObject): number => {
let count = 0
for (let i = 1; i < MaxSheets; ++i) {
const path = FilePaths.SheetBase + i + '.xml'
if (!zip.files[path]) {
break
}
++count
}
return count
}
/**
* Get the range of the sheet.
* @param sheet Sheet data.
* @param cells Cells.
* @return Range.
*/
export const getSheetSize = (sheet: any, cells: any[]): SheetSize => {
  // Use the sheet's dimension element if the size is defined there
if (
sheet &&
sheet.worksheet &&
sheet.worksheet.dimension &&
    0 < sheet.worksheet.dimension.length
) {
const range = sheet.worksheet.dimension[0].$.ref.split(':')
if (range.length === 2) {
const min = getPosition(range[0])
const max = getPosition(range[1])
return {
row: { min: min.row, max: max.row },
col: { min: min.col, max: max.col }
}
}
}
const ascend = (a: number, b: number) => a - b
const rows = cells.map((cell) => cell.row).sort(ascend)
const cols = cells.map((cell) => cell.col).sort(ascend)
return {
row: { min: rows[0], max: rows[rows.length - 1] },
col: { min: cols[0], max: cols[cols.length - 1] }
}
}
/**
* Convert the column text to number.
* @param text Column text, such as A" and "AA".
* @return Column number, otherwise -1.
*/
export const numOfColumn = (text: string): number => {
const letters = [
'',
'A',
'B',
'C',
'D',
'E',
'F',
'G',
'H',
'I',
'J',
'K',
'L',
'M',
'N',
'O',
'P',
'Q',
'R',
'S',
'T',
'U',
'V',
'W',
'X',
'Y',
'Z'
]
const col = text.trim().split('')
let num = 0
for (let i = 0, max = col.length; i < max; ++i) {
num *= 26
num += letters.indexOf(col[i])
}
return num
}
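// For example: numOfColumn('A') === 1, numOfColumn('Z') === 26 and
// numOfColumn('AA') === 27 (bijective base-26, with no zero digit).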
/**
* Parse the `r` element of XML.
* @param r `r` elements.
* @return Parse result.
*/
export const parseR = (r: any[]): string => {
let value = ''
r.forEach((obj) => {
if (obj.t) {
value += parseT(obj.t)
}
})
return value
}
/**
* Parse the `t` element of XML.
* @param t `t` elements.
* @return Parse result.
*/
export const parseT = (t: any[]): string => {
let value = ''
t.forEach((obj) => {
switch (typeof obj) {
case 'string':
value += obj
break
// The value of xml:space="preserve" is stored in the underscore
case 'object':
if (obj._ && typeof obj._ === 'string') {
value += obj._
}
break
default:
break
}
})
return value
}
/**
* Parse the XML text.
* @param xml XML text.
* @return XML parse task.
*/
export const parseXML = (xml: string): Promise<any> => {
return new Promise((resolve, reject) => {
XmlParser.parseString(xml, (err, obj) => {
return err ? reject(err) : resolve(obj)
})
})
}
/**
* Extract a zip file.
* @param path Zip file path.
* @return If success zip object, otherwise null.
* @throws Failed to expand the XLSX file.
*/
export const unzip = (path: string): ZipObject => {
try {
const file = Fs.readFileSync(Path.resolve(path))
return Zip(file)
} catch (err) {
throw new Error('Failed to expand the XLSX file.')
}
}
/**
* Get a value from the cell strings.
*
* @param str Cell strings.
*
* @return Value.
*/
export const valueFromStrings = (str: any): string => {
let value = ''
const keys = Object.keys(str)
keys.forEach((key) => {
switch (key) {
case 't':
value += parseT(str[key])
break
case 'r':
value += parseR(str[key])
break
default:
break
}
})
return value
}<|fim▁end|> | |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include <CtrlLib/CtrlLib.h>
using namespace Upp;
GUI_APP_MAIN
{
ConvertOpeningHours cv;
<|fim▁hole|> EditField ef;
ef.SetConvert(cv);
TopWindow win;
win.Add(ef.TopPos(0, Ctrl::STDSIZE).HSizePos());
win.Run();
}<|fim▁end|> | DDUMP(cv.Format(cv.Scan("11:00-22:00 12:00-18:00 23:00-23:30")));
|
<|file_name|>JsonSerializableSchema.java<|end_file_name|><|fim▁begin|>package com.google.ratel.deps.jackson.databind.jsonschema;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Retention;
import java.lang.annotation.ElementType;
import java.lang.annotation.Target;
import com.google.ratel.deps.jackson.annotation.JacksonAnnotation;
/**
* Annotation that can be used to define JSON Schema definition for
* the annotated class.
*<p>
* Note that annotation is often not needed: for example, regular
* Jackson beans that Jackson can introspect can be used without
* annotations, to produce JSON schema definition.
*
* @author Ryan Heaton
* @author Tatu Saloranta
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@JacksonAnnotation
public @interface JsonSerializableSchema
{
/**
* Marker value used to indicate that property has "no value";<|fim▁hole|> */
public final static String NO_VALUE = "##irrelevant";
/**
* Property that can be used to indicate id of the type when
* generating JSON Schema; empty String indicates that no id
* is defined.
*/
public String id() default "";
/**
* The schema type for this JsonSerializable instance.
* Possible values: "string", "number", "boolean", "object", "array", "null", "any"
*
* @return The schema type for this JsonSerializable instance.
*/
public String schemaType() default "any";
/**
* If the schema type is "object", JSON definition of properties of the object as
* a String.
*
* @return The node representing the schema properties, or "##irrelevant" if irrelevant.
*
* @deprecated (since 2.1) -- support will be dropped in future, since JSON-as-String is
 * a fundamentally bad way of customizing anything. No direct replacements offered.
*/
@Deprecated
public String schemaObjectPropertiesDefinition() default NO_VALUE;
/**
* If the schema type if "array", JSON definition of the schema for item types contained.
*
* @return The schema for the items in the array, or "##irrelevant" if irrelevant.
*
* @deprecated (since 2.1) -- support will be dropped in future, since JSON-as-String is
 * a fundamentally bad way of customizing anything. No direct replacements offered.
*/
@Deprecated
public String schemaItemDefinition() default NO_VALUE;
}<|fim▁end|> | * needed because annotations can not have null as default
* value. |
<|file_name|>webpack.config.dist.js<|end_file_name|><|fim▁begin|>var webpack = require('webpack');
var path = require('path');
var mainPath = path.resolve(__dirname,'..','..','src','index.jsx');
var srcPath = path.resolve(__dirname,'..','..', 'src');
var config = require("./webpack.config.js");
var packageJson = require('../../package.json')
/** ============
NOTE: change the following per the dependencies
of the component you are developing.
============ **/
config.externals = {
// "react": "react",
// "react-dom": "react-dom",
// your dependencies here
// "d3": "d3",
}
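// With externals left empty, every dependency is bundled into the dist
// build; uncommenting entries above makes consumers supply those packages
// at runtime instead.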
config.entry = mainPath;
config.devtool = 'none';
config.plugins = [
new webpack.optimize.UglifyJsPlugin(),
new webpack.DefinePlugin({
'process.env': {
'NODE_ENV': JSON.stringify('production')
}
}),<|fim▁hole|> new webpack.ProvidePlugin({
'react': "react"
})
];
config.module.loaders.unshift({
test: /\.jsx$/,
loaders: ['babel'],
exclude: ['node_modules']
})
module.exports = config;<|fim▁end|> | |
<|file_name|>clone.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::result::Iter;
// #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
// #[must_use]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub enum Result<T, E> {
// /// Contains the success value
// #[stable(feature = "rust1", since = "1.0.0")]
// Ok(T),
//
// /// Contains the error value
// #[stable(feature = "rust1", since = "1.0.0")]
// Err(E)
// }
// impl<T, E> Result<T, E> {
// /////////////////////////////////////////////////////////////////////////
// // Querying the contained values
// /////////////////////////////////////////////////////////////////////////
//
// /// Returns true if the result is `Ok`
// ///
// /// # Examples
// ///
// /// ```
// /// let x: Result<i32, &str> = Ok(-3);
// /// assert_eq!(x.is_ok(), true);
// ///
// /// let x: Result<i32, &str> = Err("Some error message");
// /// assert_eq!(x.is_ok(), false);
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn is_ok(&self) -> bool {
// match *self {
// Ok(_) => true,
// Err(_) => false
// }
// }
//
// /// Returns true if the result is `Err`
// ///
// /// # Examples
// ///
// /// ```
// /// let x: Result<i32, &str> = Ok(-3);
// /// assert_eq!(x.is_err(), false);
// ///
// /// let x: Result<i32, &str> = Err("Some error message");
// /// assert_eq!(x.is_err(), true);
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn is_err(&self) -> bool {
// !self.is_ok()
// }
//
// /////////////////////////////////////////////////////////////////////////
// // Adapter for each variant
// /////////////////////////////////////////////////////////////////////////
//
// /// Converts from `Result<T, E>` to `Option<T>`
// ///
// /// Converts `self` into an `Option<T>`, consuming `self`,
// /// and discarding the error, if any.
// ///
// /// # Examples
// ///
// /// ```
// /// let x: Result<u32, &str> = Ok(2);
// /// assert_eq!(x.ok(), Some(2));
// ///
// /// let x: Result<u32, &str> = Err("Nothing here");
// /// assert_eq!(x.ok(), None);
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn ok(self) -> Option<T> {
// match self {
// Ok(x) => Some(x),
// Err(_) => None,
// }
// }
//
// /// Converts from `Result<T, E>` to `Option<E>`
// ///
// /// Converts `self` into an `Option<E>`, consuming `self`,
// /// and discarding the success value, if any.
// ///
// /// # Examples
// ///
// /// ```
// /// let x: Result<u32, &str> = Ok(2);
// /// assert_eq!(x.err(), None);
// ///
// /// let x: Result<u32, &str> = Err("Nothing here");
// /// assert_eq!(x.err(), Some("Nothing here"));
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn err(self) -> Option<E> {
// match self {
// Ok(_) => None,
// Err(x) => Some(x),
// }
// }
//
// /////////////////////////////////////////////////////////////////////////
// // Adapter for working with references
// /////////////////////////////////////////////////////////////////////////
//
// /// Converts from `Result<T, E>` to `Result<&T, &E>`
// ///
// /// Produces a new `Result`, containing a reference
// /// into the original, leaving the original in place.
// ///
// /// ```
// /// let x: Result<u32, &str> = Ok(2);
// /// assert_eq!(x.as_ref(), Ok(&2));
// ///
// /// let x: Result<u32, &str> = Err("Error");
// /// assert_eq!(x.as_ref(), Err(&"Error"));
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn as_ref(&self) -> Result<&T, &E> {
// match *self {
// Ok(ref x) => Ok(x),
// Err(ref x) => Err(x),
// }
// }
//
// /// Converts from `Result<T, E>` to `Result<&mut T, &mut E>`
// ///
// /// ```
// /// fn mutate(r: &mut Result<i32, i32>) {
// /// match r.as_mut() {
// /// Ok(&mut ref mut v) => *v = 42,
// /// Err(&mut ref mut e) => *e = 0,
// /// }
// /// }
// ///
// /// let mut x: Result<i32, i32> = Ok(2);
// /// mutate(&mut x);
// /// assert_eq!(x.unwrap(), 42);
// ///
// /// let mut x: Result<i32, i32> = Err(13);
// /// mutate(&mut x);
// /// assert_eq!(x.unwrap_err(), 0);
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn as_mut(&mut self) -> Result<&mut T, &mut E> {
// match *self {
// Ok(ref mut x) => Ok(x),
// Err(ref mut x) => Err(x),
// }
// }
//
// /// Converts from `Result<T, E>` to `&[T]` (without copying)
// #[inline]
// #[unstable(feature = "as_slice", since = "unsure of the utility here")]
// pub fn as_slice(&self) -> &[T] {
// match *self {
// Ok(ref x) => slice::ref_slice(x),
// Err(_) => {
// // work around lack of implicit coercion from fixed-size array to slice
// let emp: &[_] = &[];
// emp
// }
// }
// }
//
// /// Converts from `Result<T, E>` to `&mut [T]` (without copying)
// ///
// /// ```
// /// # #![feature(core)]
// /// let mut x: Result<&str, u32> = Ok("Gold");
// /// {
// /// let v = x.as_mut_slice();
// /// assert!(v == ["Gold"]);
// /// v[0] = "Silver";
// /// assert!(v == ["Silver"]);
// /// }
// /// assert_eq!(x, Ok("Silver"));
// ///
// /// let mut x: Result<&str, u32> = Err(45);
// /// assert!(x.as_mut_slice().is_empty());
// /// ```
// #[inline]
// #[unstable(feature = "core",
// reason = "waiting for mut conventions")]
// pub fn as_mut_slice(&mut self) -> &mut [T] {
// match *self {
// Ok(ref mut x) => slice::mut_ref_slice(x),
// Err(_) => {
// // work around lack of implicit coercion from fixed-size array to slice
// let emp: &mut [_] = &mut [];
// emp
// }
// }
// }
//
// /////////////////////////////////////////////////////////////////////////
// // Transforming contained values
// /////////////////////////////////////////////////////////////////////////
//
// /// Maps a `Result<T, E>` to `Result<U, E>` by applying a function to an
// /// contained `Ok` value, leaving an `Err` value untouched.
// ///
// /// This function can be used to compose the results of two functions.
// ///
// /// # Examples
// ///
// /// Print the numbers on each line of a string multiplied by two.
// ///
// /// ```
// /// let line = "1\n2\n3\n4\n";
// ///
// /// for num in line.lines() {
// /// match num.parse::<i32>().map(|i| i * 2) {
// /// Ok(n) => println!("{}", n),
// /// Err(..) => {}
// /// }
// /// }
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn map<U, F: FnOnce(T) -> U>(self, op: F) -> Result<U,E> {
// match self {
// Ok(t) => Ok(op(t)),
// Err(e) => Err(e)
// }
// }
//
// /// Maps a `Result<T, E>` to `Result<T, F>` by applying a function to an
// /// contained `Err` value, leaving an `Ok` value untouched.
// ///
// /// This function can be used to pass through a successful result while handling
// /// an error.
// ///
// /// # Examples
// ///
// /// ```
// /// fn stringify(x: u32) -> String { format!("error code: {}", x) }
// ///
// /// let x: Result<u32, u32> = Ok(2);
// /// assert_eq!(x.map_err(stringify), Ok(2));
// ///
// /// let x: Result<u32, u32> = Err(13);
// /// assert_eq!(x.map_err(stringify), Err("error code: 13".to_string()));
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn map_err<F, O: FnOnce(E) -> F>(self, op: O) -> Result<T,F> {
// match self {
// Ok(t) => Ok(t),
// Err(e) => Err(op(e))
// }
// }
//
// /////////////////////////////////////////////////////////////////////////
// // Iterator constructors
// /////////////////////////////////////////////////////////////////////////
//
// /// Returns an iterator over the possibly contained value.
// ///
// /// # Examples
// ///
// /// ```
// /// let x: Result<u32, &str> = Ok(7);
// /// assert_eq!(x.iter().next(), Some(&7));
// ///
// /// let x: Result<u32, &str> = Err("nothing!");
// /// assert_eq!(x.iter().next(), None);
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn iter(&self) -> Iter<T> {
// Iter { inner: self.as_ref().ok() }
// }
//
// /// Returns a mutable iterator over the possibly contained value.
// ///
// /// # Examples
// ///
// /// ```
// /// let mut x: Result<u32, &str> = Ok(7);
// /// match x.iter_mut().next() {
// /// Some(&mut ref mut x) => *x = 40,
// /// None => {},
// /// }
// /// assert_eq!(x, Ok(40));
// ///
// /// let mut x: Result<u32, &str> = Err("nothing!");
// /// assert_eq!(x.iter_mut().next(), None);
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn iter_mut(&mut self) -> IterMut<T> {
// IterMut { inner: self.as_mut().ok() }
// }
//
// ////////////////////////////////////////////////////////////////////////
// // Boolean operations on the values, eager and lazy
// /////////////////////////////////////////////////////////////////////////
//
// /// Returns `res` if the result is `Ok`, otherwise returns the `Err` value of `self`.
// ///
// /// # Examples
// ///
// /// ```
// /// let x: Result<u32, &str> = Ok(2);
// /// let y: Result<&str, &str> = Err("late error");
// /// assert_eq!(x.and(y), Err("late error"));
// ///
// /// let x: Result<u32, &str> = Err("early error");
// /// let y: Result<&str, &str> = Ok("foo");
// /// assert_eq!(x.and(y), Err("early error"));
// ///
// /// let x: Result<u32, &str> = Err("not a 2");
// /// let y: Result<&str, &str> = Err("late error");
// /// assert_eq!(x.and(y), Err("not a 2"));
// ///
// /// let x: Result<u32, &str> = Ok(2);
// /// let y: Result<&str, &str> = Ok("different result type");
// /// assert_eq!(x.and(y), Ok("different result type"));
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn and<U>(self, res: Result<U, E>) -> Result<U, E> {
// match self {
// Ok(_) => res,
// Err(e) => Err(e),
// }
// }
//
// /// Calls `op` if the result is `Ok`, otherwise returns the `Err` value of `self`.
// ///
// /// This function can be used for control flow based on result values.
// ///
// /// # Examples
// ///
// /// ```
// /// fn sq(x: u32) -> Result<u32, u32> { Ok(x * x) }
// /// fn err(x: u32) -> Result<u32, u32> { Err(x) }
// ///
// /// assert_eq!(Ok(2).and_then(sq).and_then(sq), Ok(16));
// /// assert_eq!(Ok(2).and_then(sq).and_then(err), Err(4));
// /// assert_eq!(Ok(2).and_then(err).and_then(sq), Err(2));
// /// assert_eq!(Err(3).and_then(sq).and_then(sq), Err(3));
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn and_then<U, F: FnOnce(T) -> Result<U, E>>(self, op: F) -> Result<U, E> {
// match self {
// Ok(t) => op(t),
// Err(e) => Err(e),
// }
// }
//
// /// Returns `res` if the result is `Err`, otherwise returns the `Ok` value of `self`.
// ///
// /// # Examples
// ///
// /// ```
// /// let x: Result<u32, &str> = Ok(2);
// /// let y: Result<u32, &str> = Err("late error");
// /// assert_eq!(x.or(y), Ok(2));
// ///
// /// let x: Result<u32, &str> = Err("early error");
// /// let y: Result<u32, &str> = Ok(2);
// /// assert_eq!(x.or(y), Ok(2));
// ///
// /// let x: Result<u32, &str> = Err("not a 2");
// /// let y: Result<u32, &str> = Err("late error");
// /// assert_eq!(x.or(y), Err("late error"));
// ///
// /// let x: Result<u32, &str> = Ok(2);
// /// let y: Result<u32, &str> = Ok(100);
// /// assert_eq!(x.or(y), Ok(2));
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn or<F>(self, res: Result<T, F>) -> Result<T, F> {
// match self {
// Ok(v) => Ok(v),
// Err(_) => res,
// }
// }
//
// /// Calls `op` if the result is `Err`, otherwise returns the `Ok` value of `self`.
// ///
// /// This function can be used for control flow based on result values.
// ///
// /// # Examples
// ///
// /// ```
// /// fn sq(x: u32) -> Result<u32, u32> { Ok(x * x) }
// /// fn err(x: u32) -> Result<u32, u32> { Err(x) }
// ///
// /// assert_eq!(Ok(2).or_else(sq).or_else(sq), Ok(2));
// /// assert_eq!(Ok(2).or_else(err).or_else(sq), Ok(2));
// /// assert_eq!(Err(3).or_else(sq).or_else(err), Ok(9));
// /// assert_eq!(Err(3).or_else(err).or_else(err), Err(3));
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn or_else<F, O: FnOnce(E) -> Result<T, F>>(self, op: O) -> Result<T, F> {
// match self {
// Ok(t) => Ok(t),
// Err(e) => op(e),
// }
// }
//
// /// Unwraps a result, yielding the content of an `Ok`.
// /// Else it returns `optb`.
// ///
// /// # Examples
// ///
// /// ```
// /// let optb = 2;
// /// let x: Result<u32, &str> = Ok(9);
// /// assert_eq!(x.unwrap_or(optb), 9);
// ///
// /// let x: Result<u32, &str> = Err("error");
// /// assert_eq!(x.unwrap_or(optb), optb);
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn unwrap_or(self, optb: T) -> T {
// match self {
// Ok(t) => t,
// Err(_) => optb
// }
// }
//
// /// Unwraps a result, yielding the content of an `Ok`.
// /// If the value is an `Err` then it calls `op` with its value.
// ///
// /// # Examples
// ///
// /// ```
// /// fn count(x: &str) -> usize { x.len() }
// ///
// /// assert_eq!(Ok(2).unwrap_or_else(count), 2);
// /// assert_eq!(Err("foo").unwrap_or_else(count), 3);
// /// ```
// #[inline]
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn unwrap_or_else<F: FnOnce(E) -> T>(self, op: F) -> T {
// match self {
// Ok(t) => t,
// Err(e) => op(e)
// }
// }
// }
// pub struct Iter<'a, T: 'a> { inner: Option<&'a T> }
// impl<'a, T> Iterator for Iter<'a, T> {
// type Item = &'a T;
//
// #[inline]
// fn next(&mut self) -> Option<&'a T> { self.inner.take() }
// #[inline]
// fn size_hint(&self) -> (usize, Option<usize>) {
// let n = if self.inner.is_some() {1} else {0};
// (n, Some(n))
// }
// }
// impl<'a, T> Clone for Iter<'a, T> {
// fn clone(&self) -> Iter<'a, T> { Iter { inner: self.inner } }
// }
type T = u32;
type E = &'static str;
#[test]
fn clone_test1() {
let x: Result<T, E> = Ok::<T, E>(7);
let mut iter: Iter<T> = x.iter();
let mut clone: Iter<T> = iter.clone();
let a: Option<&T> = iter.next();
let b: Option<&T> = clone.next();
assert_eq!(a, Some::<&T>(&7));
assert_eq!(b, Some::<&T>(&7));<|fim▁hole|> let x: Result<T, E> = Err::<T, E>("nothing!");
let mut iter: Iter<T> = x.iter();
let mut clone: Iter<T> = iter.clone();
let a: Option<&T> = iter.next();
let b: Option<&T> = clone.next();
assert_eq!(a, None::<&T>);
assert_eq!(b, None::<&T>);
}
}<|fim▁end|> | }
#[test]
fn clone_test2() { |
<|file_name|>coverage.rs<|end_file_name|><|fim▁begin|>use truetype::{GlyphID, Result, Tape, Value};
/// A coverage table.
#[derive(Clone, Debug)]
pub enum Coverage {
/// Format 1.
Format1(Coverage1),
/// Format 2.
Format2(Coverage2),
}
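// Format 1 lists covered glyph IDs explicitly; format 2 stores contiguous
// glyph ranges with a running start index, which is more compact when many
// consecutive glyphs are covered.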
table! {
#[doc = "A coverage table in format 1."]
pub Coverage1 { // CoverageFormat1
format (u16), // CoverageFormat
count (u16), // GlyphCount
glyph_ids (Vec<GlyphID>) |this, tape| { // GlyphArray
tape.take_given(this.count as usize)
},
}
}
table! {
#[doc = "A coverage table in format 2."]
pub Coverage2 { // CoverageFormat2
format (u16), // CoverageFormat
count (u16), // RangeCount
ranges (Vec<CoverageRange>) |this, tape| { // RangeRecord
tape.take_given(this.count as usize)
},
}
}
table! {
#[doc = "A coverage range."]
#[derive(Copy)]
pub CoverageRange { // RangeRecord<|fim▁hole|> end (GlyphID), // End
index (u16 ), // StartCoverageIndex
}
}
impl Default for Coverage {
#[inline]
fn default() -> Self {
Coverage::Format1(Coverage1::default())
}
}
impl Value for Coverage {
fn read<T: Tape>(tape: &mut T) -> Result<Self> {
Ok(match tape.peek::<u16>()? {
1 => Coverage::Format1(tape.take()?),
2 => Coverage::Format2(tape.take()?),
_ => raise!("found an unknown format of the coverage table"),
})
}
}<|fim▁end|> | start (GlyphID), // Start |
<|file_name|>alnInfoHolder.hpp<|end_file_name|><|fim▁begin|>#pragma once
//
// njhseq - A library for analyzing sequence data
// Copyright (C) 2012-2018 Nicholas Hathaway <[email protected]>,
//
// This file is part of njhseq.
//
// njhseq is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// njhseq is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with njhseq. If not, see <http://www.gnu.org/licenses/>.
//
//
// alnInfoHolder.hpp
//
// Created by Nicholas Hathaway on 1/13/14.
//
#include "njhseq/alignment/alnCache/alnInfoHolderBase.hpp"
#if __APPLE__ == 1 && __cpp_lib_shared_timed_mutex < 201402L && __ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ <= 101106
#include <sharedMutex.h>
#else
#include <shared_mutex>
#endif
namespace njhseq {
class alnInfoMasterHolder {
public:
// constructors<|fim▁hole|> const substituteMatrix & scoringArray, bool verbose = false);
// members
std::unordered_map<std::string, alnInfoHolderBase<alnInfoLocal>> localHolder_;
std::unordered_map<std::string, alnInfoHolderBase<alnInfoGlobal>> globalHolder_;
std::hash<std::string> strH_;
void clearHolders();
void addHolder(const gapScoringParameters & gapPars,
const substituteMatrix & scoringArray);
// reading
void read(const std::string &masterDirName, bool verbose = false);
// writing
void write(const std::string &masterDirName, bool verbose = false);
void mergeOtherHolder(const alnInfoMasterHolder & otherHolder);
};
namespace alignment {
static std::mutex alnCacheDirSearchLock;
static std::unordered_map<std::string, std::unique_ptr<std::shared_timed_mutex>> alnCacheDirLocks;
} // namespace alignment
} // namespace njhseq<|fim▁end|> | alnInfoMasterHolder();
alnInfoMasterHolder(const gapScoringParameters & gapPars,
const substituteMatrix & scoringArray);
alnInfoMasterHolder(const std::string &masterDirName, const gapScoringParameters & gapPars, |
<|file_name|>InventaryType.java<|end_file_name|><|fim▁begin|>/**
* TNTConcept Easy Enterprise Management by Autentia Real Bussiness Solution S.L.
* Copyright (C) 2007 Autentia Real Bussiness Solution S.L.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.autentia.tnt.businessobject;
public enum InventaryType {
PC, LAPTOP, BOOK, KEY, CARD, CAR
}<|fim▁end|> | *
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
<|file_name|>87. Prime power triples.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Fri Aug 29 15:52:33 2014
@author: raffaelerainone
"""
from time import clock
from math import sqrt
def is_prime(n):
check=True
i=2
while check and i<=sqrt(n):
if n%i==0:
check=False
i+=1
return check
start = clock()
lim=50*(10**6)
A=[]
prime_2 = [i for i in range(2,int(lim**(0.5))) if is_prime(i)]
prime_3 = [i for i in prime_2 if i<(int(lim**(0.34)))]
prime_4 = [i for i in prime_3 if i<(int(lim**(0.25)))]
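# Search strategy: every sum p**2 + q**3 + r**4 must stay below lim, so the
# prime candidates are bounded by lim**(1/2), lim**(1/3) (0.34 used as a safe
# over-approximation of 1/3) and lim**(1/4) respectively.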
for i in prime_2:
for j in prime_3:
for k in prime_4:<|fim▁hole|> A.append(x)
print len(set(A))
print clock() - start<|fim▁end|> | x=(i**2)+(j**3)+(k**4)
if x<lim: |
<|file_name|>test_dirname.rs<|end_file_name|><|fim▁begin|>use common::util::*;
#[test]
fn test_path_with_trailing_slashes() {
new_ucmd!().arg("/root/alpha/beta/gamma/delta/epsilon/omega//")
.run().stdout_is("/root/alpha/beta/gamma/delta/epsilon");
}
#[test]
fn test_path_without_trailing_slashes() {
new_ucmd!().arg("/root/alpha/beta/gamma/delta/epsilon/omega")
.run().stdout_is("/root/alpha/beta/gamma/delta/epsilon");<|fim▁hole|>}
#[test]
fn test_root() {
new_ucmd!().arg("/").run().stdout_is("/");
}
#[test]
fn test_pwd() {
new_ucmd!().arg(".").run().stdout_is(".");
}
#[test]
fn test_empty() {
new_ucmd!().arg("").run().stdout_is(".");
}<|fim▁end|> | |
<|file_name|>E0008.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
match Some("hi".to_string()) {
Some(s) if s.len() == 0 => {},
//~^ ERROR E0008
_ => {},
}
}<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Interface to random number generators in Rust.
//!
//! This is an experimental library which lives underneath the standard library
//! in its dependency chain. This library is intended to define the interface
//! for random number generation and also provide utilities around doing so. It
//! is not recommended to use this library directly, but rather the official
//! interface through `std::rand`.
#![crate_name = "rand"]
#![license = "MIT/ASL2"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/master/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(macro_rules, phase, globs)]
#![no_std]
#![experimental]
#[phase(plugin, link)]
extern crate core;
#[cfg(test)] #[phase(plugin, link)] extern crate std;
#[cfg(test)] #[phase(plugin, link)] extern crate log;
#[cfg(test)] extern crate native;
#[cfg(test)] extern crate debug;
use core::prelude::*;
pub use isaac::{IsaacRng, Isaac64Rng};
use distributions::{Range, IndependentSample};
use distributions::range::SampleRange;
#[cfg(test)]
static RAND_BENCH_N: u64 = 100;
pub mod distributions;
pub mod isaac;
pub mod reseeding;
mod rand_impls;
/// A type that can be randomly generated using an `Rng`.
pub trait Rand {
/// Generates a random instance of this type using the specified source of
/// randomness.
fn rand<R: Rng>(rng: &mut R) -> Self;
}
/// A random number generator.
pub trait Rng {
/// Return the next random u32.
///
/// This rarely needs to be called directly, prefer `r.gen()` to
/// `r.next_u32()`.
// FIXME #7771: Should be implemented in terms of next_u64
fn next_u32(&mut self) -> u32;
/// Return the next random u64.
///
/// By default this is implemented in terms of `next_u32`. An
/// implementation of this trait must provide at least one of
/// these two methods. Similarly to `next_u32`, this rarely needs
/// to be called directly, prefer `r.gen()` to `r.next_u64()`.
fn next_u64(&mut self) -> u64 {
(self.next_u32() as u64 << 32) | (self.next_u32() as u64)
}
/// Fill `dest` with random data.
///
/// This has a default implementation in terms of `next_u64` and
/// `next_u32`, but should be overridden by implementations that
/// offer a more efficient solution than just calling those
/// methods repeatedly.
///
/// This method does *not* have a requirement to bear any fixed
/// relationship to the other methods, for example, it does *not*
/// have to result in the same output as progressively filling
/// `dest` with `self.gen::<u8>()`, and any such behaviour should
/// not be relied upon.
///
/// This method should guarantee that `dest` is entirely filled
/// with new data, and may fail if this is impossible
/// (e.g. reading past the end of a file that is being used as the
/// source of randomness).
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut v = [0u8, .. 13579];
/// task_rng().fill_bytes(v);
/// println!("{}", v.as_slice());
/// ```
fn fill_bytes(&mut self, dest: &mut [u8]) {
// this could, in theory, be done by transmuting dest to a
// [u64], but this is (1) likely to be undefined behaviour for
// LLVM, (2) has to be very careful about alignment concerns,
// (3) adds more `unsafe` that needs to be checked, (4)
// probably doesn't give much performance gain if
// optimisations are on.
let mut count = 0i;
let mut num = 0;
for byte in dest.mut_iter() {
if count == 0 {
// we could micro-optimise here by generating a u32 if
// we only need a few more bytes to fill the vector
// (i.e. at most 4).
num = self.next_u64();
count = 8;
}
*byte = (num & 0xff) as u8;
num >>= 8;
count -= 1;
}
}
/// Return a random value of a `Rand` type.
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// let x: uint = rng.gen();
/// println!("{}", x);
/// println!("{}", rng.gen::<(f64, bool)>());
/// ```
#[inline(always)]
fn gen<T: Rand>(&mut self) -> T {
Rand::rand(self)
}
/// Return an iterator which will yield an infinite number of randomly
/// generated items.
///
/// # Example
///
/// ```
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// let x = rng.gen_iter::<uint>().take(10).collect::<Vec<uint>>();
/// println!("{}", x);
/// println!("{}", rng.gen_iter::<(f64, bool)>().take(5)
/// .collect::<Vec<(f64, bool)>>());
/// ```
fn gen_iter<'a, T: Rand>(&'a mut self) -> Generator<'a, T, Self> {
Generator { rng: self }
}
/// Generate a random value in the range [`low`, `high`). Fails if
/// `low >= high`.
///
/// This is a convenience wrapper around
/// `distributions::Range`. If this function will be called
/// repeatedly with the same arguments, one should use `Range`, as
/// that will amortize the computations that allow for perfect
/// uniformity, as they only happen on initialization.
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// let n: uint = rng.gen_range(0u, 10);
/// println!("{}", n);
/// let m: f64 = rng.gen_range(-40.0f64, 1.3e5f64);
/// println!("{}", m);
/// ```
fn gen_range<T: PartialOrd + SampleRange>(&mut self, low: T, high: T) -> T {
assert!(low < high, "Rng.gen_range called with low >= high");
Range::new(low, high).ind_sample(self)
}
/// Return a bool with a 1 in n chance of true
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// println!("{:b}", rng.gen_weighted_bool(3));
/// ```
fn gen_weighted_bool(&mut self, n: uint) -> bool {
n == 0 || self.gen_range(0, n) == 0
}
/// Return an iterator of random characters from the set A-Z,a-z,0-9.
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let s: String = task_rng().gen_ascii_chars().take(10).collect();
/// println!("{}", s);
/// ```
fn gen_ascii_chars<'a>(&'a mut self) -> AsciiGenerator<'a, Self> {
AsciiGenerator { rng: self }
}
/// Return a random element from `values`.
///
/// Return `None` if `values` is empty.
///
/// # Example
///
/// ```
/// use std::rand::{task_rng, Rng};
///
/// let choices = [1i, 2, 4, 8, 16, 32];
/// let mut rng = task_rng();
/// println!("{}", rng.choose(choices));
/// assert_eq!(rng.choose(choices.slice_to(0)), None);
/// ```
fn choose<'a, T>(&mut self, values: &'a [T]) -> Option<&'a T> {
if values.is_empty() {
None
} else {
Some(&values[self.gen_range(0u, values.len())])
}
}
/// Deprecated name for `choose()`.
#[deprecated = "replaced by .choose()"]
fn choose_option<'a, T>(&mut self, values: &'a [T]) -> Option<&'a T> {
self.choose(values)
}
/// Shuffle a mutable slice in place.
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// let mut y = [1i, 2, 3];
/// rng.shuffle(y);
/// println!("{}", y.as_slice());
/// rng.shuffle(y);
/// println!("{}", y.as_slice());
/// ```<|fim▁hole|> fn shuffle<T>(&mut self, values: &mut [T]) {
let mut i = values.len();
while i >= 2u {
// invariant: elements with index >= i have been locked in place.
i -= 1u;
// lock element i in place.
values.swap(i, self.gen_range(0u, i + 1u));
}
}
}
/// Iterator which will generate a stream of random items.
///
/// This iterator is created via the `gen_iter` method on `Rng`.
pub struct Generator<'a, T, R:'a> {
rng: &'a mut R,
}
impl<'a, T: Rand, R: Rng> Iterator<T> for Generator<'a, T, R> {
fn next(&mut self) -> Option<T> {
Some(self.rng.gen())
}
}
/// Iterator which will continuously generate random ascii characters.
///
/// This iterator is created via the `gen_ascii_chars` method on `Rng`.
pub struct AsciiGenerator<'a, R:'a> {
rng: &'a mut R,
}
impl<'a, R: Rng> Iterator<char> for AsciiGenerator<'a, R> {
fn next(&mut self) -> Option<char> {
static GEN_ASCII_STR_CHARSET: &'static [u8] =
b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\
abcdefghijklmnopqrstuvwxyz\
0123456789";
Some(*self.rng.choose(GEN_ASCII_STR_CHARSET).unwrap() as char)
}
}
/// A random number generator that can be explicitly seeded to produce
/// the same stream of randomness multiple times.
pub trait SeedableRng<Seed>: Rng {
/// Reseed an RNG with the given seed.
///
/// # Example
///
/// ```rust
/// use std::rand::{Rng, SeedableRng, StdRng};
///
/// let seed: &[_] = &[1, 2, 3, 4];
/// let mut rng: StdRng = SeedableRng::from_seed(seed);
/// println!("{}", rng.gen::<f64>());
/// rng.reseed([5, 6, 7, 8]);
/// println!("{}", rng.gen::<f64>());
/// ```
fn reseed(&mut self, Seed);
/// Create a new RNG with the given seed.
///
/// # Example
///
/// ```rust
/// use std::rand::{Rng, SeedableRng, StdRng};
///
/// let seed: &[_] = &[1, 2, 3, 4];
/// let mut rng: StdRng = SeedableRng::from_seed(seed);
/// println!("{}", rng.gen::<f64>());
/// ```
fn from_seed(seed: Seed) -> Self;
}
/// An Xorshift[1] random number
/// generator.
///
/// The Xorshift algorithm is not suitable for cryptographic purposes
/// but is very fast. If you do not know for sure that it fits your
/// requirements, use a more secure one such as `IsaacRng` or `OsRng`.
///
/// [1]: Marsaglia, George (July 2003). ["Xorshift
/// RNGs"](http://www.jstatsoft.org/v08/i14/paper). *Journal of
/// Statistical Software*. Vol. 8 (Issue 14).
pub struct XorShiftRng {
x: u32,
y: u32,
z: u32,
w: u32,
}
impl XorShiftRng {
/// Creates a new XorShiftRng instance which is not seeded.
///
/// The initial values of this RNG are constants, so all generators created
/// by this function will yield the same stream of random numbers. It is
/// highly recommended that this is created through `SeedableRng` instead of
/// this function
pub fn new_unseeded() -> XorShiftRng {
XorShiftRng {
x: 0x193a6754,
y: 0xa8a7d469,
z: 0x97830e05,
w: 0x113ba7bb,
}
}
}
impl Rng for XorShiftRng {
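    // xorshift128: four 32-bit words mixed with shift/xor steps. The period
    // is 2^128 - 1 for any non-zero state, which is why the seeding code
    // below rejects an all-zero seed.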
#[inline]
fn next_u32(&mut self) -> u32 {
let x = self.x;
let t = x ^ (x << 11);
self.x = self.y;
self.y = self.z;
self.z = self.w;
let w = self.w;
self.w = w ^ (w >> 19) ^ (t ^ (t >> 8));
self.w
}
}
impl SeedableRng<[u32, .. 4]> for XorShiftRng {
/// Reseed an XorShiftRng. This will fail if `seed` is entirely 0.
fn reseed(&mut self, seed: [u32, .. 4]) {
assert!(!seed.iter().all(|&x| x == 0),
"XorShiftRng.reseed called with an all zero seed.");
self.x = seed[0];
self.y = seed[1];
self.z = seed[2];
self.w = seed[3];
}
/// Create a new XorShiftRng. This will fail if `seed` is entirely 0.
fn from_seed(seed: [u32, .. 4]) -> XorShiftRng {
assert!(!seed.iter().all(|&x| x == 0),
"XorShiftRng::from_seed called with an all zero seed.");
XorShiftRng {
x: seed[0],
y: seed[1],
z: seed[2],
w: seed[3]
}
}
}
impl Rand for XorShiftRng {
fn rand<R: Rng>(rng: &mut R) -> XorShiftRng {
let mut tuple: (u32, u32, u32, u32) = rng.gen();
while tuple == (0, 0, 0, 0) {
tuple = rng.gen();
}
let (x, y, z, w) = tuple;
XorShiftRng { x: x, y: y, z: z, w: w }
}
}
/// A wrapper for generating floating point numbers uniformly in the
/// open interval `(0,1)` (not including either endpoint).
///
/// Use `Closed01` for the closed interval `[0,1]`, and the default
/// `Rand` implementation for `f32` and `f64` for the half-open
/// `[0,1)`.
///
/// # Example
/// ```rust
/// use std::rand::{random, Open01};
///
/// let Open01(val) = random::<Open01<f32>>();
/// println!("f32 from (0,1): {}", val);
/// ```
pub struct Open01<F>(pub F);
/// A wrapper for generating floating point numbers uniformly in the
/// closed interval `[0,1]` (including both endpoints).
///
/// Use `Open01` for the closed interval `(0,1)`, and the default
/// `Rand` implementation of `f32` and `f64` for the half-open
/// `[0,1)`.
///
/// # Example
///
/// ```rust
/// use std::rand::{random, Closed01};
///
/// let Closed01(val) = random::<Closed01<f32>>();
/// println!("f32 from [0,1]: {}", val);
/// ```
pub struct Closed01<F>(pub F);
#[cfg(not(test))]
mod std {
pub use core::{option, fmt}; // fail!()
}
#[cfg(test)]
mod test {
use std::rand;
pub struct MyRng<R> { inner: R }
impl<R: rand::Rng> ::Rng for MyRng<R> {
fn next_u32(&mut self) -> u32 {
fn next<T: rand::Rng>(t: &mut T) -> u32 {
use std::rand::Rng;
t.next_u32()
}
next(&mut self.inner)
}
}
pub fn rng() -> MyRng<rand::TaskRng> {
MyRng { inner: rand::task_rng() }
}
pub fn weak_rng() -> MyRng<rand::XorShiftRng> {
MyRng { inner: rand::weak_rng() }
}
}<|fim▁end|> | |
<|file_name|>NhinDocQueryWebServiceProxyTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2009-2015, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.docquery.nhin.proxy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.any;
import gov.hhs.fha.nhinc.aspect.NwhinInvocationEvent;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.common.nhinccommon.HomeCommunityType;
import gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetSystemType;
import gov.hhs.fha.nhinc.connectmgr.ConnectionManager;
import gov.hhs.fha.nhinc.connectmgr.ConnectionManagerCache;
import gov.hhs.fha.nhinc.docquery.aspect.AdhocQueryRequestDescriptionBuilder;
import gov.hhs.fha.nhinc.docquery.aspect.AdhocQueryResponseDescriptionBuilder;
import gov.hhs.fha.nhinc.messaging.client.CONNECTClient;
import gov.hhs.fha.nhinc.messaging.service.port.ServicePortDescriptor;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants.UDDI_SPEC_VERSION;
import ihe.iti.xds_b._2007.RespondingGatewayQueryPortType;
import java.lang.reflect.Method;
import javax.xml.ws.Service;
import oasis.names.tc.ebxml_regrep.xsd.query._3.AdhocQueryRequest;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JMock;
import org.jmock.integration.junit4.JUnit4Mockery;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
*
* @author Neil Webb
*/
@RunWith(JMock.class)
public class NhinDocQueryWebServiceProxyTest {
Mockery context = new JUnit4Mockery() {
{
setImposteriser(ClassImposteriser.INSTANCE);
}
};
final Service mockService = context.mock(Service.class);
final RespondingGatewayQueryPortType mockPort = context.mock(RespondingGatewayQueryPortType.class);
@SuppressWarnings("unchecked")
private CONNECTClient<RespondingGatewayQueryPortType> client = mock(CONNECTClient.class);
private ConnectionManagerCache cache = mock(ConnectionManagerCache.class);
private AdhocQueryRequest request;
private AssertionType assertion;
@Test
public void hasBeginOutboundProcessingEvent() throws Exception {
Class<NhinDocQueryProxyWebServiceSecuredImpl> clazz = NhinDocQueryProxyWebServiceSecuredImpl.class;
Method method = clazz.getMethod("respondingGatewayCrossGatewayQuery", AdhocQueryRequest.class,
AssertionType.class, NhinTargetSystemType.class);
NwhinInvocationEvent annotation = method.getAnnotation(NwhinInvocationEvent.class);
assertNotNull(annotation);
assertEquals(AdhocQueryRequestDescriptionBuilder.class, annotation.beforeBuilder());
assertEquals(AdhocQueryResponseDescriptionBuilder.class, annotation.afterReturningBuilder());
assertEquals("Document Query", annotation.serviceType());
assertEquals("", annotation.version());
}
@Test
public void testNoMtom() throws Exception {
NhinDocQueryProxyWebServiceSecuredImpl impl = getImpl();
NhinTargetSystemType target = getTarget("1.1");
impl.respondingGatewayCrossGatewayQuery(request, assertion, target);
verify(client, never()).enableMtom();
}
@Test
public void testUsingGuidance() throws Exception {
NhinDocQueryProxyWebServiceSecuredImpl impl = getImpl();
NhinTargetSystemType target = getTarget("1.1");
impl.respondingGatewayCrossGatewayQuery(request, assertion, target);
verify(cache).getEndpointURLByServiceNameSpecVersion(any(String.class), any(String.class), any(UDDI_SPEC_VERSION.class));
}
/**
* @param hcidValue
* @return
*/
private NhinTargetSystemType getTarget(String hcidValue) {
NhinTargetSystemType target = new NhinTargetSystemType();
HomeCommunityType hcid = new HomeCommunityType();
hcid.setHomeCommunityId(hcidValue);
target.setHomeCommunity(hcid);
target.setUseSpecVersion("2.0");
return target;
}
/**
* @return
*/
private NhinDocQueryProxyWebServiceSecuredImpl getImpl() {
return new NhinDocQueryProxyWebServiceSecuredImpl() {
/*
* (non-Javadoc)
*
* @see
* gov.hhs.fha.nhinc.docquery.nhin.proxy.NhinDocQueryProxyWebServiceSecuredImpl#getCONNECTClientSecured(
* gov.hhs.fha.nhinc.messaging.service.port.ServicePortDescriptor,
* gov.hhs.fha.nhinc.common.nhinccommon.AssertionType, java.lang.String,<|fim▁hole|> ServicePortDescriptor<RespondingGatewayQueryPortType> portDescriptor, AssertionType assertion,
String url, NhinTargetSystemType target) {
return client;
}
/* (non-Javadoc)
* @see gov.hhs.fha.nhinc.docquery.nhin.proxy.NhinDocQueryProxyWebServiceSecuredImpl#getCMInstance()
*/
@Override
protected ConnectionManager getCMInstance() {
return cache;
}
};
}
}<|fim▁end|> | * gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetSystemType)
*/
@Override
public CONNECTClient<RespondingGatewayQueryPortType> getCONNECTClientSecured( |
<|file_name|>advanced_activations.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Layers that act as activation functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras._impl.keras import activations
from tensorflow.python.keras._impl.keras import backend as K
from tensorflow.python.keras._impl.keras import constraints
from tensorflow.python.keras._impl.keras import initializers
from tensorflow.python.keras._impl.keras import regularizers
from tensorflow.python.keras._impl.keras.engine import InputSpec
from tensorflow.python.keras._impl.keras.engine import Layer
from tensorflow.python.keras._impl.keras.engine.base_layer import shape_type_conversion
from tensorflow.python.util.tf_export import tf_export
@tf_export('keras.layers.LeakyReLU')
class LeakyReLU(Layer):
"""Leaky version of a Rectified Linear Unit.
It allows a small gradient when the unit is not active:
`f(x) = alpha * x for x < 0`,
`f(x) = x for x >= 0`.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as the input.
Arguments:
alpha: float >= 0. Negative slope coefficient.
"""
def __init__(self, alpha=0.3, **kwargs):
super(LeakyReLU, self).__init__(**kwargs)
self.supports_masking = True
self.alpha = K.cast_to_floatx(alpha)
def call(self, inputs):
return K.relu(inputs, alpha=self.alpha)
def get_config(self):
config = {'alpha': float(self.alpha)}
base_config = super(LeakyReLU, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@shape_type_conversion
def compute_output_shape(self, input_shape):
return input_shape
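# Illustrative usage (hypothetical caller code, not part of this module):
#   layer = LeakyReLU(alpha=0.1)
# With alpha=0.1 an input of -2.0 maps to -0.2 while 3.0 passes through
# unchanged, matching f(x) = alpha * x for x < 0 and f(x) = x otherwise.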
@tf_export('keras.layers.PReLU')
class PReLU(Layer):
"""Parametric Rectified Linear Unit.
It follows:
`f(x) = alpha * x for x < 0`,
`f(x) = x for x >= 0`,
where `alpha` is a learned array with the same shape as x.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as the input.
Arguments:
alpha_initializer: initializer function for the weights.
alpha_regularizer: regularizer for the weights.
alpha_constraint: constraint for the weights.
shared_axes: the axes along which to share learnable
parameters for the activation function.
For example, if the incoming feature maps
are from a 2D convolution
with output shape `(batch, height, width, channels)`,
and you wish to share parameters across space
so that each filter only has one set of parameters,
set `shared_axes=[1, 2]`.
"""
def __init__(self,
alpha_initializer='zeros',
alpha_regularizer=None,
alpha_constraint=None,
shared_axes=None,
**kwargs):
super(PReLU, self).__init__(**kwargs)
self.supports_masking = True
self.alpha_initializer = initializers.get(alpha_initializer)
self.alpha_regularizer = regularizers.get(alpha_regularizer)
self.alpha_constraint = constraints.get(alpha_constraint)
if shared_axes is None:
self.shared_axes = None
elif not isinstance(shared_axes, (list, tuple)):
self.shared_axes = [shared_axes]
else:
self.shared_axes = list(shared_axes)
@shape_type_conversion
def build(self, input_shape):
param_shape = list(input_shape[1:])
self.param_broadcast = [False] * len(param_shape)
if self.shared_axes is not None:
for i in self.shared_axes:
param_shape[i - 1] = 1
self.param_broadcast[i - 1] = True
self.alpha = self.add_weight(
shape=param_shape,
name='alpha',
initializer=self.alpha_initializer,
regularizer=self.alpha_regularizer,
constraint=self.alpha_constraint)
# Set input spec
axes = {}
if self.shared_axes:
for i in range(1, len(input_shape)):
if i not in self.shared_axes:
axes[i] = input_shape[i]
self.input_spec = InputSpec(ndim=len(input_shape), axes=axes)
self.built = True
def call(self, inputs, mask=None):
pos = K.relu(inputs)
if K.backend() == 'theano':
neg = (
K.pattern_broadcast(self.alpha, self.param_broadcast) *
(inputs - K.abs(inputs)) * 0.5)
else:
neg = -self.alpha * K.relu(-inputs)
return pos + neg
def get_config(self):
config = {
'alpha_initializer': initializers.serialize(self.alpha_initializer),
'alpha_regularizer': regularizers.serialize(self.alpha_regularizer),
'alpha_constraint': constraints.serialize(self.alpha_constraint),
'shared_axes': self.shared_axes
}
base_config = super(PReLU, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@shape_type_conversion
def compute_output_shape(self, input_shape):
return input_shape
@tf_export('keras.layers.ELU')
class ELU(Layer):
"""Exponential Linear Unit.
It follows:
`f(x) = alpha * (exp(x) - 1.) for x < 0`,
`f(x) = x for x >= 0`.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as the input.
Arguments:
alpha: scale for the negative factor.
"""
def __init__(self, alpha=1.0, **kwargs):
super(ELU, self).__init__(**kwargs)
self.supports_masking = True
self.alpha = K.cast_to_floatx(alpha)
def call(self, inputs):
return K.elu(inputs, self.alpha)
def get_config(self):
config = {'alpha': float(self.alpha)}
base_config = super(ELU, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@shape_type_conversion
def compute_output_shape(self, input_shape):
return input_shape
<|fim▁hole|>
@tf_export('keras.layers.ThresholdedReLU')
class ThresholdedReLU(Layer):
"""Thresholded Rectified Linear Unit.
It follows:
`f(x) = x for x > theta`,
`f(x) = 0 otherwise`.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as the input.
Arguments:
theta: float >= 0. Threshold location of activation.
"""
def __init__(self, theta=1.0, **kwargs):
super(ThresholdedReLU, self).__init__(**kwargs)
self.supports_masking = True
self.theta = K.cast_to_floatx(theta)
def call(self, inputs, mask=None):
return inputs * K.cast(K.greater(inputs, self.theta), K.floatx())
def get_config(self):
config = {'theta': float(self.theta)}
base_config = super(ThresholdedReLU, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@shape_type_conversion
def compute_output_shape(self, input_shape):
return input_shape
@tf_export('keras.layers.Softmax')
class Softmax(Layer):
"""Softmax activation function.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as the input.
Arguments:
axis: Integer, axis along which the softmax normalization is applied.
"""
def __init__(self, axis=-1, **kwargs):
super(Softmax, self).__init__(**kwargs)
self.supports_masking = True
self.axis = axis
def call(self, inputs):
return activations.softmax(inputs, axis=self.axis)
def get_config(self):
config = {'axis': self.axis}
base_config = super(Softmax, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@shape_type_conversion
def compute_output_shape(self, input_shape):
return input_shape<|fim▁end|> | |
<|file_name|>transaction.rs<|end_file_name|><|fim▁begin|>// CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use bytes::Bytes;
use libproto::blockchain::SignedTransaction as ProtoSignedTransaction;
use libproto::request::FullTransaction as PTransaction;
use protobuf::Message;
use util::H256;
use util::U256;
// TODO: No need Deserialize. Just because test in trans.rs
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct FullTransaction {
pub hash: H256,
pub content: Bytes,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct RpcTransaction {
pub hash: H256,
pub content: Bytes,
#[serde(rename = "blockNumber")]
pub block_number: U256,
#[serde(rename = "blockHash")]
pub block_hash: H256,
pub index: U256,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct TransactionHash {
pub hash: H256,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(untagged)]
pub enum BlockTransaction {<|fim▁hole|>
impl From<PTransaction> for RpcTransaction {
fn from(mut ptransaction: PTransaction) -> Self {
let stx = ptransaction.take_transaction();
let mut bhash: H256 = H256::default();
bhash.0.clone_from_slice(ptransaction.block_hash.as_slice());
RpcTransaction {
hash: H256::from_slice(stx.get_tx_hash()),
content: Bytes(stx.get_transaction_with_sig().get_transaction().write_to_bytes().unwrap()),
block_number: U256::from(ptransaction.block_number),
block_hash: bhash,
index: U256::from(ptransaction.index),
}
}
}
impl From<ProtoSignedTransaction> for FullTransaction {
fn from(stx: ProtoSignedTransaction) -> Self {
FullTransaction {
hash: H256::from_slice(stx.get_tx_hash()),
content: Bytes(stx.get_transaction_with_sig().get_transaction().write_to_bytes().unwrap()),
}
}
}
impl From<ProtoSignedTransaction> for TransactionHash {
fn from(stx: ProtoSignedTransaction) -> Self {
TransactionHash { hash: H256::from_slice(stx.get_tx_hash()) }
}
}<|fim▁end|> | Full(FullTransaction),
Hash(TransactionHash),
} |
<|file_name|>ProofTree.js<|end_file_name|><|fim▁begin|>// Structure to represent a proof
class ProofTree {
constructor({equation, rule, newScope=false }) {
this.equation = equation;
this.rule = rule;
this.newScope = newScope;
this.parent = null;
this.children = [];
this.isSound = !newScope;
}
isAssumption() {
return this.newScope;
}
isEmpty() {
return this.parent === null && this.children === [];
}
size() {
if (this.isEmpty()) return 0;
if (this.children.length)
return 1 + this.children.map(c=>c.size()).reduce((acc, c)=>acc+c);
return 1;
}
lastNumber() {
return this.size();
}
walk(fn) {
fn(this);
this.children.forEach(child => {
child.walk(fn);
});
}
last() {
if (this.children.length === 0) return this;
var last = this;
this.children.forEach(child => {
if (!child.isAssumption()) {
last = child.last();
}
});
return last;
}
setLines() {
var count = 1;
this.walk((child) => {
child.lineNumber = count;
count ++;
});
}
root() {
if (this.parent === null) return this;
return this.parent.root();
}
inScope(target) {
if (this.lineNumber === target) {
return true;
} else {
if (this.parent === null) return false;
var child = null;
var anySiblings = this.parent.children.some(child => {
return !child.isAssumption() && (child.lineNumber === target)
})
if (anySiblings) {
return true;
}
return this.parent.inScope(target);
}
}
// inScope(line1, line2, context=this.root()) {
//
// if (line1 === line2) return true;
// if (line1 > line2) return false;
// var line1Obj = context.line(line1);<|fim▁hole|> line(lineNumber) {
var line = null;
var count = 1;
this.walk(child => {
if (lineNumber === count) line = child;
count ++;
});
return line;
}
addLine(line) {
line.parent = this.last();
line.parent.children.push(line);
this.root().setLines();
}
closeBox() {
this.isSound = true;
}
addLineTo(line, lineNumber) {
// line.parent = this.line()
}
addLineNewScope({equation, rule}) {
var line = new ProofTree({
equation,
rule,
newScope: true
});
line.parent = this.last();
this.children.push(line);
line.root().setLines();
}
}
// Synonym as it reads better sometimes
ProofTree.prototype.scope = ProofTree.prototype.line;
export default ProofTree;<|fim▁end|> | // var line2Obj = context.line(line2);
// return this.inScope(line1Obj.lineNumber, line2Obj.parent.lineNumber, context);
// }
|