// modify_vpc_attribute.go

package vpc
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// ModifyVpcAttribute invokes the vpc.ModifyVpcAttribute API synchronously
// api document: https://help.aliyun.com/api/vpc/modifyvpcattribute.html
func (client *Client) ModifyVpcAttribute(request *ModifyVpcAttributeRequest) (response *ModifyVpcAttributeResponse, err error) {
response = CreateModifyVpcAttributeResponse()
err = client.DoAction(request, response)
return
}
// ModifyVpcAttributeWithChan invokes the vpc.ModifyVpcAttribute API asynchronously
// api document: https://help.aliyun.com/api/vpc/modifyvpcattribute.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) ModifyVpcAttributeWithChan(request *ModifyVpcAttributeRequest) (<-chan *ModifyVpcAttributeResponse, <-chan error) {
responseChan := make(chan *ModifyVpcAttributeResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.ModifyVpcAttribute(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// ModifyVpcAttributeWithCallback invokes the vpc.ModifyVpcAttribute API asynchronously
// api document: https://help.aliyun.com/api/vpc/modifyvpcattribute.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) ModifyVpcAttributeWithCallback(request *ModifyVpcAttributeRequest, callback func(response *ModifyVpcAttributeResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *ModifyVpcAttributeResponse
var err error
defer close(result)
response, err = client.ModifyVpcAttribute(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// ModifyVpcAttributeRequest is the request struct for api ModifyVpcAttribute
type ModifyVpcAttributeRequest struct {
*requests.RpcRequest
VpcName string `position:"Query" name:"VpcName"`
ResourceOwnerId requests.Integer `position:"Query" name:"ResourceOwnerId"`
ResourceOwnerAccount string `position:"Query" name:"ResourceOwnerAccount"`
VpcId string `position:"Query" name:"VpcId"`
OwnerAccount string `position:"Query" name:"OwnerAccount"`
CidrBlock string `position:"Query" name:"CidrBlock"`
Description string `position:"Query" name:"Description"`
OwnerId requests.Integer `position:"Query" name:"OwnerId"`
}
// ModifyVpcAttributeResponse is the response struct for api ModifyVpcAttribute
type ModifyVpcAttributeResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
}
// CreateModifyVpcAttributeRequest creates a request to invoke ModifyVpcAttribute API
func CreateModifyVpcAttributeRequest() (request *ModifyVpcAttributeRequest) {
request = &ModifyVpcAttributeRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("Vpc", "2016-04-28", "ModifyVpcAttribute", "vpc", "openAPI")
return
}
// CreateModifyVpcAttributeResponse creates a response to parse from ModifyVpcAttribute response
func CreateModifyVpcAttributeResponse() (response *ModifyVpcAttributeResponse) {
    response = &ModifyVpcAttributeResponse{
        BaseResponse: &responses.BaseResponse{},
    }
    return
}
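
// Usage sketch (illustrative, not part of the generated file): the request
// constructor and fields below come from this file, while the client
// constructor, region and credentials are assumptions.
//
//    client, err := vpc.NewClientWithAccessKey("cn-hangzhou", "<accessKeyId>", "<accessKeySecret>")
//    if err != nil {
//        panic(err)
//    }
//    request := vpc.CreateModifyVpcAttributeRequest()
//    request.VpcId = "vpc-xxxxxxxx" // placeholder VPC ID
//    request.VpcName = "renamed-vpc"
//    response, err := client.ModifyVpcAttribute(request)
//    if err != nil {
//        panic(err)
//    }
//    fmt.Println(response.RequestId)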

// categories.js

import React from 'react';
import Link from 'gatsby-link';
import { BlogPostContent, BlogPostContainer, TagList } from '../utils/styles';
import { arrayReducer } from '../utils/helpers.js';
export default function TagsPage({
  data
}) {
  const { edges: posts } = data.allMarkdownRemark;
  const categoryArray = arrayReducer(posts, 'category');

const categoryLinks = categoryArray.map((category, index) => {
return (
<li className="category-item" key={index}>
<Link className='category-list-link' to={`/categories/${category}`} key={index}>{category}</Link>
</li>
)
});
return (
<BlogPostContainer>
<BlogPostContent>
<h2>Categories</h2>
<TagList className='categories-list'>{categoryLinks}</TagList>
</BlogPostContent>
</BlogPostContainer>
);
}
export const query = graphql`
query CategoryPage {
allMarkdownRemark(sort: { fields: [frontmatter___date], order: DESC }) {
totalCount
edges {
node {
frontmatter {
category
}
}
}
}
}
`;
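
// `arrayReducer` is imported from ../utils/helpers.js but not shown in this
// file; a minimal sketch consistent with its use above (collecting the unique
// `frontmatter.category` values across posts) might look like:
//
// export function arrayReducer(posts, field) {
//   return posts.reduce((acc, { node }) => {
//     const value = node.frontmatter[field];
//     return value && !acc.includes(value) ? acc.concat(value) : acc;
//   }, []);
// }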

#! /usr/bin/python
# advisories.py
import json
import os
import re
import datetime
class Advisories(object):
today = datetime.datetime.now().strftime("%Y-%m-%d")
def __init__(self, initial_advisories_path=None, format="txt"):
self.advisories = []
self.added_packages = {}
if initial_advisories_path is not None:
f = open(initial_advisories_path, "r")
if format == 'json':
s = f.read()
if s.startswith("advisories ="):
s = s.replace("advisories = ", "", 1)
s = s.rstrip(";\n")
self.advisories = json.loads(s)
else:
for line in f:
self.parse_line(line)
f.close()
def parse_line(self, line):
line = line.strip()
if line.startswith("#") or line == "":
return
d, p, v, f, desc = line.split(";", 4)
pkgs = p.split(",")
flags = f.split(" ")
desc = desc.replace("\"", "\\\"")
obj = {
"date": d,
"packages": pkgs,
"toolkit_version": v,
"flags": flags,
"description": desc,
}
self.advisories.append(obj)
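
    # Example of a line in the txt format parsed above (fields separated by
    # ";", packages by "," and flags by " "):
    #
    #   2014-03-17;pkg-foo,pkg-bar;6.0;bug security;Fixed a buffer overflow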
def add_advisories(self, packages):
for p in packages:
if p.arch == 'src' and p.name not in self.added_packages and \
".src.rpm" in p.path:
pfd = os.popen('rpm -q -p "%s" --changelog' % p.path)
pfd.readline() # Date line
changelog = ""
for l in pfd:
if l.startswith("*"):
break
else:
if l.startswith("- "):
l = l.replace("- ", "", 1)
changelog += l
pfd.close()
changelog = changelog.strip().replace("\n", "<br />")
pfd = os.popen('rpm -q -p "%s" -l' % p.path)
files = []
for l in pfd:
if ".tar.gz" in l:<|fim▁hole|> l = matches.group(1).replace("-", "_") + \
matches.group(2)
files.append(l.replace(".tar.gz", "").strip())
pfd.close()
if len(files) > 0:
obj = {
"date": Advisories.today,
"packages": files,
"toolkit_version": "6.0",
"flags": ["bug"],
"description": changelog
}
self.advisories.append(obj)
self.added_packages[p.name] = obj
def to_json(self):
return json.dumps(self.advisories)
def new_to_text(self):
s = ""
for k in self.added_packages:
a = self.added_packages[k]
date = a['date']
pkgs = " ".join(a['packages'])
toolkit_version = a['toolkit_version']
flags = " ".join(a['flags'])
desc = a['description'].replace("\\\"", "\"")
s += "%s;%s;%s;%s;%s\n" % \
(date, pkgs, toolkit_version, flags, desc)
return s
def to_text(self):
s = ""
for a in self.advisories:
date = a['date']
pkgs = " ".join(a['packages'])
toolkit_version = a['toolkit_version']
flags = " ".join(a['flags'])
desc = a['description'].replace("\\\"", "\"")
s += "%s;%s;%s;%s;%s\n" % \
(date, pkgs, toolkit_version, flags, desc)
        return s
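
# Usage sketch (hypothetical paths; `packages` is assumed to be an iterable of
# objects with .name, .arch and .path attributes, as add_advisories expects):
#
#   advisories = Advisories("advisories.txt", format="txt")
#   advisories.add_advisories(packages)
#   print(advisories.new_to_text())          # only the entries added this run
#   with open("advisories.json", "w") as f:
#       f.write("advisories = %s;\n" % advisories.to_json())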

// queue_redis.go

// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package queue
import (
"errors"
"strings"
"code.gitea.io/gitea/modules/log"
"github.com/go-redis/redis"
)
// RedisQueueType is the type for redis queue
const RedisQueueType Type = "redis"
// RedisQueueConfiguration is the configuration for the redis queue
type RedisQueueConfiguration struct {
ByteFIFOQueueConfiguration
RedisByteFIFOConfiguration
}
// RedisQueue redis queue
type RedisQueue struct {
*ByteFIFOQueue
}
// NewRedisQueue creates single redis or cluster redis queue
func NewRedisQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue, error) {
configInterface, err := toConfig(RedisQueueConfiguration{}, cfg)
if err != nil {
return nil, err
}
config := configInterface.(RedisQueueConfiguration)
byteFIFO, err := NewRedisByteFIFO(config.RedisByteFIFOConfiguration)
if err != nil {
return nil, err
}
byteFIFOQueue, err := NewByteFIFOQueue(RedisQueueType, byteFIFO, handle, config.ByteFIFOQueueConfiguration, exemplar)
    if err != nil {
        return nil, err
    }
    queue := &RedisQueue{
        ByteFIFOQueue: byteFIFOQueue,
}
queue.qid = GetManager().Add(queue, RedisQueueType, config, exemplar)
return queue, nil
}
type redisClient interface {
RPush(key string, args ...interface{}) *redis.IntCmd
LPop(key string) *redis.StringCmd
LLen(key string) *redis.IntCmd
SAdd(key string, members ...interface{}) *redis.IntCmd
SRem(key string, members ...interface{}) *redis.IntCmd
SIsMember(key string, member interface{}) *redis.BoolCmd
Ping() *redis.StatusCmd
Close() error
}
var _ (ByteFIFO) = &RedisByteFIFO{}
// RedisByteFIFO represents a ByteFIFO formed from a redisClient
type RedisByteFIFO struct {
client redisClient
queueName string
}
// RedisByteFIFOConfiguration is the configuration for the RedisByteFIFO
type RedisByteFIFOConfiguration struct {
Network string
Addresses string
Password string
DBIndex int
QueueName string
}
// NewRedisByteFIFO creates a ByteFIFO formed from a redisClient
func NewRedisByteFIFO(config RedisByteFIFOConfiguration) (*RedisByteFIFO, error) {
fifo := &RedisByteFIFO{
queueName: config.QueueName,
}
dbs := strings.Split(config.Addresses, ",")
if len(dbs) == 0 {
return nil, errors.New("no redis host specified")
} else if len(dbs) == 1 {
fifo.client = redis.NewClient(&redis.Options{
Network: config.Network,
Addr: strings.TrimSpace(dbs[0]), // use default Addr
Password: config.Password, // no password set
DB: config.DBIndex, // use default DB
})
} else {
fifo.client = redis.NewClusterClient(&redis.ClusterOptions{
Addrs: dbs,
})
}
if err := fifo.client.Ping().Err(); err != nil {
return nil, err
}
return fifo, nil
}
// PushFunc pushes data to the end of the fifo and calls the callback if it is added
func (fifo *RedisByteFIFO) PushFunc(data []byte, fn func() error) error {
if fn != nil {
if err := fn(); err != nil {
return err
}
}
return fifo.client.RPush(fifo.queueName, data).Err()
}
// Pop pops data from the start of the fifo
func (fifo *RedisByteFIFO) Pop() ([]byte, error) {
data, err := fifo.client.LPop(fifo.queueName).Bytes()
if err == nil || err == redis.Nil {
return data, nil
}
return data, err
}
// Close this fifo
func (fifo *RedisByteFIFO) Close() error {
return fifo.client.Close()
}
// Len returns the length of the fifo
func (fifo *RedisByteFIFO) Len() int64 {
val, err := fifo.client.LLen(fifo.queueName).Result()
if err != nil {
log.Error("Error whilst getting length of redis queue %s: Error: %v", fifo.queueName, err)
return -1
}
return val
}
func init() {
queuesMap[RedisQueueType] = NewRedisQueue
}

// take.js

'use strict';
//
// Data-forge iterator that yields at most `takeAmount` items from an
// underlying iterator.
//
var TakeIterator = function (iterator, takeAmount) {
var self = this;
self._iterator = iterator;
self._takeAmount = takeAmount;
};
module.exports = TakeIterator;

TakeIterator.prototype.moveNext = function () {
    var self = this;
if (--self._takeAmount >= 0) {
return self._iterator.moveNext();
}
return false;
};
TakeIterator.prototype.getCurrent = function () {
var self = this;
return self._iterator.getCurrent();
};
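
// Usage sketch (hypothetical `arrayIterator` standing in for any iterator
// that implements moveNext/getCurrent):
//
// var take = new TakeIterator(arrayIterator, 3);
// while (take.moveNext()) {
//   console.log(take.getCurrent()); // first three items only
// }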

// password.module.js

(function () {
'use strict';
angular
        .module('password', [
            'ngMaterial',
            /*@@DIST-TEMPLATE-CACHE*/
'ngRoute',
'password.analytics',
'password.title',
'password.nav',
'password.welcome',
'password.forgot',
'password.recovery',
'password.change',
'password.profile',
'password.mfa',
'password.reset',
'password.help',
'password.logo'
]);
})();

// macros.rs

// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Standard library macros
//!
//! This modules contains a set of macros which are exported from the standard
//! library. Each macro is available for use when linking against the standard
//! library.
#![experimental]
#![macro_escape]
/// The entry point for panic of Rust tasks.
///
/// This macro is used to inject panic into a Rust task, causing the task to
/// unwind and panic entirely. Each task's panic can be reaped as the
/// `Box<Any>` type, and the single-argument form of the `panic!` macro will be
/// the value which is transmitted.
///
/// The multi-argument form of this macro panics with a string and has the
/// `format!` syntax for building a string.
///
/// # Example
///
/// ```should_fail
/// # #![allow(unreachable_code)]
/// panic!();
/// panic!("this is a terrible mistake!");
/// panic!(4i); // panic with the value of 4 to be collected elsewhere
/// panic!("this is a {} {message}", "fancy", message = "message");
/// ```
#[macro_export]
macro_rules! panic(
() => ({
panic!("explicit panic")
});
($msg:expr) => ({
// static requires less code at runtime, more constant data
static _FILE_LINE: (&'static str, uint) = (file!(), line!());
::std::rt::begin_unwind($msg, &_FILE_LINE)
});
($fmt:expr, $($arg:tt)*) => ({
// a closure can't have return type !, so we need a full
// function to pass to format_args!, *and* we need the
// file and line numbers right here; so an inner bare fn
// is our only choice.
//
// LLVM doesn't tend to inline this, presumably because begin_unwind_fmt
// is #[cold] and #[inline(never)] and because this is flagged as cold
// as returning !. We really do want this to be inlined, however,
// because it's just a tiny wrapper. Small wins (156K to 149K in size)
// were seen when forcing this to be inlined, and that number just goes
// up with the number of calls to panic!()
//
// The leading _'s are to avoid dead code warnings if this is
// used inside a dead function. Just `#[allow(dead_code)]` is
// insufficient, since the user may have
// `#[forbid(dead_code)]` and which cannot be overridden.
#[inline(always)]
fn _run_fmt(fmt: &::std::fmt::Arguments) -> ! {
static _FILE_LINE: (&'static str, uint) = (file!(), line!());
::std::rt::begin_unwind_fmt(fmt, &_FILE_LINE)
}
format_args!(_run_fmt, $fmt, $($arg)*)
});
)
/// Ensure that a boolean expression is `true` at runtime.
///
/// This will invoke the `panic!` macro if the provided expression cannot be
/// evaluated to `true` at runtime.
///
/// # Example
///
/// ```
/// // the panic message for these assertions is the stringified value of the
/// // expression given.
/// assert!(true);
/// # fn some_computation() -> bool { true }
/// assert!(some_computation());
///
/// // assert with a custom message
/// # let x = true;
/// assert!(x, "x wasn't true!");
/// # let a = 3i; let b = 27i;
/// assert!(a + b == 30, "a = {}, b = {}", a, b);
/// ```
#[macro_export]
macro_rules! assert(
($cond:expr) => (
if !$cond {
panic!(concat!("assertion failed: ", stringify!($cond)))
}
);
($cond:expr, $($arg:expr),+) => (
if !$cond {
panic!($($arg),+)
}
);
)
/// Asserts that two expressions are equal to each other, testing equality in
/// both directions.
///
/// On panic, this macro will print the values of the expressions.
///
/// # Example
///
/// ```
/// let a = 3i;
/// let b = 1i + 2i;
/// assert_eq!(a, b);
/// ```
#[macro_export]
macro_rules! assert_eq(
($given:expr , $expected:expr) => ({
match (&($given), &($expected)) {
(given_val, expected_val) => {
// check both directions of equality....
if !((*given_val == *expected_val) &&
(*expected_val == *given_val)) {
panic!("assertion failed: `(left == right) && (right == left)` \
(left: `{}`, right: `{}`)", *given_val, *expected_val)
}
}
}
})
)
/// Ensure that a boolean expression is `true` at runtime.
///
/// This will invoke the `panic!` macro if the provided expression cannot be
/// evaluated to `true` at runtime.
///
/// Unlike `assert!`, `debug_assert!` statements can be disabled by passing
/// `--cfg ndebug` to the compiler. This makes `debug_assert!` useful for
/// checks that are too expensive to be present in a release build but may be
/// helpful during development.
///
/// # Example
///
/// ```
/// // the panic message for these assertions is the stringified value of the
/// // expression given.
/// debug_assert!(true);
/// # fn some_expensive_computation() -> bool { true }
/// debug_assert!(some_expensive_computation());
///
/// // assert with a custom message
/// # let x = true;
/// debug_assert!(x, "x wasn't true!");
/// # let a = 3i; let b = 27i;
/// debug_assert!(a + b == 30, "a = {}, b = {}", a, b);
/// ```
#[macro_export]
macro_rules! debug_assert(
($($arg:tt)*) => (if cfg!(not(ndebug)) { assert!($($arg)*); })
)
/// Asserts that two expressions are equal to each other, testing equality in
/// both directions.
///
/// On panic, this macro will print the values of the expressions.
///
/// Unlike `assert_eq!`, `debug_assert_eq!` statements can be disabled by
/// passing `--cfg ndebug` to the compiler. This makes `debug_assert_eq!`
/// useful for checks that are too expensive to be present in a release build
/// but may be helpful during development.
///
/// # Example
///
/// ```
/// let a = 3i;
/// let b = 1i + 2i;
/// debug_assert_eq!(a, b);
/// ```
#[macro_export]
macro_rules! debug_assert_eq(
($($arg:tt)*) => (if cfg!(not(ndebug)) { assert_eq!($($arg)*); })
)
/// A utility macro for indicating unreachable code. It will panic if
/// executed. This is occasionally useful to put after loops that never
/// terminate normally, but instead directly return from a function.
///
/// # Example
///
/// ```{.rust}
/// struct Item { weight: uint }
///
/// fn choose_weighted_item(v: &[Item]) -> Item {
/// assert!(!v.is_empty());
/// let mut so_far = 0u;
/// for item in v.iter() {
/// so_far += item.weight;
/// if so_far > 100 {
/// return *item;
/// }
/// }
/// // The above loop always returns, so we must hint to the
/// // type checker that it isn't possible to get down here
/// unreachable!();
/// }
/// ```
#[macro_export]
macro_rules! unreachable(
() => ({
panic!("internal error: entered unreachable code")
});
($msg:expr) => ({
unreachable!("{}", $msg)
});
($fmt:expr, $($arg:tt)*) => ({
panic!(concat!("internal error: entered unreachable code: ", $fmt), $($arg)*)
});
)
/// A standardised placeholder for marking unfinished code. It panics with the
/// message `"not yet implemented"` when executed.
#[macro_export]
macro_rules! unimplemented(
() => (panic!("not yet implemented"))
)
/// Use the syntax described in `std::fmt` to create a value of type `String`.
/// See `std::fmt` for more information.
///
/// # Example
///
/// ```
/// format!("test");
/// format!("hello {}", "world!");
/// format!("x = {}, y = {y}", 10i, y = 30i);
/// ```
#[macro_export]
macro_rules! format(
($($arg:tt)*) => (
format_args!(::std::fmt::format, $($arg)*)
)
)
/// Use the `format!` syntax to write data into a buffer of type `&mut Writer`.
/// See `std::fmt` for more information.
///
/// # Example
///
/// ```
/// # #![allow(unused_must_use)]
/// use std::io::MemWriter;
///
/// let mut w = MemWriter::new();
/// write!(&mut w, "test");
/// write!(&mut w, "formatted {}", "arguments");
/// ```
#[macro_export]
macro_rules! write(
($dst:expr, $($arg:tt)*) => ({
format_args_method!($dst, write_fmt, $($arg)*)
})
)
/// Equivalent to the `write!` macro, except that a newline is appended after
/// the message is written.
#[macro_export]
macro_rules! writeln(
($dst:expr, $fmt:expr $($arg:tt)*) => (
write!($dst, concat!($fmt, "\n") $($arg)*)
)
)
/// Equivalent to the `println!` macro except that a newline is not printed at
/// the end of the message.
#[macro_export]
macro_rules! print(
($($arg:tt)*) => (format_args!(::std::io::stdio::print_args, $($arg)*))
)
/// Macro for printing to a task's stdout handle.
///
/// Each task can override its stdout handle via `std::io::stdio::set_stdout`.
/// The syntax of this macro is the same as that used for `format!`. For more
/// information, see `std::fmt` and `std::io::stdio`.
///
/// # Example
///
/// ```
/// println!("hello there!");
/// println!("format {} arguments", "some");
/// ```
#[macro_export]
macro_rules! println(
($($arg:tt)*) => (format_args!(::std::io::stdio::println_args, $($arg)*))
)
/// Declare a task-local key with a specific type.
///
/// # Example
///
/// ```
/// local_data_key!(my_integer: int)
///
/// my_integer.replace(Some(2));
/// println!("{}", my_integer.get().map(|a| *a));
/// ```
#[macro_export]
macro_rules! local_data_key(
($name:ident: $ty:ty) => (
#[allow(non_upper_case_globals)]
        static $name: ::std::local_data::Key<$ty> = &::std::local_data::KeyValueKey;
    );
    (pub $name:ident: $ty:ty) => (
#[allow(non_upper_case_globals)]
pub static $name: ::std::local_data::Key<$ty> = &::std::local_data::KeyValueKey;
);
)
/// Helper macro for unwrapping `Result` values while returning early with an
/// error if the value of the expression is `Err`. For more information, see
/// `std::io`.
#[macro_export]
macro_rules! try (
($expr:expr) => ({
match $expr {
Ok(val) => val,
Err(err) => return Err(::std::error::FromError::from_error(err))
}
})
)
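
// Illustrative sketch of `try!` usage (assumes the pre-1.0 `Reader` and
// `IoResult` APIs of this file's era):
//
//     fn read_all(r: &mut Reader) -> IoResult<Vec<u8>> {
//         let data = try!(r.read_to_end());
//         Ok(data)
//     }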
/// Create a `std::vec::Vec` containing the arguments.
#[macro_export]
macro_rules! vec[
($($x:expr),*) => ({
use std::slice::BoxedSlicePrelude;
let xs: ::std::boxed::Box<[_]> = box [$($x),*];
xs.into_vec()
});
($($x:expr,)*) => (vec![$($x),*])
]
/// A macro to select an event from a number of receivers.
///
/// This macro is used to wait for the first event to occur on a number of
/// receivers. It places no restrictions on the types of receivers given to
/// this macro, this can be viewed as a heterogeneous select.
///
/// # Example
///
/// ```
/// let (tx1, rx1) = channel();
/// let (tx2, rx2) = channel();
/// # fn long_running_task() {}
/// # fn calculate_the_answer() -> int { 42i }
///
/// spawn(proc() { long_running_task(); tx1.send(()) });
/// spawn(proc() { tx2.send(calculate_the_answer()) });
///
/// select! (
/// () = rx1.recv() => println!("the long running task finished first"),
/// answer = rx2.recv() => {
/// println!("the answer was: {}", answer);
/// }
/// )
/// ```
///
/// For more information about select, see the `std::comm::Select` structure.
#[macro_export]
#[experimental]
macro_rules! select {
(
$($name:pat = $rx:ident.$meth:ident() => $code:expr),+
) => ({
use std::comm::Select;
let sel = Select::new();
$( let mut $rx = sel.handle(&$rx); )+
unsafe {
$( $rx.add(); )+
}
let ret = sel.wait();
$( if ret == $rx.id() { let $name = $rx.$meth(); $code } else )+
{ unreachable!() }
})
}
// When testing the standard library, we link to the liblog crate to get the
// logging macros. In doing so, the liblog crate was linked against the real
// version of libstd, and uses a different std::fmt module than the test crate
// uses. To get around this difference, we redefine the log!() macro here to be
// just a dumb version of what it should be.
#[cfg(test)]
macro_rules! log (
($lvl:expr, $($args:tt)*) => (
if log_enabled!($lvl) { println!($($args)*) }
)
)
/// Built-in macros to the compiler itself.
///
/// These macros do not have any corresponding definition with a `macro_rules!`
/// macro, but are documented here. Their implementations can be found hardcoded
/// into libsyntax itself.
#[cfg(dox)]
pub mod builtin {
/// The core macro for formatted string creation & output.
///
/// This macro takes as its first argument a callable expression which will
/// receive as its first argument a value of type `&fmt::Arguments`. This
/// value can be passed to the functions in `std::fmt` for performing useful
/// functions. All other formatting macros (`format!`, `write!`,
/// `println!`, etc) are proxied through this one.
///
/// For more information, see the documentation in `std::fmt`.
///
/// # Example
///
/// ```rust
/// use std::fmt;
///
/// let s = format_args!(fmt::format, "hello {}", "world");
/// assert_eq!(s, format!("hello {}", "world"));
///
/// format_args!(|args| {
/// // pass `args` to another function, etc.
/// }, "hello {}", "world");
/// ```
#[macro_export]
macro_rules! format_args( ($closure:expr, $fmt:expr $($args:tt)*) => ({
/* compiler built-in */
}) )
/// Inspect an environment variable at compile time.
///
/// This macro will expand to the value of the named environment variable at
/// compile time, yielding an expression of type `&'static str`.
///
/// If the environment variable is not defined, then a compilation error
/// will be emitted. To not emit a compile error, use the `option_env!`
/// macro instead.
///
/// # Example
///
/// ```rust
/// let path: &'static str = env!("PATH");
/// println!("the $PATH variable at the time of compiling was: {}", path);
/// ```
#[macro_export]
macro_rules! env( ($name:expr) => ({ /* compiler built-in */ }) )
/// Optionally inspect an environment variable at compile time.
///
/// If the named environment variable is present at compile time, this will
/// expand into an expression of type `Option<&'static str>` whose value is
/// `Some` of the value of the environment variable. If the environment
/// variable is not present, then this will expand to `None`.
///
/// A compile time error is never emitted when using this macro regardless
/// of whether the environment variable is present or not.
///
/// # Example
///
/// ```rust
/// let key: Option<&'static str> = option_env!("SECRET_KEY");
/// println!("the secret key might be: {}", key);
/// ```
#[macro_export]
macro_rules! option_env( ($name:expr) => ({ /* compiler built-in */ }) )
/// Concatenate literals into a static byte slice.
///
/// This macro takes any number of comma-separated literal expressions,
/// yielding an expression of type `&'static [u8]` which is the
/// concatenation (left to right) of all the literals in their byte format.
///
/// This extension currently only supports string literals, character
/// literals, and integers less than 256. The byte slice returned is the
/// utf8-encoding of strings and characters.
///
/// # Example
///
/// ```
/// let rust = bytes!("r", 'u', "st", 255);
/// assert_eq!(rust[1], b'u');
/// assert_eq!(rust[4], 255);
/// ```
#[macro_export]
macro_rules! bytes( ($($e:expr),*) => ({ /* compiler built-in */ }) )
/// Concatenate identifiers into one identifier.
///
/// This macro takes any number of comma-separated identifiers, and
/// concatenates them all into one, yielding an expression which is a new
/// identifier. Note that hygiene makes it such that this macro cannot
/// capture local variables, and macros are only allowed in item,
/// statement or expression position, meaning this macro may be difficult to
/// use in some situations.
///
/// # Example
///
/// ```
/// #![feature(concat_idents)]
///
/// # fn main() {
/// fn foobar() -> int { 23 }
///
/// let f = concat_idents!(foo, bar);
/// println!("{}", f());
/// # }
/// ```
#[macro_export]
macro_rules! concat_idents( ($($e:ident),*) => ({ /* compiler built-in */ }) )
/// Concatenates literals into a static string slice.
///
/// This macro takes any number of comma-separated literals, yielding an
/// expression of type `&'static str` which represents all of the literals
/// concatenated left-to-right.
///
/// Integer and floating point literals are stringified in order to be
/// concatenated.
///
/// # Example
///
/// ```
/// let s = concat!("test", 10i, 'b', true);
/// assert_eq!(s, "test10btrue");
/// ```
#[macro_export]
macro_rules! concat( ($($e:expr),*) => ({ /* compiler built-in */ }) )
/// A macro which expands to the line number on which it was invoked.
///
/// The expanded expression has type `uint`, and the returned line is not
/// the invocation of the `line!()` macro itself, but rather the first macro
/// invocation leading up to the invocation of the `line!()` macro.
///
/// # Example
///
/// ```
/// let current_line = line!();
/// println!("defined on line: {}", current_line);
/// ```
#[macro_export]
macro_rules! line( () => ({ /* compiler built-in */ }) )
/// A macro which expands to the column number on which it was invoked.
///
/// The expanded expression has type `uint`, and the returned column is not
/// the invocation of the `col!()` macro itself, but rather the first macro
/// invocation leading up to the invocation of the `col!()` macro.
///
/// # Example
///
/// ```
/// let current_col = col!();
/// println!("defined on column: {}", current_col);
/// ```
#[macro_export]
macro_rules! col( () => ({ /* compiler built-in */ }) )
/// A macro which expands to the file name from which it was invoked.
///
/// The expanded expression has type `&'static str`, and the returned file
/// is not the invocation of the `file!()` macro itself, but rather the
/// first macro invocation leading up to the invocation of the `file!()`
/// macro.
///
/// # Example
///
/// ```
/// let this_file = file!();
/// println!("defined in file: {}", this_file);
/// ```
#[macro_export]
macro_rules! file( () => ({ /* compiler built-in */ }) )
/// A macro which stringifies its argument.
///
/// This macro will yield an expression of type `&'static str` which is the
/// stringification of all the tokens passed to the macro. No restrictions
/// are placed on the syntax of the macro invocation itself.
///
/// # Example
///
/// ```
/// let one_plus_one = stringify!(1 + 1);
/// assert_eq!(one_plus_one, "1 + 1");
/// ```
#[macro_export]
macro_rules! stringify( ($t:tt) => ({ /* compiler built-in */ }) )
/// Includes a utf8-encoded file as a string.
///
/// This macro will yield an expression of type `&'static str` which is the
/// contents of the filename specified. The file is located relative to the
/// current file (similarly to how modules are found),
///
/// # Example
///
/// ```rust,ignore
/// let secret_key = include_str!("secret-key.ascii");
/// ```
#[macro_export]
macro_rules! include_str( ($file:expr) => ({ /* compiler built-in */ }) )
/// Includes a file as a byte slice.
///
/// This macro will yield an expression of type `&'static [u8]` which is
/// the contents of the filename specified. The file is located relative to
/// the current file (similarly to how modules are found),
///
/// # Example
///
/// ```rust,ignore
/// let secret_key = include_bin!("secret-key.bin");
/// ```
#[macro_export]
macro_rules! include_bin( ($file:expr) => ({ /* compiler built-in */ }) )
/// Expands to a string that represents the current module path.
///
/// The current module path can be thought of as the hierarchy of modules
/// leading back up to the crate root. The first component of the path
/// returned is the name of the crate currently being compiled.
///
/// # Example
///
/// ```rust
/// mod test {
/// pub fn foo() {
/// assert!(module_path!().ends_with("test"));
/// }
/// }
///
/// test::foo();
/// ```
#[macro_export]
macro_rules! module_path( () => ({ /* compiler built-in */ }) )
/// Boolean evaluation of configuration flags.
///
/// In addition to the `#[cfg]` attribute, this macro is provided to allow
/// boolean expression evaluation of configuration flags. This frequently
/// leads to less duplicated code.
///
/// The syntax given to this macro is the same syntax as the `cfg`
/// attribute.
///
/// # Example
///
/// ```rust
/// let my_directory = if cfg!(windows) {
/// "windows-specific-directory"
/// } else {
/// "unix-directory"
/// };
/// ```
#[macro_export]
macro_rules! cfg( ($cfg:tt) => ({ /* compiler built-in */ }) )
}

// interface.go

package parser
import (
"io"
"time"
"github.com/lestrrat/go-lex"
"github.com/lestrrat/go-xslate/internal/frame"
"github.com/lestrrat/go-xslate/node"
)
const (
ItemError lex.ItemType = lex.ItemDefaultMax + 1 + iota
ItemEOF
ItemRawString
ItemComment
ItemNumber
ItemComplex
ItemChar
ItemSpace
ItemTagStart
ItemTagEnd
ItemSymbol
ItemIdentifier
ItemDoubleQuotedString
ItemSingleQuotedString
ItemBool
ItemField
ItemComma
ItemOpenParen // '('
ItemCloseParen // ')'
ItemOpenSquareBracket // '['
ItemCloseSquareBracket // ']'
ItemPeriod // '.'
ItemKeyword // Delimiter
ItemCall // CALL
ItemGet // GET
ItemSet // SET
ItemMacro // MACRO
ItemBlock // BLOCK
ItemForeach // FOREACH
ItemWhile // WHILE
ItemIn // IN
ItemInclude // INCLUDE
ItemWith // WITH
ItemIf // IF
ItemElse // ELSE
ItemElseIf // ELSIF
ItemUnless // UNLESS
ItemSwitch // SWITCH
ItemCase // CASE
ItemWrapper // WRAPPER
ItemDefault // DEFAULT
ItemEnd // END
ItemOperator // Delimiter
ItemRange // ..
ItemEquals // ==
ItemNotEquals // !=
ItemGT // >
ItemLT // <
ItemCmp // <=>
ItemLE // <=
ItemGE // >=
ItemShiftLeft // <<
ItemShiftRight // >>
ItemAssignAdd // +=
ItemAssignSub // -=
ItemAssignMul // *=
ItemAssignDiv // /=
ItemAssignMod // %=
ItemAnd // &&
ItemOr // ||
ItemFatComma // =>
ItemIncr // ++
ItemDecr // --
ItemPlus
ItemMinus
ItemAsterisk
ItemSlash
ItemVerticalSlash
ItemMod
ItemAssign // =
DefaultItemTypeMax
)
// AST is represents the syntax tree for an Xslate template
type AST struct {
Name string // name of the template
ParseName string // name of the top-level template during parsing
Root *node.ListNode // root of the tree
Timestamp time.Time // last-modified date of this template
text string
}
type Builder struct {
}
// Frame is the frame struct used during parsing, which has a bit of
// extension over the common Frame struct.
type Frame struct {
*frame.Frame
Node node.Appender
    // This contains names of local variables, mapped to their
    // respective location in the framestack
    LvarNames map[string]int
}

type Lexer struct {
lex.Lexer
tagStart string
tagEnd string
symbols *LexSymbolSet
}
// LexSymbol holds the pre-defined symbols to be lexed
type LexSymbol struct {
Name string
Type lex.ItemType
Priority float32
}
// LexSymbolList a list of LexSymbols. Normally you do not need to use it.
// This is mainly only useful for sorting LexSymbols
type LexSymbolList []LexSymbol
// LexSymbolSorter sorts a list of LexSymbols by priority
type LexSymbolSorter struct {
list LexSymbolList
}
// LexSymbolSet is the container for symbols.
type LexSymbolSet struct {
Map map[string]LexSymbol
SortedList LexSymbolList
}
// Parser defines the interface for Xslate parsers
type Parser interface {
Parse(string, []byte) (*AST, error)
ParseString(string, string) (*AST, error)
ParseReader(string, io.Reader) (*AST, error)
}
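
// Usage sketch (hypothetical; assumes a concrete Parser implementation such
// as the TTerse parser that lives alongside this package):
//
//    var p Parser = tterse.New()
//    ast, err := p.ParseString("index.tx", "Hello [% name %]")
//    if err != nil {
//        panic(err)
//    }
//    _ = ast.Root // walk the parsed node tree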

#!/usr/bin/python
# ganalytics.py
# -*- coding: utf-8 -*-
"""Tests for the Google Analytics cookie event formatters."""
import unittest
from plaso.formatters import ganalytics
from tests.formatters import test_lib
class AnalyticsUtmaCookieFormatterTest(test_lib.EventFormatterTestCase):
"""Tests for the UTMA Google Analytics cookie event formatter."""
def testInitialization(self):
"""Tests the initialization."""
event_formatter = ganalytics.AnalyticsUtmaCookieFormatter()
self.assertIsNotNone(event_formatter)
def testGetFormatStringAttributeNames(self):
"""Tests the GetFormatStringAttributeNames function."""
event_formatter = ganalytics.AnalyticsUtmaCookieFormatter()
expected_attribute_names = [
u'url', u'cookie_name', u'sessions', u'domain_hash', u'visitor_id']
self._TestGetFormatStringAttributeNames(
event_formatter, expected_attribute_names)
# TODO: add test for GetMessages.
# TODO: add test for GetSources.
class AnalyticsUtmbCookieFormatterTest(test_lib.EventFormatterTestCase):
"""Tests for the UTMB Google Analytics cookie event formatter."""
def testInitialization(self):
"""Tests the initialization."""
event_formatter = ganalytics.AnalyticsUtmbCookieFormatter()
self.assertIsNotNone(event_formatter)
def testGetFormatStringAttributeNames(self):
"""Tests the GetFormatStringAttributeNames function."""
event_formatter = ganalytics.AnalyticsUtmbCookieFormatter()
expected_attribute_names = [
u'url', u'cookie_name', u'pages_viewed', u'domain_hash']
self._TestGetFormatStringAttributeNames(
event_formatter, expected_attribute_names)
# TODO: add test for GetMessages.
# TODO: add test for GetSources.
class AnalyticsUtmtCookieFormatterTest(test_lib.EventFormatterTestCase):
"""Tests for the UTMT Google Analytics cookie event formatter."""
def testInitialization(self):
"""Tests the initialization."""
event_formatter = ganalytics.AnalyticsUtmtCookieFormatter()
self.assertIsNotNone(event_formatter)
def testGetFormatStringAttributeNames(self):
"""Tests the GetFormatStringAttributeNames function."""
event_formatter = ganalytics.AnalyticsUtmtCookieFormatter()
expected_attribute_names = [u'url', u'cookie_name']
self._TestGetFormatStringAttributeNames(
event_formatter, expected_attribute_names)
# TODO: add test for GetMessages.
# TODO: add test for GetSources.
class AnalyticsUtmzCookieFormatterTest(test_lib.EventFormatterTestCase):
"""Tests for the UTMZ Google Analytics cookie event formatter."""
def testInitialization(self):
"""Tests the initialization."""
event_formatter = ganalytics.AnalyticsUtmzCookieFormatter()
self.assertIsNotNone(event_formatter)
def testGetFormatStringAttributeNames(self):
"""Tests the GetFormatStringAttributeNames function."""
event_formatter = ganalytics.AnalyticsUtmzCookieFormatter()
expected_attribute_names = [
u'url', u'cookie_name', u'sessions', u'domain_hash', u'sources',
u'utmcsr', u'utmccn', u'utmcmd', u'utmctr', u'utmcct']
self._TestGetFormatStringAttributeNames(
event_formatter, expected_attribute_names)
  # TODO: add test for GetMessages.
  # TODO: add test for GetSources.

if __name__ == '__main__':
  unittest.main()

# __init__.py

from ._stub import *
from ._fluent import *
from ._matchers import *

# image.py

'''Fairly basic set of tools for real-time data augmentation on image data.
Can easily be extended to include new transformations,
new preprocessing methods, etc...
'''
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import re
from scipy import linalg
import scipy.ndimage as ndi
from six.moves import range
import os
import threading

from .. import backend as K
def random_rotation(x, rg, row_index=1, col_index=2, channel_index=0,
fill_mode='nearest', cval=0.):
theta = np.pi / 180 * np.random.uniform(-rg, rg)
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(rotation_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def random_shift(x, wrg, hrg, row_index=1, col_index=2, channel_index=0,
fill_mode='nearest', cval=0.):
h, w = x.shape[row_index], x.shape[col_index]
tx = np.random.uniform(-hrg, hrg) * h
ty = np.random.uniform(-wrg, wrg) * w
translation_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = translation_matrix # no need to do offset
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def random_shear(x, intensity, row_index=1, col_index=2, channel_index=0,
fill_mode='nearest', cval=0.):
shear = np.random.uniform(-intensity, intensity)
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(shear_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def random_zoom(x, zoom_range, row_index=1, col_index=2, channel_index=0,
fill_mode='nearest', cval=0.):
if len(zoom_range) != 2:
raise Exception('zoom_range should be a tuple or list of two floats. '
'Received arg: ', zoom_range)
if zoom_range[0] == 1 and zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(zoom_range[0], zoom_range[1], 2)
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(zoom_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def random_barrel_transform(x, intensity):
# TODO
pass
def random_channel_shift(x, intensity, channel_index=0):
x = np.rollaxis(x, channel_index, 0)
min_x, max_x = np.min(x), np.max(x)
channel_images = [np.clip(x_channel + np.random.uniform(-intensity, intensity), min_x, max_x)
for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_index+1)
return x
def transform_matrix_offset_center(matrix, x, y):
o_x = float(x) / 2 + 0.5
o_y = float(y) / 2 + 0.5
offset_matrix = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]])
reset_matrix = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]])
transform_matrix = np.dot(np.dot(offset_matrix, matrix), reset_matrix)
return transform_matrix
def apply_transform(x, transform_matrix, channel_index=0, fill_mode='nearest', cval=0.):
x = np.rollaxis(x, channel_index, 0)
final_affine_matrix = transform_matrix[:2, :2]
final_offset = transform_matrix[:2, 2]
channel_images = [ndi.interpolation.affine_transform(x_channel, final_affine_matrix,
final_offset, order=0, mode=fill_mode, cval=cval) for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_index+1)
return x
def flip_axis(x, axis):
x = np.asarray(x).swapaxes(axis, 0)
x = x[::-1, ...]
x = x.swapaxes(0, axis)
return x
def array_to_img(x, dim_ordering='default', scale=True):
from PIL import Image
if dim_ordering == 'default':
dim_ordering = K.image_dim_ordering()
if dim_ordering == 'th':
x = x.transpose(1, 2, 0)
if scale:
x += max(-np.min(x), 0)
x_max = np.max(x)
if x_max != 0:
x /= x_max
x *= 255
if x.shape[2] == 3:
# RGB
return Image.fromarray(x.astype('uint8'), 'RGB')
elif x.shape[2] == 1:
# grayscale
return Image.fromarray(x[:, :, 0].astype('uint8'), 'L')
else:
raise Exception('Unsupported channel number: ', x.shape[2])
def img_to_array(img, dim_ordering='default'):
if dim_ordering == 'default':
dim_ordering = K.image_dim_ordering()
if dim_ordering not in ['th', 'tf']:
raise Exception('Unknown dim_ordering: ', dim_ordering)
# image has dim_ordering (height, width, channel)
x = np.asarray(img, dtype='float32')
if len(x.shape) == 3:
if dim_ordering == 'th':
x = x.transpose(2, 0, 1)
elif len(x.shape) == 2:
if dim_ordering == 'th':
x = x.reshape((1, x.shape[0], x.shape[1]))
else:
x = x.reshape((x.shape[0], x.shape[1], 1))
else:
raise Exception('Unsupported image shape: ', x.shape)
return x
def load_img(path, grayscale=False, target_size=None):
from PIL import Image
img = Image.open(path)
if grayscale:
img = img.convert('L')
else: # Ensure 3 channel even when loaded image is grayscale
img = img.convert('RGB')
if target_size:
img = img.resize((target_size[1], target_size[0]))
return img
def list_pictures(directory, ext='jpg|jpeg|bmp|png'):
return [os.path.join(directory, f) for f in os.listdir(directory)
if os.path.isfile(os.path.join(directory, f)) and re.match('([\w]+\.(?:' + ext + '))', f)]
class ImageDataGenerator(object):
'''Generate minibatches with
real-time data augmentation.
# Arguments
featurewise_center: set input mean to 0 over the dataset.
samplewise_center: set each sample mean to 0.
featurewise_std_normalization: divide inputs by std of the dataset.
samplewise_std_normalization: divide each input by its std.
zca_whitening: apply ZCA whitening.
rotation_range: degrees (0 to 180).
width_shift_range: fraction of total width.
height_shift_range: fraction of total height.
shear_range: shear intensity (shear angle in radians).
zoom_range: amount of zoom. if scalar z, zoom will be randomly picked
in the range [1-z, 1+z]. A sequence of two can be passed instead
to select this range.
channel_shift_range: shift range for each channels.
fill_mode: points outside the boundaries are filled according to the
given mode ('constant', 'nearest', 'reflect' or 'wrap'). Default
is 'nearest'.
cval: value used for points outside the boundaries when fill_mode is
'constant'. Default is 0.
horizontal_flip: whether to randomly flip images horizontally.
vertical_flip: whether to randomly flip images vertically.
rescale: rescaling factor. If None or 0, no rescaling is applied,
otherwise we multiply the data by the value provided (before applying
any other transformation).
dim_ordering: 'th' or 'tf'. In 'th' mode, the channels dimension
(the depth) is at index 1, in 'tf' mode it is at index 3.
It defaults to the `image_dim_ordering` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "th".
'''
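
    # Usage sketch (illustrative values; X_train and y_train are placeholders):
    #
    #   datagen = ImageDataGenerator(rotation_range=20.,
    #                                width_shift_range=0.1,
    #                                horizontal_flip=True,
    #                                rescale=1. / 255)
    #   for X_batch, y_batch in datagen.flow(X_train, y_train, batch_size=32):
    #       ...  # train on one augmented batch, break when done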
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewise_std_normalization=False,
samplewise_std_normalization=False,
zca_whitening=False,
rotation_range=0.,
width_shift_range=0.,
height_shift_range=0.,
shear_range=0.,
zoom_range=0.,
channel_shift_range=0.,
fill_mode='nearest',
cval=0.,
horizontal_flip=False,
vertical_flip=False,
rescale=None,
dim_ordering='default'):
if dim_ordering == 'default':
dim_ordering = K.image_dim_ordering()
self.__dict__.update(locals())
self.mean = None
self.std = None
self.principal_components = None
self.rescale = rescale
if dim_ordering not in {'tf', 'th'}:
raise Exception('dim_ordering should be "tf" (channel after row and '
'column) or "th" (channel before row and column). '
'Received arg: ', dim_ordering)
self.dim_ordering = dim_ordering
if dim_ordering == 'th':
self.channel_index = 1
self.row_index = 2
self.col_index = 3
if dim_ordering == 'tf':
self.channel_index = 3
self.row_index = 1
self.col_index = 2
if np.isscalar(zoom_range):
self.zoom_range = [1 - zoom_range, 1 + zoom_range]
elif len(zoom_range) == 2:
self.zoom_range = [zoom_range[0], zoom_range[1]]
else:
raise Exception('zoom_range should be a float or '
'a tuple or list of two floats. '
'Received arg: ', zoom_range)
def flow(self, X, y=None, batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='jpeg'):
return NumpyArrayIterator(
X, y, self,
batch_size=batch_size, shuffle=shuffle, seed=seed,
dim_ordering=self.dim_ordering,
save_to_dir=save_to_dir, save_prefix=save_prefix, save_format=save_format)
def flow_from_directory(self, directory,
target_size=(256, 256), color_mode='rgb',
classes=None, class_mode='categorical',
batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='jpeg'):
return DirectoryIterator(
directory, self,
target_size=target_size, color_mode=color_mode,
classes=classes, class_mode=class_mode,
dim_ordering=self.dim_ordering,
batch_size=batch_size, shuffle=shuffle, seed=seed,
save_to_dir=save_to_dir, save_prefix=save_prefix, save_format=save_format)
def standardize(self, x):
if self.rescale:
x *= self.rescale
# x is a single image, so it doesn't have image number at index 0
img_channel_index = self.channel_index - 1
if self.samplewise_center:
x -= np.mean(x, axis=img_channel_index, keepdims=True)
if self.samplewise_std_normalization:
x /= (np.std(x, axis=img_channel_index, keepdims=True) + 1e-7)
if self.featurewise_center:
x -= self.mean
if self.featurewise_std_normalization:
x /= (self.std + 1e-7)
if self.zca_whitening:
flatx = np.reshape(x, (x.size))
whitex = np.dot(flatx, self.principal_components)
x = np.reshape(whitex, (x.shape[0], x.shape[1], x.shape[2]))
return x
def random_transform(self, x):
# x is a single image, so it doesn't have image number at index 0
img_row_index = self.row_index - 1
img_col_index = self.col_index - 1
img_channel_index = self.channel_index - 1
# use composition of homographies to generate final transform that needs to be applied
if self.rotation_range:
theta = np.pi / 180 * np.random.uniform(-self.rotation_range, self.rotation_range)
else:
theta = 0
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
if self.height_shift_range:
tx = np.random.uniform(-self.height_shift_range, self.height_shift_range) * x.shape[img_row_index]
else:
tx = 0
if self.width_shift_range:
ty = np.random.uniform(-self.width_shift_range, self.width_shift_range) * x.shape[img_col_index]
else:
ty = 0
translation_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
if self.shear_range:
shear = np.random.uniform(-self.shear_range, self.shear_range)
else:
shear = 0
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(self.zoom_range[0], self.zoom_range[1], 2)
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
transform_matrix = np.dot(np.dot(np.dot(rotation_matrix, translation_matrix), shear_matrix), zoom_matrix)
h, w = x.shape[img_row_index], x.shape[img_col_index]
transform_matrix = transform_matrix_offset_center(transform_matrix, h, w)
x = apply_transform(x, transform_matrix, img_channel_index,
fill_mode=self.fill_mode, cval=self.cval)
if self.channel_shift_range != 0:
x = random_channel_shift(x, self.channel_shift_range, img_channel_index)
if self.horizontal_flip:
if np.random.random() < 0.5:
x = flip_axis(x, img_col_index)
if self.vertical_flip:
if np.random.random() < 0.5:
x = flip_axis(x, img_row_index)
# TODO:
# channel-wise normalization
# barrel/fisheye
return x
def fit(self, X,
augment=False,
rounds=1,
seed=None):
'''Required for featurewise_center, featurewise_std_normalization
and zca_whitening.
# Arguments
X: Numpy array, the data to fit on.
augment: whether to fit on randomly augmented samples
rounds: if `augment`,
how many augmentation passes to do over the data
seed: random seed.
'''
X = np.copy(X)
if augment:
aX = np.zeros(tuple([rounds * X.shape[0]] + list(X.shape)[1:]))
for r in range(rounds):
for i in range(X.shape[0]):
aX[i + r * X.shape[0]] = self.random_transform(X[i])
X = aX
if self.featurewise_center:
self.mean = np.mean(X, axis=0)
X -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(X, axis=0)
X /= (self.std + 1e-7)
if self.zca_whitening:
flatX = np.reshape(X, (X.shape[0], X.shape[1] * X.shape[2] * X.shape[3]))
sigma = np.dot(flatX.T, flatX) / flatX.shape[1]
U, S, V = linalg.svd(sigma)
self.principal_components = np.dot(np.dot(U, np.diag(1. / np.sqrt(S + 10e-7))), U.T)
class Iterator(object):
def __init__(self, N, batch_size, shuffle, seed):
self.N = N
self.batch_size = batch_size
self.shuffle = shuffle
self.batch_index = 0
self.total_batches_seen = 0
self.lock = threading.Lock()
self.index_generator = self._flow_index(N, batch_size, shuffle, seed)
def reset(self):
self.batch_index = 0
def _flow_index(self, N, batch_size=32, shuffle=False, seed=None):
# ensure self.batch_index is 0
self.reset()
while 1:
if self.batch_index == 0:
index_array = np.arange(N)
if shuffle:
if seed is not None:
np.random.seed(seed + self.total_batches_seen)
index_array = np.random.permutation(N)
current_index = (self.batch_index * batch_size) % N
if N >= current_index + batch_size:
current_batch_size = batch_size
self.batch_index += 1
else:
current_batch_size = N - current_index
self.batch_index = 0
self.total_batches_seen += 1
yield (index_array[current_index: current_index + current_batch_size],
current_index, current_batch_size)
def __iter__(self):
# needed if we want to do something like:
# for x, y in data_gen.flow(...):
return self
def __next__(self, *args, **kwargs):
return self.next(*args, **kwargs)
class NumpyArrayIterator(Iterator):
def __init__(self, X, y, image_data_generator,
batch_size=32, shuffle=False, seed=None,
dim_ordering='default',
save_to_dir=None, save_prefix='', save_format='jpeg'):
if y is not None and len(X) != len(y):
raise Exception('X (images tensor) and y (labels) '
'should have the same length. '
'Found: X.shape = %s, y.shape = %s' % (np.asarray(X).shape, np.asarray(y).shape))
if dim_ordering == 'default':
dim_ordering = K.image_dim_ordering()
self.X = X
self.y = y
self.image_data_generator = image_data_generator
self.dim_ordering = dim_ordering
self.save_to_dir = save_to_dir
self.save_prefix = save_prefix
self.save_format = save_format
super(NumpyArrayIterator, self).__init__(X.shape[0], batch_size, shuffle, seed)
def next(self):
# for python 2.x.
# Keeps under lock only the mechanism which advances
# the indexing of each batch
# see http://anandology.com/blog/using-iterators-and-generators/
with self.lock:
index_array, current_index, current_batch_size = next(self.index_generator)
# The transformation of images is not under thread lock so it can be done in parallel
batch_x = np.zeros(tuple([current_batch_size] + list(self.X.shape)[1:]))
for i, j in enumerate(index_array):
x = self.X[j]
x = self.image_data_generator.random_transform(x.astype('float32'))
x = self.image_data_generator.standardize(x)
batch_x[i] = x
if self.save_to_dir:
for i in range(current_batch_size):
img = array_to_img(batch_x[i], self.dim_ordering, scale=True)
fname = '{prefix}_{index}_{hash}.{format}'.format(prefix=self.save_prefix,
index=current_index + i,
hash=np.random.randint(1e4),
format=self.save_format)
img.save(os.path.join(self.save_to_dir, fname))
if self.y is None:
return batch_x
batch_y = self.y[index_array]
return batch_x, batch_y
class DirectoryIterator(Iterator):
def __init__(self, directory, image_data_generator,
target_size=(256, 256), color_mode='rgb',
dim_ordering='default',
classes=None, class_mode='categorical',
batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='jpeg'):
if dim_ordering == 'default':
dim_ordering = K.image_dim_ordering()
self.directory = directory
self.image_data_generator = image_data_generator
self.target_size = tuple(target_size)
if color_mode not in {'rgb', 'grayscale'}:
raise ValueError('Invalid color mode:', color_mode,
'; expected "rgb" or "grayscale".')
self.color_mode = color_mode
self.dim_ordering = dim_ordering
if self.color_mode == 'rgb':
if self.dim_ordering == 'tf':
self.image_shape = self.target_size + (3,)
else:
self.image_shape = (3,) + self.target_size
else:
if self.dim_ordering == 'tf':
self.image_shape = self.target_size + (1,)
else:
self.image_shape = (1,) + self.target_size
self.classes = classes
if class_mode not in {'categorical', 'binary', 'sparse', None}:
raise ValueError('Invalid class_mode:', class_mode,
'; expected one of "categorical", '
'"binary", "sparse", or None.')
self.class_mode = class_mode
self.save_to_dir = save_to_dir
self.save_prefix = save_prefix
self.save_format = save_format
white_list_formats = {'png', 'jpg', 'jpeg', 'bmp'}
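
        # Expected layout: one sub-directory per class under `directory`,
        # e.g. directory/cats/*.jpg and directory/dogs/*.jpg.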
# first, count the number of samples and classes
self.nb_sample = 0
if not classes:
classes = []
for subdir in sorted(os.listdir(directory)):
if os.path.isdir(os.path.join(directory, subdir)):
classes.append(subdir)
self.nb_class = len(classes)
self.class_indices = dict(zip(classes, range(len(classes))))
for subdir in classes:
subpath = os.path.join(directory, subdir)
for fname in os.listdir(subpath):
is_valid = False
for extension in white_list_formats:
if fname.lower().endswith('.' + extension):
is_valid = True
break
if is_valid:
self.nb_sample += 1
print('Found %d images belonging to %d classes.' % (self.nb_sample, self.nb_class))
# second, build an index of the images in the different class subfolders
self.filenames = []
self.classes = np.zeros((self.nb_sample,), dtype='int32')
i = 0
for subdir in classes:
subpath = os.path.join(directory, subdir)
for fname in os.listdir(subpath):
is_valid = False
for extension in white_list_formats:
if fname.lower().endswith('.' + extension):
is_valid = True
break
if is_valid:
self.classes[i] = self.class_indices[subdir]
self.filenames.append(os.path.join(subdir, fname))
i += 1
super(DirectoryIterator, self).__init__(self.nb_sample, batch_size, shuffle, seed)
def next(self):
with self.lock:
index_array, current_index, current_batch_size = next(self.index_generator)
# The transformation of images is not under thread lock so it can be done in parallel
batch_x = np.zeros((current_batch_size,) + self.image_shape)
grayscale = self.color_mode == 'grayscale'
# build batch of image data
for i, j in enumerate(index_array):
fname = self.filenames[j]
img = load_img(os.path.join(self.directory, fname), grayscale=grayscale, target_size=self.target_size)
x = img_to_array(img, dim_ordering=self.dim_ordering)
x = self.image_data_generator.random_transform(x)
x = self.image_data_generator.standardize(x)
batch_x[i] = x
# optionally save augmented images to disk for debugging purposes
if self.save_to_dir:
for i in range(current_batch_size):
img = array_to_img(batch_x[i], self.dim_ordering, scale=True)
fname = '{prefix}_{index}_{hash}.{format}'.format(prefix=self.save_prefix,
index=current_index + i,
hash=np.random.randint(1e4),
format=self.save_format)
img.save(os.path.join(self.save_to_dir, fname))
# build batch of labels
if self.class_mode == 'sparse':
batch_y = self.classes[index_array]
elif self.class_mode == 'binary':
batch_y = self.classes[index_array].astype('float32')
elif self.class_mode == 'categorical':
batch_y = np.zeros((len(batch_x), self.nb_class), dtype='float32')
for i, label in enumerate(self.classes[index_array]):
batch_y[i, label] = 1.
else:
return batch_x
return batch_x, batch_y<|fim▁end|> | |
<|file_name|>ws_iris_RidgeClassifier_oracle_code_gen.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("RidgeClassifier" , "iris" , "oracle")<|fim▁end|> | |
<|file_name|>app.component.spec.ts<|end_file_name|><|fim▁begin|>import { LayoutModule } from '@angular/cdk/layout';
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { MatButtonModule } from '@angular/material/button';
import { MatIconModule } from '@angular/material/icon';
import { MatListModule } from '@angular/material/list';
import { MatSidenavModule } from '@angular/material/sidenav';
import { MatToolbarModule } from '@angular/material/toolbar';
import { RouterTestingModule } from '@angular/router/testing';
import { AppComponent } from './app.component';
describe('AppComponent', () => {
beforeEach(async(() => {
TestBed.configureTestingModule({
imports: [
RouterTestingModule,
NoopAnimationsModule,
LayoutModule,
MatButtonModule,
MatIconModule,
MatListModule,
MatSidenavModule,
MatToolbarModule,
],
declarations: [
AppComponent
],
}).compileComponents();
}));
it('should create the app', () => {
const fixture = TestBed.createComponent(AppComponent);
const app = fixture.componentInstance;
expect(app).toBeTruthy();
});
<|fim▁hole|> it(`should have as title 'angular-launchpod'`, () => {
const fixture = TestBed.createComponent(AppComponent);
const app = fixture.componentInstance;
expect(app.title).toEqual('angular-launchpod');
});
it('should render title', () => {
const fixture = TestBed.createComponent(AppComponent);
fixture.detectChanges();
const compiled = fixture.nativeElement;
expect(compiled.querySelector('.content span').textContent).toContain('angular-launchpod app is running!');
});
});<|fim▁end|> | |
<|file_name|>spps_conversion_resultats.py<|end_file_name|><|fim▁begin|>#----------------------------------------------------------------------
# I-SIMPA (http://i-simpa.ifsttar.fr). This file is part of I-SIMPA.
#
# I-SIMPA is a GUI for 3D numerical sound propagation modelling dedicated
# to scientific acoustic simulations.
# Copyright (C) 2007-2014 - IFSTTAR - Judicael Picaut, Nicolas Fortin
#
# I-SIMPA is free software; you can redistribute it and/or modify
<|fim▁hole|># the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# I-SIMPA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA or
# see <http://www.gnu.org/licenses/>
#
# For more information, please consult: <http://i-simpa.ifsttar.fr> or
# send an email to [email protected]
#
# To contact Ifsttar, write to Ifsttar, 14-20 Boulevard Newton
# Cite Descartes, Champs sur Marne F-77447 Marne la Vallee Cedex 2 FRANCE
# or write to [email protected]
# ----------------------------------------------------------------------
# -*- coding: cp1252 -*-
# Title: CSV conversion script
# Description: Converts the output files produced by SPPS into CSV files
# Date: April 2009
# Authors: N. Fortin and J. Picaut
# Contact: [email protected]
# Load the required libraries
import os
import libsimpa as ls
def GabeToCsv(filepath,csvpath):
"""
    Convert a GABE (Generic Array Binary Exchange) file to CSV (Comma Separated Values) format
"""
    # Instantiate the reader
reader=ls.Gabe_rw()
    # Read the GABE file
if reader.Load(filepath):
        # Convert the data to a list
data=reader.ToList()
        # Transpose the data (columns become rows)
data=zip(*data)
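        # e.g. (illustrative) under Python 2, zip(*[[1, 2], [3, 4], [5, 6]])
        # returns [(1, 3, 5), (2, 4, 6)], so each GABE column becomes one CSV row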
        # Write the data out
fich=open(csvpath,'w')
for line in data:
firstcol=True
for col in line:
if not firstcol:
fich.write(",")
else:
firstcol=False
                fich.write(str(col)) # Write the cell value
            fich.write("\n") # End the CSV line
        fich.close()<|fim▁end|>
<|file_name|>Range.js<|end_file_name|><|fim▁begin|>/* @flow */
import {
InputTypeComposer,
type ObjectTypeComposerFieldConfigAsObjectDefinition,
} from 'graphql-compose';
import { getTypeName, type CommonOpts, desc } from '../../../utils';
import { getAllAsFieldConfigMap } from '../../Commons/FieldNames';
export function getRangeITC<TContext>(
opts: CommonOpts<TContext>
): InputTypeComposer<TContext> | ObjectTypeComposerFieldConfigAsObjectDefinition<any, any> {
  const name = getTypeName('QueryRange', opts);
  const description = desc(
    `
    Matches documents with fields that have terms within a certain range.
    [Documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-range-query.html)
    `
  );
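  // Illustrative example (not from the original file): this input type models
  // Elasticsearch range queries of the shape
  //   { range: { age: { gte: 10, lte: 20, boost: 2.0 } } }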
const subName = getTypeName('QueryRangeSettings', opts);
const fields = getAllAsFieldConfigMap(
opts,
opts.getOrCreateITC(subName, () => ({
name: subName,
fields: {
gt: 'JSON',
gte: 'JSON',
lt: 'JSON',
lte: 'JSON',
boost: 'Float',
relation: 'String',
},
}))
);
if (typeof fields === 'object') {
return opts.getOrCreateITC(name, () => ({
name,
description,
fields,
}));
}
return {
type: 'JSON',
description,
};
}<|fim▁end|>
<|file_name|>0005_auto_20170808_1832.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-08 18:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [<|fim▁hole|> ('content', '0004_ediimages'),
]
operations = [
migrations.AlterModelOptions(
name='ediimages',
options={'ordering': ('sort_order',)},
),
migrations.AddField(
model_name='ediimages',
name='sort_order',
field=models.PositiveIntegerField(default=0),
),
]<|fim▁end|> | |
<|file_name|>background.js<|end_file_name|><|fim▁begin|>/**
* No Coin - Stop coin miners in your browser
**
* @author Rafael Keramidas <ker.af>
* @license MIT
* @source https://github.com/keraf/NoCoin
*/
// Config
const defaultConfig = {
toggle: true,
whitelist: [{
domain: 'cnhv.co',
expiration: 0,
}],
};
const localConfig = JSON.parse(localStorage.getItem('config'));
let config = {
...defaultConfig,
...localConfig,
};
/**
* Functions
*/
const saveConfig = () => {
localStorage.setItem('config', JSON.stringify(config));
};
const changeToggleIcon = (isEnabled) => {
chrome.browserAction.setIcon({
path: `img/${isEnabled ? 'logo_enabled' : 'logo_disabled'}.png`,
});
};
const getDomain = (url) => {
const match = url.match(/:\/\/(.[^/]+)/);
return match ? match[1] : '';
};
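// e.g. (illustrative) getDomain('https://example.com/path') === 'example.com';
// an explicit port is kept: getDomain('http://localhost:8080/') === 'localhost:8080'.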
const getTimestamp = () => {
return Math.floor(Date.now() / 1000);
};
const isDomainWhitelisted = (domain) => {
if (!domain) return false;
const domainInfo = config.whitelist.find(w => w.domain === domain);
if (domainInfo) {
if (domainInfo.expiration !== 0 && domainInfo.expiration <= getTimestamp()) {
removeDomainFromWhitelist(domain);
return false;
}
return true;
}
return false;
};
const addDomainToWhitelist = (domain, time) => {
if (!domain) return;
// Make sure the domain is not already whitelisted before adding it
if (!isDomainWhitelisted(domain)) {
config.whitelist = [
...config.whitelist,
{
domain: domain,
expiration: time === 0 ? 0 : getTimestamp() + (time * 60),
},<|fim▁hole|> ];
saveConfig();
}
};
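// Expiration sketch (illustrative): whitelisting 'example.com' with time = 30 (minutes)
// stores { domain: 'example.com', expiration: getTimestamp() + 1800 }, while time = 0
// stores expiration 0, which isDomainWhitelisted() treats as "never expires".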
const removeDomainFromWhitelist = (domain) => {
if (!domain) return;
config.whitelist = config.whitelist.filter(w => w.domain !== domain);
saveConfig();
};
const runBlocker = (blacklist) => {
const blacklistedUrls = blacklist.split('\n');
chrome.webRequest.onBeforeRequest.addListener(details => {
// Globally paused
if (!config.toggle) {
return { cancel: false };
}
// Is domain white listed
if (isDomainWhitelisted(domains[details.tabId])) {
return { cancel: false };
}
return { cancel: true };
}, {
urls: blacklistedUrls
}, ['blocking']);
};
const runFallbackBlocker = () => {
fetch(chrome.runtime.getURL('blacklist.txt'))
.then(resp => {
resp.text().then(text => runBlocker(text));
});
};
/**
* Main
*/
let domains = [];
// Updating domain for synchronous checking in onBeforeRequest
chrome.tabs.onUpdated.addListener((tabId, changeInfo, tab) => {
domains[tabId] = getDomain(tab.url);
});
chrome.tabs.onRemoved.addListener((tabId) => {
delete domains[tabId];
});
// Run with the right icon
if (!config.toggle) {
changeToggleIcon(false);
}
// Load the blacklist and run the blocker
const blacklist = 'https://raw.githubusercontent.com/keraf/NoCoin/master/src/blacklist.txt';
fetch(blacklist)
.then(resp => {
if (resp.status === 200) {
resp.text().then((text) => {
if (text === '') {
runFallbackBlocker();
} else {
runBlocker(text);
}
});
} else {
runFallbackBlocker();
}
})
.catch(err => {
runFallbackBlocker();
});
// Communication with the popup and content scripts
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
switch (message.type) {
case 'GET_STATE':
sendResponse({
whitelisted: isDomainWhitelisted(domains[message.tabId]),
toggle: config.toggle,
});
break;
case 'TOGGLE':
config.toggle = !config.toggle;
saveConfig();
changeToggleIcon(config.toggle);
sendResponse(config.toggle);
break;
case 'WHITELIST': {
if (message.whitelisted) {
                removeDomainFromWhitelist(domains[message.tabId]);
} else {
addDomainToWhitelist(domains[message.tabId], message.time);
}
sendResponse(!message.whitelisted);
break;
}
}
});<|fim▁end|> | |
<|file_name|>test_worker_comm.py<|end_file_name|><|fim▁begin|># This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import os
from twisted.cred import credentials
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.endpoints import clientFromString
from twisted.python import log
from twisted.python import util
from twisted.spread import pb
from twisted.trial import unittest
import buildbot
from buildbot import config
from buildbot import pbmanager
from buildbot import worker
from buildbot.process import botmaster
from buildbot.process import builder
from buildbot.process import factory
from buildbot.test.fake import fakemaster
from buildbot.test.util.misc import TestReactorMixin
from buildbot.util.eventual import eventually
from buildbot.worker import manager as workermanager
PKI_DIR = util.sibpath(__file__, 'pki')
class FakeWorkerForBuilder(pb.Referenceable):
"""
Fake worker-side WorkerForBuilder object
"""
class FakeWorkerWorker(pb.Referenceable):
"""
Fake worker-side Worker object
@ivar master_persp: remote perspective on the master
"""
def __init__(self, callWhenBuilderListSet):
self.callWhenBuilderListSet = callWhenBuilderListSet
self.master_persp = None
self._detach_deferreds = []
self._detached = False<|fim▁hole|> def waitForDetach(self):
if self._detached:
return defer.succeed(None)
d = defer.Deferred()
self._detach_deferreds.append(d)
return d
def setMasterPerspective(self, persp):
self.master_persp = persp
# clear out master_persp on disconnect
def clear_persp():
self.master_persp = None
persp.broker.notifyOnDisconnect(clear_persp)
def fire_deferreds():
self._detached = True
self._detach_deferreds, deferreds = None, self._detach_deferreds
for d in deferreds:
d.callback(None)
persp.broker.notifyOnDisconnect(fire_deferreds)
def remote_print(self, message):
log.msg("WORKER-SIDE: remote_print(%r)" % (message,))
def remote_getWorkerInfo(self):
return {
'info': 'here',
'worker_commands': {
'x': 1,
},
'numcpus': 1,
'none': None,
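            # The byte literals below are UTF-8 test data: they decode to the
            # Japanese strings "テスト" ("test") and "リリーステスト" ("release test").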
'os_release': b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode(),
b'\xe3\x83\xaa\xe3\x83\xaa\xe3\x83\xbc\xe3\x82\xb9\xe3'
b'\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode():
b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode(),
}
def remote_getVersion(self):
return buildbot.version
def remote_getCommands(self):
return {'x': 1}
def remote_setBuilderList(self, builder_info):
builder_names = [n for n, dir in builder_info]
slbuilders = [FakeWorkerForBuilder() for n in builder_names]
eventually(self.callWhenBuilderListSet)
return dict(zip(builder_names, slbuilders))
class FakeBuilder(builder.Builder):
def attached(self, worker, commands):
return defer.succeed(None)
def detached(self, worker):
pass
def getOldestRequestTime(self):
return 0
def maybeStartBuild(self):
return defer.succeed(None)
class MyWorker(worker.Worker):
def attached(self, conn):
self.detach_d = defer.Deferred()
return super().attached(conn)
def detached(self):
super().detached()
self.detach_d, d = None, self.detach_d
d.callback(None)
class TestWorkerComm(unittest.TestCase, TestReactorMixin):
"""
Test handling of connections from workers as integrated with
- Twisted Spread
- real TCP connections.
- PBManager
@ivar master: fake build master
@ivar pbamanger: L{PBManager} instance
@ivar botmaster: L{BotMaster} instance
@ivar worker: master-side L{Worker} instance
@ivar workerworker: worker-side L{FakeWorkerWorker} instance
@ivar port: TCP port to connect to
@ivar server_connection_string: description string for the server endpoint
@ivar client_connection_string_tpl: description string template for the client
endpoint (expects to passed 'port')
@ivar endpoint: endpoint controlling the outbound connection
from worker to master
"""
@defer.inlineCallbacks
def setUp(self):
self.setUpTestReactor()
self.master = fakemaster.make_master(self, wantMq=True, wantData=True,
wantDb=True)
# set the worker port to a loopback address with unspecified
# port
self.pbmanager = self.master.pbmanager = pbmanager.PBManager()
yield self.pbmanager.setServiceParent(self.master)
# remove the fakeServiceParent from fake service hierarchy, and replace
# by a real one
yield self.master.workers.disownServiceParent()
self.workers = self.master.workers = workermanager.WorkerManager(
self.master)
yield self.workers.setServiceParent(self.master)
self.botmaster = botmaster.BotMaster()
yield self.botmaster.setServiceParent(self.master)
self.master.botmaster = self.botmaster
self.master.data.updates.workerConfigured = lambda *a, **k: None
yield self.master.startService()
self.buildworker = None
self.port = None
self.workerworker = None
self.endpoint = None
self.broker = None
self._detach_deferreds = []
# patch in our FakeBuilder for the regular Builder class
self.patch(botmaster, 'Builder', FakeBuilder)
self.server_connection_string = "tcp:0:interface=127.0.0.1"
self.client_connection_string_tpl = "tcp:host=127.0.0.1:port={port}"
def tearDown(self):
if self.broker:
del self.broker
if self.endpoint:
del self.endpoint
deferreds = self._detach_deferreds + [
self.pbmanager.stopService(),
self.botmaster.stopService(),
self.workers.stopService(),
]
# if the worker is still attached, wait for it to detach, too
if self.buildworker and self.buildworker.detach_d:
deferreds.append(self.buildworker.detach_d)
return defer.gatherResults(deferreds)
@defer.inlineCallbacks
def addWorker(self, **kwargs):
"""
Create a master-side worker instance and add it to the BotMaster
@param **kwargs: arguments to pass to the L{Worker} constructor.
"""
self.buildworker = MyWorker("testworker", "pw", **kwargs)
# reconfig the master to get it set up
new_config = self.master.config
new_config.protocols = {"pb": {"port": self.server_connection_string}}
new_config.workers = [self.buildworker]
new_config.builders = [config.BuilderConfig(
name='bldr',
workername='testworker', factory=factory.BuildFactory())]
yield self.botmaster.reconfigServiceWithBuildbotConfig(new_config)
yield self.workers.reconfigServiceWithBuildbotConfig(new_config)
# as part of the reconfig, the worker registered with the pbmanager, so
# get the port it was assigned
self.port = self.buildworker.registration.getPBPort()
def connectWorker(self, waitForBuilderList=True):
"""
Connect a worker the master via PB
@param waitForBuilderList: don't return until the setBuilderList has
been called
@returns: L{FakeWorkerWorker} and a Deferred that will fire when it
is detached; via deferred
"""
factory = pb.PBClientFactory()
creds = credentials.UsernamePassword(b"testworker", b"pw")
setBuilderList_d = defer.Deferred()
workerworker = FakeWorkerWorker(
lambda: setBuilderList_d.callback(None))
login_d = factory.login(creds, workerworker)
@login_d.addCallback
def logged_in(persp):
workerworker.setMasterPerspective(persp)
# set up to hear when the worker side disconnects
workerworker.detach_d = defer.Deferred()
persp.broker.notifyOnDisconnect(
lambda: workerworker.detach_d.callback(None))
self._detach_deferreds.append(workerworker.detach_d)
return workerworker
self.endpoint = clientFromString(
reactor, self.client_connection_string_tpl.format(port=self.port))
connected_d = self.endpoint.connect(factory)
dlist = [connected_d, login_d]
if waitForBuilderList:
dlist.append(setBuilderList_d)
d = defer.DeferredList(dlist,
consumeErrors=True, fireOnOneErrback=True)
d.addCallback(lambda _: workerworker)
return d
def workerSideDisconnect(self, worker):
"""Disconnect from the worker side"""
worker.master_persp.broker.transport.loseConnection()
@defer.inlineCallbacks
def test_connect_disconnect(self):
"""Test a single worker connecting and disconnecting."""
yield self.addWorker()
# connect
worker = yield self.connectWorker()
# disconnect
self.workerSideDisconnect(worker)
# wait for the resulting detach
yield worker.waitForDetach()
@defer.inlineCallbacks
def test_tls_connect_disconnect(self):
"""Test with TLS or SSL endpoint.
According to the deprecation note for the SSL client endpoint,
the TLS endpoint is supported from Twistd 16.0.
TODO add certificate verification (also will require some conditionals
on various versions, including PyOpenSSL, service_identity. The CA used
to generate the testing cert is in ``PKI_DIR/ca``
"""
def escape_colon(path):
# on windows we can't have \ as it serves as the escape character for :
return path.replace('\\', '/').replace(':', '\\:')
self.server_connection_string = (
"ssl:port=0:certKey={pub}:privateKey={priv}:" +
"interface=127.0.0.1").format(
pub=escape_colon(os.path.join(PKI_DIR, '127.0.0.1.crt')),
priv=escape_colon(os.path.join(PKI_DIR, '127.0.0.1.key')))
self.client_connection_string_tpl = "ssl:host=127.0.0.1:port={port}"
yield self.addWorker()
# connect
worker = yield self.connectWorker()
# disconnect
self.workerSideDisconnect(worker)
# wait for the resulting detach
yield worker.waitForDetach()
@defer.inlineCallbacks
def test_worker_info(self):
yield self.addWorker()
worker = yield self.connectWorker()
props = self.buildworker.info
# check worker info passing
self.assertEqual(props.getProperty("info"),
"here")
# check worker info passing with UTF-8
self.assertEqual(props.getProperty("os_release"),
b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode())
self.assertEqual(props.getProperty(b'\xe3\x83\xaa\xe3\x83\xaa\xe3\x83\xbc\xe3\x82'
b'\xb9\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode()),
b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode())
self.assertEqual(props.getProperty("none"), None)
self.assertEqual(props.getProperty("numcpus"), 1)
self.workerSideDisconnect(worker)
yield worker.waitForDetach()
@defer.inlineCallbacks
def _test_duplicate_worker(self):
yield self.addWorker()
# connect first worker
worker1 = yield self.connectWorker()
# connect second worker; this should fail
try:
yield self.connectWorker(waitForBuilderList=False)
connect_failed = False
except Exception:
connect_failed = True
self.assertTrue(connect_failed)
# disconnect both and wait for that to percolate
self.workerSideDisconnect(worker1)
yield worker1.waitForDetach()
# flush the exception logged for this on the master
self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1)
@defer.inlineCallbacks
def _test_duplicate_worker_old_dead(self):
yield self.addWorker()
# connect first worker
worker1 = yield self.connectWorker()
# monkeypatch that worker to fail with PBConnectionLost when its
# remote_print method is called
def remote_print(message):
worker1.master_persp.broker.transport.loseConnection()
raise pb.PBConnectionLost("fake!")
worker1.remote_print = remote_print
# connect second worker; this should succeed, and the old worker
# should be disconnected.
worker2 = yield self.connectWorker()
# disconnect both and wait for that to percolate
self.workerSideDisconnect(worker2)
yield worker1.waitForDetach()
# flush the exception logged for this on the worker
self.assertEqual(len(self.flushLoggedErrors(pb.PBConnectionLost)), 1)<|fim▁end|> | |
<|file_name|>method.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
# Method lookup
Method lookup can be rather complex due to the interaction of a number
of factors, such as self types, autoderef, trait lookup, etc. The
algorithm is divided into two parts: candidate collection and
candidate selection.
## Candidate collection
A `Candidate` is a method item that might plausibly be the method
being invoked. Candidates are grouped into two kinds, inherent and
extension. Inherent candidates are those that are derived from the
type of the receiver itself. So, if you have a receiver of some
nominal type `Foo` (e.g., a struct), any methods defined within an
impl like `impl Foo` are inherent methods. Nothing needs to be
imported to use an inherent method, they are associated with the type
itself (note that inherent impls can only be defined in the same
module as the type itself).
Inherent candidates are not always derived from impls. If you have a
trait instance, such as a value of type `@ToStr`, then the trait
methods (`to_str()`, in this case) are inherently associated with it.
Another case is type parameters, in which case the methods of their
bounds are inherent.
Extension candidates are derived from imported traits. If I have the
trait `ToStr` imported, and I call `to_str()` on a value of type `T`,
then we will go off to find out whether there is an impl of `ToStr`
for `T`. These kinds of method calls are called "extension methods".
They can be defined in any module, not only the one that defined `T`.
Furthermore, you must import the trait to call such a method.
For better or worse, we currently give weight to inherent methods over
extension methods during candidate selection (below).
## Candidate selection
Once we know the set of candidates, we can go off and try to select
which one is actually being called. We do this by taking the type of
the receiver, let's call it R, and checking whether it matches against
the expected receiver type for each of the collected candidates. We
first check for inherent candidates and see whether we get exactly one
match (zero means keep searching, more than one is an error). If so,
we return that as the candidate. Otherwise we search the extension
candidates in the same way.
If we find no matching candidate at all, we proceed to auto-deref the
receiver type and search again. We keep doing that until we cannot
auto-deref any longer. At each step, we also check for candidates
based on "autoptr", which if the current type is `T`, checks for `&mut
T`, `&const T`, and `&T` receivers. Finally, at the very end, we will
also try autoslice, which converts `~[]` to `&[]` (there is no point
at trying autoslice earlier, because no autoderefable type is also
sliceable).
## Why two phases?
You might wonder why we first collect the candidates and then select.
Both the inherent candidate collection and the candidate selection
proceed by progressively deref'ing the receiver type, after all. The
answer is that two phases are needed to elegantly deal with explicit
self. After all, if there is an impl for the type `Foo`, it can
define a method with the type `@self`, which means that it expects a
receiver of type `@Foo`. If we have a receiver of type `@Foo`, but we
waited to search for that impl until we have deref'd the `@` away and
obtained the type `Foo`, we would never match this method.
<|fim▁hole|>*/
use middle::resolve;
use middle::ty::*;
use middle::ty;
use middle::typeck::check::{FnCtxt, impl_self_ty};
use middle::typeck::check::{structurally_resolved_type};
use middle::typeck::check::vtable::VtableContext;
use middle::typeck::check::vtable;
use middle::typeck::check;
use middle::typeck::infer;
use middle::typeck::{method_map_entry, method_origin, method_param};
use middle::typeck::{method_self, method_static, method_trait, method_super};
use middle::typeck::check::regionmanip::replace_bound_regions_in_fn_sig;
use util::common::indenter;
use std::hashmap::HashSet;
use std::result;
use std::uint;
use std::vec;
use extra::list::Nil;
use syntax::ast::{def_id, sty_value, sty_region, sty_box};
use syntax::ast::{sty_uniq, sty_static, node_id};
use syntax::ast::{m_const, m_mutbl, m_imm};
use syntax::ast;
use syntax::ast_map;
#[deriving(Eq)]
pub enum CheckTraitsFlag {
CheckTraitsOnly,
CheckTraitsAndInherentMethods,
}
#[deriving(Eq)]
pub enum AutoderefReceiverFlag {
AutoderefReceiver,
DontAutoderefReceiver,
}
pub fn lookup(
fcx: @mut FnCtxt,
// In a call `a.b::<X, Y, ...>(...)`:
expr: @ast::expr, // The expression `a.b(...)`.
self_expr: @ast::expr, // The expression `a`.
callee_id: node_id, /* Where to store `a.b`'s type,
* also the scope of the call */
m_name: ast::ident, // The ident `b`.
self_ty: ty::t, // The type of `a`.
supplied_tps: &[ty::t], // The list of types X, Y, ... .
deref_args: check::DerefArgs, // Whether we autopointer first.
check_traits: CheckTraitsFlag, // Whether we check traits only.
autoderef_receiver: AutoderefReceiverFlag)
-> Option<method_map_entry> {
let impl_dups = @mut HashSet::new();
let lcx = LookupContext {
fcx: fcx,
expr: expr,
self_expr: self_expr,
callee_id: callee_id,
m_name: m_name,
supplied_tps: supplied_tps,
impl_dups: impl_dups,
inherent_candidates: @mut ~[],
extension_candidates: @mut ~[],
deref_args: deref_args,
check_traits: check_traits,
autoderef_receiver: autoderef_receiver,
};
let mme = lcx.do_lookup(self_ty);
debug!("method lookup for %s yielded %?", expr.repr(fcx.tcx()), mme);
return mme;
}
pub struct LookupContext<'self> {
fcx: @mut FnCtxt,
expr: @ast::expr,
self_expr: @ast::expr,
callee_id: node_id,
m_name: ast::ident,
supplied_tps: &'self [ty::t],
impl_dups: @mut HashSet<def_id>,
inherent_candidates: @mut ~[Candidate],
extension_candidates: @mut ~[Candidate],
deref_args: check::DerefArgs,
check_traits: CheckTraitsFlag,
autoderef_receiver: AutoderefReceiverFlag,
}
/**
* A potential method that might be called, assuming the receiver
* is of a suitable type.
*/
pub struct Candidate {
rcvr_ty: ty::t,
rcvr_substs: ty::substs,
method_ty: @ty::Method,
origin: method_origin,
}
impl<'self> LookupContext<'self> {
pub fn do_lookup(&self, self_ty: ty::t) -> Option<method_map_entry> {
let self_ty = structurally_resolved_type(self.fcx,
self.self_expr.span,
self_ty);
debug!("do_lookup(self_ty=%s, expr=%s, self_expr=%s)",
self.ty_to_str(self_ty),
self.expr.repr(self.tcx()),
self.self_expr.repr(self.tcx()));
// Prepare the list of candidates
self.push_inherent_candidates(self_ty);
self.push_extension_candidates();
let mut enum_dids = ~[];
let mut self_ty = self_ty;
let mut autoderefs = 0;
loop {
debug!("loop: self_ty=%s autoderefs=%u",
self.ty_to_str(self_ty), autoderefs);
match self.deref_args {
check::DontDerefArgs => {
match self.search_for_autoderefd_method(self_ty,
autoderefs) {
Some(mme) => { return Some(mme); }
None => {}
}
match self.search_for_autoptrd_method(self_ty,
autoderefs) {
Some(mme) => { return Some(mme); }
None => {}
}
}
check::DoDerefArgs => {
match self.search_for_autoptrd_method(self_ty,
autoderefs) {
Some(mme) => { return Some(mme); }
None => {}
}
match self.search_for_autoderefd_method(self_ty,
autoderefs) {
Some(mme) => { return Some(mme); }
None => {}
}
}
}
// Don't autoderef if we aren't supposed to.
if self.autoderef_receiver == DontAutoderefReceiver {
break;
}
// Otherwise, perform autoderef.
match self.deref(self_ty, &mut enum_dids) {
None => { break; }
Some(ty) => {
self_ty = ty;
autoderefs += 1;
}
}
}
self.search_for_autosliced_method(self_ty, autoderefs)
}
pub fn deref(&self, ty: ty::t, enum_dids: &mut ~[ast::def_id])
-> Option<ty::t> {
match ty::get(ty).sty {
ty_enum(did, _) => {
// Watch out for newtype'd enums like "enum t = @T".
// See discussion in typeck::check::do_autoderef().
if enum_dids.iter().any_(|x| x == &did) {
return None;
}
enum_dids.push(did);
}
_ => {}
}
match ty::deref(self.tcx(), ty, false) {
None => None,
Some(t) => {
Some(structurally_resolved_type(self.fcx,
self.self_expr.span,
t.ty))
}
}
}
// ______________________________________________________________________
// Candidate collection (see comment at start of file)
pub fn push_inherent_candidates(&self, self_ty: ty::t) {
/*!
* Collect all inherent candidates into
* `self.inherent_candidates`. See comment at the start of
* the file. To find the inherent candidates, we repeatedly
* deref the self-ty to find the "base-type". So, for
* example, if the receiver is @@C where `C` is a struct type,
* we'll want to find the inherent impls for `C`.
*/
let mut enum_dids = ~[];
let mut self_ty = self_ty;
loop {
match get(self_ty).sty {
ty_param(p) => {
self.push_inherent_candidates_from_param(self_ty, p);
}
ty_trait(did, ref substs, store, _, _) => {
self.push_inherent_candidates_from_trait(
self_ty, did, substs, store);
self.push_inherent_impl_candidates_for_type(did);
}
ty_self(self_did) => {
// Call is of the form "self.foo()" and appears in one
// of a trait's default method implementations.
self.push_inherent_candidates_from_self(
self_ty, self_did);
}
ty_enum(did, _) | ty_struct(did, _) => {
if self.check_traits == CheckTraitsAndInherentMethods {
self.push_inherent_impl_candidates_for_type(did);
}
}
_ => { /* No inherent methods in these types */ }
}
// n.b.: Generally speaking, we only loop if we hit the
// fallthrough case in the match above. The exception
// would be newtype enums.
self_ty = match self.deref(self_ty, &mut enum_dids) {
None => { return; }
Some(ty) => { ty }
}
}
}
pub fn push_extension_candidates(&self) {
// If the method being called is associated with a trait, then
// find all the impls of that trait. Each of those are
// candidates.
let trait_map: &mut resolve::TraitMap = &mut self.fcx.ccx.trait_map;
let opt_applicable_traits = trait_map.find(&self.expr.id);
for opt_applicable_traits.iter().advance |applicable_traits| {
for applicable_traits.iter().advance |trait_did| {
// Look for explicit implementations.
let opt_impl_infos =
self.fcx.ccx.coherence_info.extension_methods.find(trait_did);
for opt_impl_infos.iter().advance |impl_infos| {
for impl_infos.iter().advance |impl_info| {
self.push_candidates_from_impl(
self.extension_candidates, *impl_info);
}
}
}
}
}
pub fn push_inherent_candidates_from_param(&self,
rcvr_ty: ty::t,
param_ty: param_ty) {
debug!("push_inherent_candidates_from_param(param_ty=%?)",
param_ty);
let _indenter = indenter();
let tcx = self.tcx();
let mut next_bound_idx = 0; // count only trait bounds
let type_param_def = match tcx.ty_param_defs.find(¶m_ty.def_id.node) {
Some(t) => t,
None => {
tcx.sess.span_bug(
self.expr.span,
fmt!("No param def for %?", param_ty));
}
};
for ty::each_bound_trait_and_supertraits(tcx, type_param_def.bounds)
|bound_trait_ref|
{
let this_bound_idx = next_bound_idx;
next_bound_idx += 1;
let trait_methods = ty::trait_methods(tcx, bound_trait_ref.def_id);
let pos = {
match trait_methods.iter().position_(|m| {
m.explicit_self != ast::sty_static &&
m.ident == self.m_name })
{
Some(pos) => pos,
None => {
debug!("trait doesn't contain method: %?",
bound_trait_ref.def_id);
loop; // check next trait or bound
}
}
};
let method = trait_methods[pos];
let cand = Candidate {
rcvr_ty: rcvr_ty,
rcvr_substs: copy bound_trait_ref.substs,
method_ty: method,
origin: method_param(
method_param {
trait_id: bound_trait_ref.def_id,
method_num: pos,
param_num: param_ty.idx,
bound_num: this_bound_idx,
})
};
debug!("pushing inherent candidate for param: %?", cand);
self.inherent_candidates.push(cand);
}
}
pub fn push_inherent_candidates_from_trait(&self,
self_ty: ty::t,
did: def_id,
substs: &ty::substs,
store: ty::TraitStore) {
debug!("push_inherent_candidates_from_trait(did=%s, substs=%s)",
self.did_to_str(did),
substs_to_str(self.tcx(), substs));
let _indenter = indenter();
let tcx = self.tcx();
let ms = ty::trait_methods(tcx, did);
let index = match ms.iter().position_(|m| m.ident == self.m_name) {
Some(i) => i,
None => { return; } // no method with the right name
};
let method = ms[index];
/* FIXME(#5762) we should transform the vstore in accordance
with the self type
match method.self_type {
ast::sty_region(_) => {
return; // inapplicable
}
ast::sty_region(_) => vstore_slice(r)
ast::sty_box(_) => vstore_box, // NDM mutability, as per #5762
ast::sty_uniq(_) => vstore_uniq
}
*/
// It is illegal to invoke a method on a trait instance that
// refers to the `self` type. Nonetheless, we substitute
// `trait_ty` for `self` here, because it allows the compiler
// to soldier on. An error will be reported should this
// candidate be selected if the method refers to `self`.
//
// NB: `confirm_candidate()` also relies upon this substitution
// for Self.
let rcvr_substs = substs {
self_ty: Some(self_ty),
../*bad*/copy *substs
};
self.inherent_candidates.push(Candidate {
rcvr_ty: self_ty,
rcvr_substs: rcvr_substs,
method_ty: method,
origin: method_trait(did, index, store)
});
}
pub fn push_inherent_candidates_from_self(&self,
self_ty: ty::t,
did: def_id) {
struct MethodInfo {
method_ty: @ty::Method,
trait_def_id: ast::def_id,
index: uint,
trait_ref: @ty::TraitRef
}
let tcx = self.tcx();
// First, try self methods
let mut method_info: Option<MethodInfo> = None;
let methods = ty::trait_methods(tcx, did);
match methods.iter().position_(|m| m.ident == self.m_name) {
Some(i) => {
method_info = Some(MethodInfo {
method_ty: methods[i],
index: i,
trait_def_id: did,
trait_ref: ty::lookup_trait_def(tcx, did).trait_ref
});
}
None => ()
}
// No method found yet? Check each supertrait
if method_info.is_none() {
for ty::trait_supertraits(tcx, did).iter().advance |trait_ref| {
let supertrait_methods =
ty::trait_methods(tcx, trait_ref.def_id);
match supertrait_methods.iter().position_(|m| m.ident == self.m_name) {
Some(i) => {
method_info = Some(MethodInfo {
method_ty: supertrait_methods[i],
index: i,
trait_def_id: trait_ref.def_id,
trait_ref: *trait_ref
});
break;
}
None => ()
}
}
}
match method_info {
Some(ref info) => {
// We've found a method -- return it
let origin = if did == info.trait_def_id {
method_self(info.trait_def_id, info.index)
} else {
method_super(info.trait_def_id, info.index)
};
self.inherent_candidates.push(Candidate {
rcvr_ty: self_ty,
rcvr_substs: copy info.trait_ref.substs,
method_ty: info.method_ty,
origin: origin
});
}
_ => return
}
}
pub fn push_inherent_impl_candidates_for_type(&self, did: def_id) {
let opt_impl_infos =
self.fcx.ccx.coherence_info.inherent_methods.find(&did);
for opt_impl_infos.iter().advance |impl_infos| {
for impl_infos.iter().advance |impl_info| {
self.push_candidates_from_impl(
self.inherent_candidates, *impl_info);
}
}
}
pub fn push_candidates_from_impl(&self,
candidates: &mut ~[Candidate],
impl_info: &resolve::Impl) {
if !self.impl_dups.insert(impl_info.did) {
return; // already visited
}
debug!("push_candidates_from_impl: %s %s %s",
self.m_name.repr(self.tcx()),
impl_info.ident.repr(self.tcx()),
impl_info.methods.map(|m| m.ident).repr(self.tcx()));
let idx = {
match impl_info.methods.iter().position_(|m| m.ident == self.m_name) {
Some(idx) => idx,
None => { return; } // No method with the right name.
}
};
let method = ty::method(self.tcx(), impl_info.methods[idx].did);
// determine the `self` of the impl with fresh
// variables for each parameter:
let location_info = &vtable::location_info_for_expr(self.self_expr);
let vcx = VtableContext {
ccx: self.fcx.ccx,
infcx: self.fcx.infcx()
};
let ty::ty_param_substs_and_ty {
substs: impl_substs,
ty: impl_ty
} = impl_self_ty(&vcx, location_info, impl_info.did);
candidates.push(Candidate {
rcvr_ty: impl_ty,
rcvr_substs: impl_substs,
method_ty: method,
origin: method_static(method.def_id)
});
}
// ______________________________________________________________________
// Candidate selection (see comment at start of file)
pub fn search_for_autoderefd_method(&self,
self_ty: ty::t,
autoderefs: uint)
-> Option<method_map_entry> {
let (self_ty, autoadjust) =
self.consider_reborrow(self_ty, autoderefs);
match self.search_for_method(self_ty) {
None => None,
Some(mme) => {
debug!("(searching for autoderef'd method) writing \
adjustment (%u) to %d",
autoderefs,
self.self_expr.id);
self.fcx.write_adjustment(self.self_expr.id, @autoadjust);
Some(mme)
}
}
}
pub fn consider_reborrow(&self,
self_ty: ty::t,
autoderefs: uint)
-> (ty::t, ty::AutoAdjustment) {
/*!
*
* In the event that we are invoking a method with a receiver
* of a borrowed type like `&T`, `&mut T`, or `&mut [T]`,
* we will "reborrow" the receiver implicitly. For example, if
* you have a call `r.inc()` and where `r` has type `&mut T`,
* then we treat that like `(&mut *r).inc()`. This avoids
* consuming the original pointer.
*
* You might think that this would be a natural byproduct of
* the auto-deref/auto-ref process. This is true for `@mut T`
* but not for an `&mut T` receiver. With `@mut T`, we would
* begin by testing for methods with a self type `@mut T`,
* then autoderef to `T`, then autoref to `&mut T`. But with
* an `&mut T` receiver the process begins with `&mut T`, only
* without any autoadjustments.
*/
let tcx = self.tcx();
return match ty::get(self_ty).sty {
ty::ty_rptr(_, self_mt) if default_method_hack(self_mt) => {
(self_ty,
ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: None}))
}
ty::ty_rptr(_, self_mt) => {
let region =
self.infcx().next_region_var(
infer::Autoref(self.expr.span));
(ty::mk_rptr(tcx, region, self_mt),
ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs+1,
autoref: Some(ty::AutoPtr(region, self_mt.mutbl))}))
}
ty::ty_evec(self_mt, vstore_slice(_)) => {
let region =
self.infcx().next_region_var(
infer::Autoref(self.expr.span));
(ty::mk_evec(tcx, self_mt, vstore_slice(region)),
ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: Some(ty::AutoBorrowVec(region, self_mt.mutbl))}))
}
_ => {
(self_ty,
ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: None}))
}
};
fn default_method_hack(self_mt: ty::mt) -> bool {
// FIXME(#6129). Default methods can't deal with autoref.
//
// I am a horrible monster and I pray for death. Currently
// the default method code fails when you try to reborrow
// because it is not handling types correctly. In lieu of
// fixing that, I am introducing this horrible hack. - ndm
self_mt.mutbl == m_imm && ty::type_is_self(self_mt.ty)
}
}
pub fn search_for_autosliced_method(&self,
self_ty: ty::t,
autoderefs: uint)
-> Option<method_map_entry> {
/*!
*
* Searches for a candidate by converting things like
* `~[]` to `&[]`. */
let tcx = self.tcx();
match ty::get(self_ty).sty {
ty_evec(mt, vstore_box) |
ty_evec(mt, vstore_uniq) |
ty_evec(mt, vstore_slice(_)) | // NDM(#3148)
ty_evec(mt, vstore_fixed(_)) => {
// First try to borrow to a slice
let entry = self.search_for_some_kind_of_autorefd_method(
AutoBorrowVec, autoderefs, [m_const, m_imm, m_mutbl],
|m,r| ty::mk_evec(tcx,
ty::mt {ty:mt.ty, mutbl:m},
vstore_slice(r)));
if entry.is_some() { return entry; }
// Then try to borrow to a slice *and* borrow a pointer.
self.search_for_some_kind_of_autorefd_method(
AutoBorrowVecRef, autoderefs, [m_const, m_imm, m_mutbl],
|m,r| {
let slice_ty = ty::mk_evec(tcx,
ty::mt {ty:mt.ty, mutbl:m},
vstore_slice(r));
// NB: we do not try to autoref to a mutable
// pointer. That would be creating a pointer
// to a temporary pointer (the borrowed
// slice), so any update the callee makes to
// it can't be observed.
ty::mk_rptr(tcx, r, ty::mt {ty:slice_ty, mutbl:m_imm})
})
}
ty_estr(vstore_box) |
ty_estr(vstore_uniq) |
ty_estr(vstore_fixed(_)) => {
let entry = self.search_for_some_kind_of_autorefd_method(
AutoBorrowVec, autoderefs, [m_imm],
|_m,r| ty::mk_estr(tcx, vstore_slice(r)));
if entry.is_some() { return entry; }
self.search_for_some_kind_of_autorefd_method(
AutoBorrowVecRef, autoderefs, [m_imm],
|m,r| {
let slice_ty = ty::mk_estr(tcx, vstore_slice(r));
ty::mk_rptr(tcx, r, ty::mt {ty:slice_ty, mutbl:m})
})
}
ty_trait(*) | ty_closure(*) => {
// NDM---eventually these should be some variant of autoref
None
}
_ => None
}
}
pub fn search_for_autoptrd_method(&self, self_ty: ty::t, autoderefs: uint)
-> Option<method_map_entry> {
/*!
*
* Converts any type `T` to `&M T` where `M` is an
* appropriate mutability.
*/
let tcx = self.tcx();
match ty::get(self_ty).sty {
ty_bare_fn(*) | ty_box(*) | ty_uniq(*) | ty_rptr(*) |
ty_infer(IntVar(_)) |
ty_infer(FloatVar(_)) |
ty_self(_) | ty_param(*) | ty_nil | ty_bot | ty_bool |
ty_int(*) | ty_uint(*) |
ty_float(*) | ty_enum(*) | ty_ptr(*) | ty_struct(*) | ty_tup(*) |
ty_estr(*) | ty_evec(*) | ty_trait(*) | ty_closure(*) => {
self.search_for_some_kind_of_autorefd_method(
AutoPtr, autoderefs, [m_const, m_imm, m_mutbl],
|m,r| ty::mk_rptr(tcx, r, ty::mt {ty:self_ty, mutbl:m}))
}
ty_err => None,
ty_opaque_closure_ptr(_) | ty_unboxed_vec(_) |
ty_opaque_box | ty_type | ty_infer(TyVar(_)) => {
self.bug(fmt!("Unexpected type: %s",
self.ty_to_str(self_ty)));
}
}
}
pub fn search_for_some_kind_of_autorefd_method(
&self,
kind: &fn(Region, ast::mutability) -> ty::AutoRef,
autoderefs: uint,
mutbls: &[ast::mutability],
mk_autoref_ty: &fn(ast::mutability, ty::Region) -> ty::t)
-> Option<method_map_entry> {
// This is hokey. We should have mutability inference as a
// variable. But for now, try &const, then &, then &mut:
let region =
self.infcx().next_region_var(
infer::Autoref(self.expr.span));
for mutbls.iter().advance |mutbl| {
let autoref_ty = mk_autoref_ty(*mutbl, region);
match self.search_for_method(autoref_ty) {
None => {}
Some(mme) => {
self.fcx.write_adjustment(
self.self_expr.id,
@ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: Some(kind(region, *mutbl))}));
return Some(mme);
}
}
}
return None;
}
pub fn search_for_method(&self, rcvr_ty: ty::t)
-> Option<method_map_entry> {
debug!("search_for_method(rcvr_ty=%s)", self.ty_to_str(rcvr_ty));
let _indenter = indenter();
// I am not sure that inherent methods should have higher
// priority, but it is necessary ATM to handle some of the
// existing code.
debug!("searching inherent candidates");
match self.consider_candidates(rcvr_ty, self.inherent_candidates) {
None => {}
Some(mme) => {
return Some(mme);
}
}
debug!("searching extension candidates");
match self.consider_candidates(rcvr_ty, self.extension_candidates) {
None => {
return None;
}
Some(mme) => {
return Some(mme);
}
}
}
pub fn consider_candidates(&self,
rcvr_ty: ty::t,
candidates: &mut ~[Candidate])
-> Option<method_map_entry> {
let relevant_candidates: ~[Candidate] =
candidates.iter().transform(|c| copy *c).
filter(|c| self.is_relevant(rcvr_ty, c)).collect();
let relevant_candidates = self.merge_candidates(relevant_candidates);
if relevant_candidates.len() == 0 {
return None;
}
if relevant_candidates.len() > 1 {
self.tcx().sess.span_err(
self.expr.span,
"multiple applicable methods in scope");
for uint::range(0, relevant_candidates.len()) |idx| {
self.report_candidate(idx, &relevant_candidates[idx].origin);
}
}
Some(self.confirm_candidate(rcvr_ty, &relevant_candidates[0]))
}
pub fn merge_candidates(&self, candidates: &[Candidate]) -> ~[Candidate] {
let mut merged = ~[];
let mut i = 0;
while i < candidates.len() {
let candidate_a = /*bad*/copy candidates[i];
let mut skip = false;
let mut j = i + 1;
while j < candidates.len() {
let candidate_b = &candidates[j];
debug!("attempting to merge %? and %?",
candidate_a, candidate_b);
let candidates_same = match (&candidate_a.origin,
&candidate_b.origin) {
(&method_param(ref p1), &method_param(ref p2)) => {
let same_trait = p1.trait_id == p2.trait_id;
let same_method = p1.method_num == p2.method_num;
let same_param = p1.param_num == p2.param_num;
// The bound number may be different because
// multiple bounds may lead to the same trait
// impl
same_trait && same_method && same_param
}
_ => false
};
if candidates_same {
skip = true;
break;
}
j += 1;
}
i += 1;
if skip {
                // There is more than one of these and we need only one
loop;
} else {
merged.push(candidate_a);
}
}
return merged;
}
pub fn confirm_candidate(&self, rcvr_ty: ty::t, candidate: &Candidate)
-> method_map_entry {
let tcx = self.tcx();
let fty = self.fn_ty_from_origin(&candidate.origin);
debug!("confirm_candidate(expr=%s, candidate=%s, fty=%s)",
self.expr.repr(tcx),
self.cand_to_str(candidate),
self.ty_to_str(fty));
self.enforce_trait_instance_limitations(fty, candidate);
self.enforce_drop_trait_limitations(candidate);
// static methods should never have gotten this far:
assert!(candidate.method_ty.explicit_self != sty_static);
let transformed_self_ty = match candidate.origin {
method_trait(*) => {
match candidate.method_ty.explicit_self {
sty_region(*) => {
// FIXME(#5762) again, preserving existing
// behavior here which (for &self) desires
// &@Trait where @Trait is the type of the
// receiver. Here we fetch the method's
// transformed_self_ty which will be something
// like &'a Self. We then perform a
// substitution which will replace Self with
// @Trait.
let t = candidate.method_ty.transformed_self_ty.get();
ty::subst(tcx, &candidate.rcvr_substs, t)
}
_ => {
candidate.rcvr_ty
}
}
}
_ => {
let t = candidate.method_ty.transformed_self_ty.get();
ty::subst(tcx, &candidate.rcvr_substs, t)
}
};
// Determine the values for the type parameters of the method.
// If they were not explicitly supplied, just construct fresh
// type variables.
let num_supplied_tps = self.supplied_tps.len();
let num_method_tps = candidate.method_ty.generics.type_param_defs.len();
let m_substs = {
if num_supplied_tps == 0u {
self.fcx.infcx().next_ty_vars(num_method_tps)
} else if num_method_tps == 0u {
tcx.sess.span_err(
self.expr.span,
"this method does not take type parameters");
self.fcx.infcx().next_ty_vars(num_method_tps)
} else if num_supplied_tps != num_method_tps {
tcx.sess.span_err(
self.expr.span,
"incorrect number of type \
parameters given for this method");
self.fcx.infcx().next_ty_vars(num_method_tps)
} else {
self.supplied_tps.to_owned()
}
};
// Construct the full set of type parameters for the method,
// which is equal to the class tps + the method tps.
let all_substs = substs {
tps: vec::append(/*bad*/copy candidate.rcvr_substs.tps,
m_substs),
../*bad*/copy candidate.rcvr_substs
};
// Compute the method type with type parameters substituted
debug!("fty=%s all_substs=%s",
self.ty_to_str(fty),
ty::substs_to_str(tcx, &all_substs));
let fty = ty::subst(tcx, &all_substs, fty);
debug!("after subst, fty=%s", self.ty_to_str(fty));
// Replace any bound regions that appear in the function
// signature with region variables
let bare_fn_ty = match ty::get(fty).sty {
ty::ty_bare_fn(ref f) => copy *f,
ref s => {
tcx.sess.span_bug(
self.expr.span,
fmt!("Invoking method with non-bare-fn ty: %?", s));
}
};
let (_, opt_transformed_self_ty, fn_sig) =
replace_bound_regions_in_fn_sig(
tcx, @Nil, Some(transformed_self_ty), &bare_fn_ty.sig,
|br| self.fcx.infcx().next_region_var(
infer::BoundRegionInFnCall(self.expr.span, br)));
let transformed_self_ty = opt_transformed_self_ty.get();
let fty = ty::mk_bare_fn(tcx, ty::BareFnTy {sig: fn_sig, ..bare_fn_ty});
debug!("after replacing bound regions, fty=%s", self.ty_to_str(fty));
let self_mode = get_mode_from_explicit_self(candidate.method_ty.explicit_self);
// before we only checked whether self_ty could be a subtype
// of rcvr_ty; now we actually make it so (this may cause
// variables to unify etc). Since we checked beforehand, and
// nothing has changed in the meantime, this unification
// should never fail.
match self.fcx.mk_subty(false, infer::Misc(self.self_expr.span),
rcvr_ty, transformed_self_ty) {
result::Ok(_) => (),
result::Err(_) => {
self.bug(fmt!("%s was a subtype of %s but now is not?",
self.ty_to_str(rcvr_ty),
self.ty_to_str(transformed_self_ty)));
}
}
self.fcx.write_ty(self.callee_id, fty);
self.fcx.write_substs(self.callee_id, all_substs);
method_map_entry {
self_ty: rcvr_ty,
self_mode: self_mode,
explicit_self: candidate.method_ty.explicit_self,
origin: candidate.origin,
}
}
pub fn enforce_trait_instance_limitations(&self,
method_fty: ty::t,
candidate: &Candidate) {
/*!
*
* There are some limitations to calling functions through a
         * trait instance, because (a) the self type is not known
* (that's the whole point of a trait instance, after all, to
* obscure the self type) and (b) the call must go through a
* vtable and hence cannot be monomorphized. */
match candidate.origin {
method_static(*) | method_param(*) |
method_self(*) | method_super(*) => {
return; // not a call to a trait instance
}
method_trait(*) => {}
}
if ty::type_has_self(method_fty) {
self.tcx().sess.span_err(
self.expr.span,
"cannot call a method whose type contains a \
self-type through a boxed trait");
}
if candidate.method_ty.generics.has_type_params() {
self.tcx().sess.span_err(
self.expr.span,
"cannot call a generic method through a boxed trait");
}
}
pub fn enforce_drop_trait_limitations(&self, candidate: &Candidate) {
// No code can call the finalize method explicitly.
let bad;
match candidate.origin {
method_static(method_id) | method_self(method_id, _)
| method_super(method_id, _) => {
bad = self.tcx().destructors.contains(&method_id);
}
method_param(method_param { trait_id: trait_id, _ }) |
method_trait(trait_id, _, _) => {
bad = self.tcx().destructor_for_type.contains_key(&trait_id);
}
}
if bad {
self.tcx().sess.span_err(self.expr.span,
"explicit call to destructor");
}
}
// `rcvr_ty` is the type of the expression. It may be a subtype of a
// candidate method's `self_ty`.
pub fn is_relevant(&self, rcvr_ty: ty::t, candidate: &Candidate) -> bool {
debug!("is_relevant(rcvr_ty=%s, candidate=%s)",
self.ty_to_str(rcvr_ty), self.cand_to_str(candidate));
// Check for calls to object methods. We resolve these differently.
//
// FIXME(#5762)---we don't check that an @self method is only called
// on an @Trait object here and so forth
match candidate.origin {
method_trait(*) => {
match candidate.method_ty.explicit_self {
sty_static | sty_value => {
return false;
}
sty_region(*) => {
// just echoing current behavior here, which treats
// an &self method on an @Trait object as requiring
// an &@Trait receiver (wacky)
}
sty_box(*) | sty_uniq(*) => {
return self.fcx.can_mk_subty(rcvr_ty,
candidate.rcvr_ty).is_ok();
}
};
}
_ => {}
}
let result = match candidate.method_ty.explicit_self {
sty_static => {
debug!("(is relevant?) explicit self is static");
false
}
sty_value => {
debug!("(is relevant?) explicit self is by-value");
self.fcx.can_mk_subty(rcvr_ty, candidate.rcvr_ty).is_ok()
}
sty_region(_, m) => {
debug!("(is relevant?) explicit self is a region");
match ty::get(rcvr_ty).sty {
ty::ty_rptr(_, mt) => {
mutability_matches(mt.mutbl, m) &&
self.fcx.can_mk_subty(mt.ty, candidate.rcvr_ty).is_ok()
}
_ => false
}
}
sty_box(m) => {
debug!("(is relevant?) explicit self is a box");
match ty::get(rcvr_ty).sty {
ty::ty_box(mt) => {
mutability_matches(mt.mutbl, m) &&
self.fcx.can_mk_subty(mt.ty, candidate.rcvr_ty).is_ok()
}
_ => false
}
}
sty_uniq => {
debug!("(is relevant?) explicit self is a unique pointer");
match ty::get(rcvr_ty).sty {
ty::ty_uniq(mt) => {
mutability_matches(mt.mutbl, ast::m_imm) &&
self.fcx.can_mk_subty(mt.ty, candidate.rcvr_ty).is_ok()
}
_ => false
}
}
};
debug!("(is relevant?) %s", if result { "yes" } else { "no" });
return result;
fn mutability_matches(self_mutbl: ast::mutability,
candidate_mutbl: ast::mutability) -> bool {
//! True if `self_mutbl <: candidate_mutbl`
match (self_mutbl, candidate_mutbl) {
(_, m_const) => true,
(m_mutbl, m_mutbl) => true,
(m_imm, m_imm) => true,
(m_mutbl, m_imm) => false,
(m_imm, m_mutbl) => false,
(m_const, m_imm) => false,
(m_const, m_mutbl) => false,
}
}
}
pub fn fn_ty_from_origin(&self, origin: &method_origin) -> ty::t {
return match *origin {
method_static(did) => {
ty::lookup_item_type(self.tcx(), did).ty
}
method_param(ref mp) => {
type_of_trait_method(self.tcx(), mp.trait_id, mp.method_num)
}
method_trait(did, idx, _) | method_self(did, idx) |
method_super(did, idx) => {
type_of_trait_method(self.tcx(), did, idx)
}
};
fn type_of_trait_method(tcx: ty::ctxt,
trait_did: def_id,
method_num: uint) -> ty::t {
let trait_methods = ty::trait_methods(tcx, trait_did);
ty::mk_bare_fn(tcx, copy trait_methods[method_num].fty)
}
}
pub fn report_candidate(&self, idx: uint, origin: &method_origin) {
match *origin {
method_static(impl_did) => {
self.report_static_candidate(idx, impl_did)
}
method_param(ref mp) => {
self.report_param_candidate(idx, (*mp).trait_id)
}
method_trait(trait_did, _, _) | method_self(trait_did, _)
| method_super(trait_did, _) => {
self.report_trait_candidate(idx, trait_did)
}
}
}
pub fn report_static_candidate(&self, idx: uint, did: def_id) {
let span = if did.crate == ast::local_crate {
match self.tcx().items.find(&did.node) {
Some(&ast_map::node_method(m, _, _)) => m.span,
_ => fail!("report_static_candidate: bad item %?", did)
}
} else {
self.expr.span
};
self.tcx().sess.span_note(
span,
fmt!("candidate #%u is `%s`",
(idx+1u),
ty::item_path_str(self.tcx(), did)));
}
pub fn report_param_candidate(&self, idx: uint, did: def_id) {
self.tcx().sess.span_note(
self.expr.span,
fmt!("candidate #%u derives from the bound `%s`",
(idx+1u),
ty::item_path_str(self.tcx(), did)));
}
pub fn report_trait_candidate(&self, idx: uint, did: def_id) {
self.tcx().sess.span_note(
self.expr.span,
fmt!("candidate #%u derives from the type of the receiver, \
which is the trait `%s`",
(idx+1u),
ty::item_path_str(self.tcx(), did)));
}
pub fn infcx(&self) -> @mut infer::InferCtxt {
self.fcx.inh.infcx
}
pub fn tcx(&self) -> ty::ctxt {
self.fcx.tcx()
}
pub fn ty_to_str(&self, t: ty::t) -> ~str {
self.fcx.infcx().ty_to_str(t)
}
pub fn cand_to_str(&self, cand: &Candidate) -> ~str {
fmt!("Candidate(rcvr_ty=%s, rcvr_substs=%s, origin=%?)",
self.ty_to_str(cand.rcvr_ty),
ty::substs_to_str(self.tcx(), &cand.rcvr_substs),
cand.origin)
}
pub fn did_to_str(&self, did: def_id) -> ~str {
ty::item_path_str(self.tcx(), did)
}
pub fn bug(&self, s: ~str) -> ! {
self.tcx().sess.bug(s)
}
}
pub fn get_mode_from_explicit_self(explicit_self: ast::explicit_self_) -> SelfMode {
match explicit_self {
sty_value => ty::ByRef,
_ => ty::ByCopy,
}
}<|fim▁end|> | |
<|file_name|>asteroids.py<|end_file_name|><|fim▁begin|># program template for Spaceship
import simplegui
import math
import random
# globals for user interface
WIDTH = 800
HEIGHT = 600
score = 0
lives = 3
time = 0
game_mode = 0 # 0 = splash screen, 1 = game mode, 2 = game over
ANGULAR_ACCEL_SCALAR = math.pi / 800.0
ANGULAR_FRICTION = 0.95
LINEAR_ACCEL_SCALAR = 0.25
LINEAR_FRICTION = 0.99
RANDOM_VEL_MAX = 4.0
RANDOM_VEL_MIN = 0.5
RANDOM_ANG_MAX = math.pi / 100.0
BULLET_VEL = 10
SMALL_ROCK_SPEED = 3
class ImageInfo:
def __init__(self, center, size, radius = 0, lifespan = None, animated = False):
self.center = center
self.size = size
self.radius = radius
if lifespan:
self.lifespan = lifespan
else:
self.lifespan = float('inf')
self.animated = animated
def get_center(self):
return self.center
def get_size(self):
return self.size
def get_radius(self):
return self.radius
def get_lifespan(self):
return self.lifespan
def get_animated(self):
return self.animated
# art assets created by Kim Lathrop, may be freely re-used in non-commercial projects, please credit Kim
# debris images - debris1_brown.png, debris2_brown.png, debris3_brown.png, debris4_brown.png
# debris1_blue.png, debris2_blue.png, debris3_blue.png, debris4_blue.png, debris_blend.png
debris_info = ImageInfo([320, 240], [640, 480])
debris_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/debris2_blue.png")
# nebula images - nebula_brown.png, nebula_blue.png
nebula_info = ImageInfo([400, 300], [800, 600])
nebula_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/nebula_blue.f2014.png")
# splash image
splash_info = ImageInfo([200, 150], [400, 300])
splash_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/splash.png")
# ship image
ship_info = ImageInfo([45, 45], [90, 90], 35)
ship_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/double_ship.png")
# missile image - shot1.png, shot2.png, shot3.png
missile_info = ImageInfo([5,5], [10, 10], 3, 75)
missile_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/shot2.png")
# asteroid images - asteroid_blue.png, asteroid_brown.png, asteroid_blend.png
asteroid_info = ImageInfo([45, 45], [90, 90], 40)
asteroid_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/asteroid_blue.png")
# animated explosion - explosion_orange.png, explosion_blue.png, explosion_blue2.png, explosion_alpha.png
explosion_info = ImageInfo([64, 64], [128, 128], 17, 24, True)
explosion_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/explosion_alpha.png")
# sound assets purchased from sounddogs.com, please do not redistribute
soundtrack = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/soundtrack.mp3")
missile_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/missile.mp3")
missile_sound.set_volume(.5)
ship_thrust_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/thrust.mp3")
explosion_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/explosion.mp3")
## CC 3.0 sound file by Jesus Lastra, http://opengameart.org/content/8-bit-pickup-1
extra_life_sound = simplegui.load_sound("http://mwales.net/junk/SFX_Pickup_44.mp3")
# helper functions to handle transformations
def angle_to_vector(ang):
return [math.cos(ang), math.sin(ang)]
def vector_to_angle(v):
return math.atan2(v[1], v[0]) # atan2 takes (y, x) - true inverse of angle_to_vector
def vector_scale(vec, scale):
return [vec[0] * scale, vec[1] * scale]
def vector_add(vec1, vec2):
return [vec1[0] + vec2[0], vec1[1] + vec2[1]]
def dist(p,q):
return math.sqrt((p[0] - q[0]) ** 2+(p[1] - q[1]) ** 2)
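# Quick sanity examples for the helpers above (illustrative values only):
#   angle_to_vector(0)         -> [1.0, 0.0]
#   vector_scale([1, 2], 3)    -> [3, 6]
#   vector_add([1, 2], [3, 4]) -> [4, 6]
#   dist([0, 0], [3, 4])       -> 5.0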
def smallRockExplode(rockInstance):
# Return an explosion sprite
explodeObj = Sprite(rockInstance.get_position(),
(0,0),
random.random() * 2 * math.pi,
0,
explosion_image,
explosion_info,
explosion_sound,
relSize = 0.3)
return explodeObj
def rockExplode(rockInstance, deathBullet):
# Return an explosion sprite
explodeObj = Sprite(rockInstance.get_position(),
(0,0),
random.random() * 2 * math.pi,
0,
explosion_image,
explosion_info,
explosion_sound)
# Create 4 smaller rocks that explode away based on angle bullet came in at
bulletAngle = vector_to_angle(deathBullet.get_velocity())
smallRockAngle = bulletAngle + 45.0 / 360.0 * math.pi * 2.0
for i in range(0,4):
smallRockAngle += math.pi / 2.0
smallRockVel = angle_to_vector(smallRockAngle)
smallRockVel = vector_scale(smallRockVel, SMALL_ROCK_SPEED)
smallRockVel = vector_add(smallRockVel, rockInstance.get_velocity())
randomAngVel = random.random() * RANDOM_ANG_MAX * 4.0 - RANDOM_ANG_MAX
smallRock = Sprite(rockInstance.get_position(),
smallRockVel,
random.random() * 2 * math.pi,
randomAngVel,
asteroid_image,
asteroid_info,
relSize = 0.5)
smallRockList.append(smallRock)
return explodeObj
# Ship class
class Ship:
def __init__(self, pos, vel, angle, image, info, bulletTimer):
self.pos = [pos[0],pos[1]]
self.vel = [vel[0],vel[1]]
self.thrust = False
self.angle = angle
self.angle_vel = 0
self.angle_acc = 0
self.image = image
self.image_center = info.get_center()
self.image_size = info.get_size()
self.radius = info.get_radius()
self.bullet_timer = bulletTimer
self.spawn_bullets = False
self.bullets = []
self.bullet_type = 0
self.weapon_name = {}
self.weapon_name[0] = "Speed Shot"
self.weapon_name[1] = "Spread Shot"
self.weapon_name[2] = "Power Shot"
def get_weapon_name(self):
return self.weapon_name[self.bullet_type]
def draw(self,canvas):
if self.thrust:
canvas.draw_image(self.image,
(self.image_center[0] + self.image_size[0], self.image_center[1]),
self.image_size,
self.pos,
self.image_size,
self.angle)
else:
canvas.draw_image(self.image,
self.image_center,
self.image_size,
self.pos,
self.image_size,
self.angle)
for singleBullets in self.bullets:
singleBullets.draw(canvas)
def update(self):
self.pos = vector_add(self.pos, self.vel)
# Position should wrap around the screen
self.pos = [self.pos[0] % WIDTH, self.pos[1] % HEIGHT]
# Handle ship thrust
if self.thrust:
accel = angle_to_vector(self.angle)
accel = vector_scale(accel, LINEAR_ACCEL_SCALAR)
self.vel = vector_add(self.vel, accel)
# Friction against motion
self.vel = vector_scale(self.vel, LINEAR_FRICTION)
self.angle = self.angle + self.angle_vel
self.angle_vel = self.angle_vel + self.angle_acc
self.angle_vel = self.angle_vel * ANGULAR_FRICTION
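# Both friction factors are multiplicative per tick, so speed decays
# exponentially: with LINEAR_FRICTION = 0.99 the linear velocity halves in
# roughly ln(0.5)/ln(0.99) ~= 69 draw frames once thrust is released.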
oldBullets = []
for singleBullets in self.bullets:
if singleBullets.update():
oldBullets.append(singleBullets)
for bulletToDelete in oldBullets:
self.bullets.remove(bulletToDelete)
def process_collisions(self, rockList, smallRockList, explosionList):
global score, lives, extra_life_sound
# Don't change containers while looping through them
shipExplodes = False
rockListCopy = rockList[:]
bulletListCopy = self.bullets[:]
for singleRock in rockListCopy:
for singleBullet in bulletListCopy:
# Collisions of bullets and rocks
if singleBullet.collide(singleRock):
# delete the bullet
self.bullets.remove(singleBullet)
# delete and explode the rock
if singleRock in rockList:
rockList.remove(singleRock)
explosionList.append(rockExplode(singleRock, singleBullet))
print "Rock goes boom"
# increase score , 1-up consideration
self.scorePoint()
# Collisions of rock and ship
if singleRock.collide(self):
#print "Ship goes boom"
shipExplodes = True
smallRockListCopy = smallRockList[:]
bulletListCopy = self.bullets[:]
for singleSmallRock in smallRockListCopy:
for singleBullet in bulletListCopy:
if singleBullet.collide(singleSmallRock):
# delete the bullet
self.bullets.remove(singleBullet)
# delete and explode the rock
if singleSmallRock in smallRockList:
smallRockList.remove(singleSmallRock)
explosionList.append(smallRockExplode(singleSmallRock))
print "Small Rock goes boom"
# increase score , 1-up consideration
self.scorePoint()
# Collisions of rock and ship
if singleSmallRock.collide(self):
#print "Ship goes boom"
shipExplodes = True
if shipExplodes:
self.attemptRespawn(rockList, explosionList)
def scorePoint(self):
global lives, score
score += 1
if ((score % 100) == 0):
print "1-up"
lives += 1
extra_life_sound.rewind()
extra_life_sound.play()
def attemptRespawn(self, rockList, explosionList):
global lives
lives -= 1
if (lives == 0):
game_over()
return
# Find a safe spot to respawn
bestLocation = []
bestLocationClosestRock = 0
for respawnX in range( int(WIDTH / 10), int(WIDTH * .9), 10):<|fim▁hole|> # Determine at this location how close closest rock is
for singleRock in rockList:
distFromRock = dist(potentialLocation, singleRock.get_position())
if (distFromRock < closestRock):
closestRock = distFromRock
for singleRock in smallRockList:
distFromRock = dist(potentialLocation, singleRock.get_position())
if (distFromRock < closestRock):
closestRock = distFromRock
# If the closest rock is farther away than other locations, use this location
if (closestRock > bestLocationClosestRock):
bestLocationClosestRock = closestRock
bestLocation = potentialLocation
# Move ship to new location
shipExplosion = Sprite(self.pos,
(0,0),
random.random() * 2 * math.pi,
0,
explosion_image,
explosion_info,
explosion_sound,
relSize = 3.0)
explosionList.append(shipExplosion)
self.pos = bestLocation
self.vel = [0,0]
self.angle_vel = 0
# Just pass in -1 to rotate right, +1 to rotate left
def rotate(self, angularAcceleration):
self.angle_acc = angularAcceleration * ANGULAR_ACCEL_SCALAR
#print "Alpha =" + str(self.angle_acc)
# Just pass in True to thrust, False to not thrust
def setThrust(self, thrustBool):
global ship_thrust_sound
self.thrust = thrustBool
if thrustBool:
ship_thrust_sound.rewind()
ship_thrust_sound.play()
else:
ship_thrust_sound.pause()
def startShooting(self):
self.spawn_bullets = True
self.bullet_timer.start()
self.spawn_bullet()
def stopShooting(self):
self.spawn_bullets = False
self.bullet_timer.stop()
def change_bullet_type(self):
self.bullet_type = (self.bullet_type + 1) % 3
def set_bullet_type(self, bulletType):
self.bullet_type = bulletType % 3
def get_bullet_type(self):
return self.bullet_type
def spawn_bullet(self):
if (self.bullet_type == 0):
# speed shot
self.make_bullet()
elif (self.bullet_type == 1):
# spread
self.make_bullet(relSpeed=0.5)
self.make_bullet(relAngle=-math.pi * 2 * 30.0 / 360.0,
relSpeed=0.5)
self.make_bullet(relAngle=math.pi * 2 * 30.0 / 360.0,
relSpeed=0.5)
else:
# big bullet
self.make_bullet(relSpeed=0.25,
relSize=3.0,
relLifetime=5.0)
curDirection = angle_to_vector(self.angle)
recoil = vector_scale(curDirection, -1.0)
self.vel = vector_add(self.vel, recoil)
def make_bullet(self, relAngle=0, relSpeed=1.0, relSize=1.0, relLifetime=1.0):
global missile_sound
bulletPos = angle_to_vector(self.angle)
bulletPos = vector_scale(bulletPos, self.image_size[0] / 2)
bulletPos = vector_add(self.pos, bulletPos)
bulletVel = angle_to_vector(self.angle + relAngle)
bulletVel = vector_scale(bulletVel, BULLET_VEL * relSpeed)
bulletVel = vector_add(bulletVel, self.vel)
bulletObj = Sprite(bulletPos,
bulletVel,
self.angle,
0,
missile_image,
missile_info,
missile_sound,
relSize,
relLifetime)
self.bullets.append(bulletObj)
def get_position(self):
return self.pos
def reset(self):
self.pos = [WIDTH / 2, HEIGHT / 2]
self.vel = [0,0]
self.angle = 0
self.bullets = []
def get_radius(self):
return self.radius
def get_velocity(self):
return self.vel
# Sprite class
class Sprite:
def __init__(self, pos, vel, ang, ang_vel, image, info, sound = None, relSize=1.0, relLifetime=1.0):
self.pos = [pos[0],pos[1]]
self.vel = [vel[0],vel[1]]
self.angle = ang
self.angle_vel = ang_vel
self.image = image
self.image_center = info.get_center()
self.image_size = info.get_size()
self.draw_size = vector_scale(self.image_size, relSize)
self.radius = info.get_radius() * relSize
self.lifespan = info.get_lifespan() * relLifetime
self.animated = info.get_animated()
self.age = 0
if sound:
sound.rewind()
sound.play()
def draw(self, canvas):
if self.animated:
frameCenter = vector_add(self.image_center, [self.image_size[0] * self.age,0])
canvas.draw_image(self.image,
frameCenter,
self.image_size,
self.pos,
self.draw_size,
self.angle)
else:
canvas.draw_image(self.image,
self.image_center,
self.image_size,
self.pos,
self.draw_size,
self.angle)
def update(self):
self.pos = vector_add(self.pos, self.vel)
# Position should wrap around the screen
self.pos = [self.pos[0] % WIDTH, self.pos[1] % HEIGHT]
self.angle = self.angle + self.angle_vel
# Age out?
self.age += 1
return (self.age > self.lifespan)
def collide(self, otherObject):
currentDistOfCenters = dist(otherObject.get_position(),
self.pos)
minSafeDistance = (otherObject.get_radius() + \
self.radius) * 0.9
return (currentDistOfCenters < minSafeDistance)
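# The 0.9 factor above shrinks the combined radii slightly, so sprites must
# visibly overlap before a collision registers (slightly forgiving hitboxes).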
def get_position(self):
return self.pos
def get_radius(self):
return self.radius
def get_velocity(self):
return self.vel
def process_sprites(canvas):
global explodeList
# draw ship and sprites
my_ship.draw(canvas)
for singleRock in rockList:
singleRock.draw(canvas)
for smallRock in smallRockList:
smallRock.draw(canvas)
# update ship and sprites
my_ship.update()
for singleRock in rockList:
singleRock.update()
for smallRock in smallRockList:
smallRock.update()
# update explosions
splodeCopy = explodeList[:]
for singleSplosion in splodeCopy:
singleSplosion.draw(canvas)
if singleSplosion.update():
explodeList.remove(singleSplosion)
my_ship.process_collisions(rockList, smallRockList, explodeList)
def draw(canvas):
global time
# animate background
time += 1
wtime = (time / 4) % WIDTH
center = debris_info.get_center()
size = debris_info.get_size()
canvas.draw_image(nebula_image, nebula_info.get_center(), nebula_info.get_size(), [WIDTH / 2, HEIGHT / 2], [WIDTH, HEIGHT])
canvas.draw_image(debris_image, center, size, (wtime - WIDTH / 2, HEIGHT / 2), (WIDTH, HEIGHT))
canvas.draw_image(debris_image, center, size, (wtime + WIDTH / 2, HEIGHT / 2), (WIDTH, HEIGHT))
if game_mode == 1:
process_sprites(canvas)
if ( (game_mode == 1) or (game_mode == 2) ):
canvas.draw_text("Score: " + str(score),
(WIDTH - 250,60),
30,
'White')
canvas.draw_text("Lives: " + str(lives),
(150,60),
30,
'White')
canvas.draw_text("Weapon: " + my_ship.get_weapon_name(),
(WIDTH-400, HEIGHT - 50),
25,
'White',
'monospace')
if game_mode == 0:
canvas.draw_image(splash_image,
splash_info.get_center(),
splash_info.get_size(),
[WIDTH / 2, HEIGHT / 2],
splash_info.get_size())
# timer handler that spawns a rock
def rock_spawner(recurseDepth = 10):
global rockList
if (len(rockList) > 12):
print "Too many rocks"
return
randomX = random.choice(range(0, WIDTH))
randomY = random.choice(range(0, HEIGHT))
#print "Rock + " + str(recurseDepth) + " dist = " + str(dist(my_ship.get_position(), [randomX, randomY]))
if (dist(my_ship.get_position(), [randomX, randomY]) < 150):
print "too close for a rock"
if recurseDepth == 0:
return
else:
rock_spawner(recurseDepth - 1)
return
randomVel = angle_to_vector(random.random() * math.pi * 2.0)
randomVel = vector_scale(randomVel, random.random() * (RANDOM_VEL_MAX - RANDOM_VEL_MIN) + RANDOM_VEL_MIN)
randomAngVel = random.random() * RANDOM_ANG_MAX * 2.0 - RANDOM_ANG_MAX
#print "Spawn rock: [" + str(randomX) + "," + str(randomY) + "] v=" + \
# str(randomVel) + " Alpha=" + str(randomAngVel)
spawnRock = Sprite([randomX, randomY],
randomVel,
random.random() * math.pi * 2.0,
randomAngVel,
asteroid_image,
asteroid_info)
rockList.append(spawnRock)
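# Note: rock_spawner retries (up to 10 recursive attempts) until it finds a
# spawn point at least 150 px away from the ship; if every attempt lands too
# close, this timer tick simply spawns nothing.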
def bullet_spawner():
global my_ship
my_ship.spawn_bullet()
def key_down_handler(key):
global my_ship, game_mode
if (game_mode == 1):
if ( (key == simplegui.KEY_MAP['left']) or (key == simplegui.KEY_MAP['a']) ):
my_ship.rotate(-1)
elif ( (key == simplegui.KEY_MAP['right']) or (key == simplegui.KEY_MAP['d']) ):
my_ship.rotate(1)
elif ( (key == simplegui.KEY_MAP['up']) or (key == simplegui.KEY_MAP['w']) ):
my_ship.setThrust(True)
elif ( (key == simplegui.KEY_MAP['down']) or (key == simplegui.KEY_MAP['s']) ):
pass
elif (key == simplegui.KEY_MAP['space']):
my_ship.startShooting()
elif (key == simplegui.KEY_MAP['1']):
pass
elif (key == simplegui.KEY_MAP['2']):
pass
elif (key == simplegui.KEY_MAP['3']):
pass
elif (game_mode == 0):
if (key == simplegui.KEY_MAP['space']):
start_game()
else:
if (key == simplegui.KEY_MAP['space']):
game_mode = 0
def key_up_handler(key):
global my_ship
if ( (key == simplegui.KEY_MAP['left']) or (key == simplegui.KEY_MAP['a']) ):
my_ship.rotate(0)
elif ( (key == simplegui.KEY_MAP['right']) or (key == simplegui.KEY_MAP['d']) ):
my_ship.rotate(0)
elif ( (key == simplegui.KEY_MAP['up']) or (key == simplegui.KEY_MAP['w']) ):
my_ship.setThrust(False)
elif ( (key == simplegui.KEY_MAP['down']) or (key == simplegui.KEY_MAP['s']) ):
my_ship.change_bullet_type()
elif (key == simplegui.KEY_MAP['space']):
my_ship.stopShooting()
elif (key == simplegui.KEY_MAP['1']):
my_ship.set_bullet_type(0)
elif (key == simplegui.KEY_MAP['2']):
my_ship.set_bullet_type(1)
elif (key == simplegui.KEY_MAP['3']):
my_ship.set_bullet_type(2)
def game_over():
global my_ship, rockList, smallRockList, timer, game_mode, soundtrack
rockList = []
smallRockList = []
timer.stop()
game_mode = 2
soundtrack.pause()
def start_game():
global timer, game_mode, lives, score, soundtrack
my_ship.reset()
timer.start()
game_mode = 1
lives = 3
score = 0
soundtrack.rewind()
soundtrack.play()
def mouse_handler(position):
if (game_mode == 0):
start_game()
# initialize frame
frame = simplegui.create_frame("Asteroids", WIDTH, HEIGHT)
frame.set_keydown_handler(key_down_handler)
frame.set_keyup_handler(key_up_handler)
frame.set_mouseclick_handler(mouse_handler)
frame.add_label("A/D or Left/Right to rotate")
frame.add_label("W or Up to thrust")
frame.add_label("S or Down to change weapon")
frame.add_label("1,2,3 are weapon hot key")
# initialize ship and two sprites
bulletSpawnerTimer = simplegui.create_timer(200, bullet_spawner)
my_ship = Ship([WIDTH / 2, HEIGHT / 2], [0, 0], math.pi, ship_image, ship_info, bulletSpawnerTimer)
rockList = []
smallRockList = []
explodeList = []
# register handlers
frame.set_draw_handler(draw)
timer = simplegui.create_timer(1000.0, rock_spawner)
# get things rolling
frame.start()<|fim▁end|> | for respawnY in range( int(HEIGHT / 10), int(HEIGHT * .9), 10):
closestRock = WIDTH * HEIGHT
potentialLocation = [respawnX, respawnY]
|
<|file_name|>htlc.py<|end_file_name|><|fim▁begin|>import click
from bitshares.amount import Amount
from .decorators import online, unlock
from .main import main, config
from .ui import print_tx
@main.group()
def htlc():
pass
@htlc.command()
@click.argument("to")
@click.argument("amount")
@click.argument("symbol")
@click.option(
"--type", type=click.Choice(["ripemd160", "sha1", "sha256", "hash160"]),
default="sha256", prompt="Hash algorithm", show_default=True,
help="Hash algorithm"
)
@click.option(
"--hash", prompt="Hash (hex string)", hide_input=False, confirmation_prompt=True,
help="Hash value as string of hex digits"
)
@click.option(
"--expiration", default=60 * 60, prompt="Expiration (seconds)",
help="Duration of HTLC in seconds"
)
@click.option(
"--length", help="Length of PREIMAGE (not of hash). Generally OK " +
"to leave this as 0 for unconstrained.", default=0, show_default=True
)
@click.option("--account")
@click.pass_context
@online
@unlock
def create(ctx, to, amount, symbol, type, hash, expiration, length, account):
""" Create an HTLC contract from a hash and lock-time
"""
ctx.blockchain.blocking = True
tx = ctx.blockchain.htlc_create(
Amount(amount, symbol),
to,
hash_type=type,
hash_hex=hash,
expiration=expiration,
account=account,
preimage_length=length
)
tx.pop("trx", None)
print_tx(tx)
results = tx.get("operation_results", {})
if results:
htlc_id = results[0][1]
print("Your htlc_id is: {}".format(htlc_id))
@htlc.command()
@click.argument("to")
@click.argument("amount")
@click.argument("symbol")
@click.option(
"--type", type=click.Choice(["ripemd160", "sha1", "sha256", "hash160"]),
default="sha256", prompt="Hash algorithm", show_default=True,
help="Hash algorithm"
)
@click.option(
"--secret", prompt="Redeem Password", hide_input=True, confirmation_prompt=True,
help="Ascii-text preimage"
)
@click.option("--expiration", default=60 * 60, prompt="Expiration (seconds)",
help="Duration of HTLC in seconds"
)
@click.option(
"--length", help="Length of PREIMAGE (not of hash). Generally OK " +
"to leave this as 0 for unrestricted. If non-zero, must match length " +
"of provided preimage", default=0, show_default=True
)
@click.option("--account")
@click.pass_context
@online
@unlock
def create_from_secret(ctx, to, amount, symbol, type, secret, expiration,
length, account):
"""Create an HTLC contract from a secret preimage
If you are the party choosing the preimage, this version of
htlc_create will compute the hash for you from the supplied
preimage, and create the HTLC with the resulting hash.
"""<|fim▁hole|> raise ValueError("Length must be zero or agree with actual preimage length")
ctx.blockchain.blocking = True
tx = ctx.blockchain.htlc_create(
Amount(amount, symbol),
to,
preimage=secret,
preimage_length=length,
hash_type=type,
expiration=expiration,
account=account,
)
tx.pop("trx", None)
print_tx(tx)
results = tx.get("operation_results", {})
if results:
htlc_id = results[0][1]
print("Your htlc_id is: {}".format(htlc_id))
@htlc.command()
@click.argument("htlc_id")
@click.option(
"--secret", prompt="Redeem Password", hide_input=False, confirmation_prompt=False,
type=str, help="The preimage, as ascii-text, unless --hex is passed"
)
@click.option(
"--hex", is_flag=True, help="Interpret preimage as hex-encoded bytes"
)
@click.option("--account")
@click.pass_context
@online
@unlock
def redeem(ctx, htlc_id, secret, hex, account):
""" Redeem an HTLC contract by providing preimage
"""
encoding = "hex" if hex else "utf-8"
print_tx(ctx.blockchain.htlc_redeem(htlc_id, secret, encoding=encoding,
account=account)
)<|fim▁end|> | if length != 0 and length != len(secret): |
<|file_name|>direct_kinematics.py<|end_file_name|><|fim▁begin|>from math import *
from Vertex import *
#Length of the three subparts of the robot leg
L1 = 51.0
L2 = 63.7
L3 = 93.0
Alpha = 20.69 # Mechanical constraint on theta2
Beta = 5.06 # Mechanical constraint on theta3
# Check that the given float is a valid radian value (between -2*pi and 2*pi)
def radValidation (radian):
return (radian <= 2 * pi and radian >= -2 * pi)
# Direct kinematics for our robot (specific to our leg geometry)
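# With the conventions used below, the forward kinematics reduce to:
#   r = L1 + L2*cos(t2) + L3*cos(t2 + t3)    (planar reach of the leg)
#   x = r*cos(t1),  y = r*sin(t1),  z = L2*sin(t2) + L3*sin(t2 + t3)
# where t1..t3 are the constraint-corrected joint angles in radians.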
def leg_dk(theta1, theta2, theta3, l1=L1, l2=L2, l3=L3, alpha = Alpha, beta = Beta):
Angle = Vertex(theta1,theta2,theta3)
# Adjust theta2 and theta3 according to the mechanical constraints
theta2 += alpha
theta3 = 90-(alpha+beta+theta3)
#print "Angles : " + str(theta1) + " ; " + str(theta2) + " ; " + str(theta3)
theta1=radians(theta1)
theta2=-radians(theta2)
theta3=-radians(theta3)
# Store the sine and cosine values in variables so each is computed only once
c_1 = cos(theta1)
c_2 = cos(theta2)
c_2_3 = cos(theta2 + theta3)
s_1 = sin(theta1)
s_2 = sin(theta2)
s_2_3 = sin(theta2 + theta3)
#calculation of the projections and the differences due to the robot setting<|fim▁hole|> projection = l1 + (l2 * c_2) + (l3 * c_2_3)
#Calculation of the final position
Final = Vertex((projection * c_1), (projection * s_1), ((l2 * s_2) + (l3 * s_2_3)))
return Final
leg_dk(0, 0, 0)
leg_dk(90, 0, 0)
leg_dk(180, -30.501, -67.819)
leg_dk(0, -30.645, 38.501)<|fim▁end|> | |
<|file_name|>mqtt.go<|end_file_name|><|fim▁begin|>// Package mqtt provides an mqtt broker
package mqtt
/*
MQTT is a go-micro Broker for the MQTT protocol.
This can be integrated with any broker that supports MQTT,
including Mosquito and AWS IoT.
Note: Because of the way the MQTT library works, when you
unsubscribe from a topic it will unsubscribe all subscribers.
TODO: Perhaps create a unique client per subscription.
Becomes slightly more difficult to track a disconnect.
*/
import (
"errors"
"fmt"
"math/rand"
"strconv"
"strings"
"time"
"github.com/eclipse/paho.mqtt.golang"
"github.com/micro/go-log"
"github.com/micro/go-micro/broker"
"github.com/micro/go-micro/cmd"
"github.com/micro/go-micro/codec/json"
)
type mqttBroker struct {
addrs []string
opts broker.Options
client mqtt.Client
}
func init() {
cmd.DefaultBrokers["mqtt"] = NewBroker
rand.Seed(time.Now().UnixNano())
}
func setAddrs(addrs []string) []string {
var cAddrs []string
for _, addr := range addrs {
if len(addr) == 0 {
continue
}
var scheme string
var host string
var port int
// split on scheme
parts := strings.Split(addr, "://")
// no scheme
if len(parts) < 2 {
// default tcp scheme
scheme = "tcp"
parts = strings.Split(parts[0], ":")
// got scheme
} else {
scheme = parts[0]
parts = strings.Split(parts[1], ":")
}
// no parts
if len(parts) == 0 {
continue
}
// check scheme
switch scheme {
case "tcp", "ssl", "ws":
default:
continue
}
if len(parts) < 2 {
// no port
host = parts[0]
switch scheme {
case "tcp":
port = 1883<|fim▁hole|> // support secure port
port = 80
default:
port = 1883
}
// got host port
} else {
host = parts[0]
port, _ = strconv.Atoi(parts[1])
}
addr = fmt.Sprintf("%s://%s:%d", scheme, host, port)
cAddrs = append(cAddrs, addr)
}
// default an address if we have none
if len(cAddrs) == 0 {
cAddrs = []string{"tcp://127.0.0.1:1883"}
}
return cAddrs
}
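// setAddrs examples (illustrative inputs, not taken from the original tests):
//   setAddrs([]string{"127.0.0.1"})         -> ["tcp://127.0.0.1:1883"]
//   setAddrs([]string{"ssl://broker:8883"}) -> ["ssl://broker:8883"]
//   setAddrs(nil)                           -> ["tcp://127.0.0.1:1883"]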
func newClient(addrs []string, opts broker.Options) mqtt.Client {
// create opts
cOpts := mqtt.NewClientOptions()
cOpts.SetClientID(fmt.Sprintf("%d%d", time.Now().UnixNano(), rand.Intn(10)))
cOpts.SetCleanSession(false)
// setup tls
if opts.TLSConfig != nil {
cOpts.SetTLSConfig(opts.TLSConfig)
}
// add brokers
for _, addr := range addrs {
cOpts.AddBroker(addr)
}
return mqtt.NewClient(cOpts)
}
func newBroker(opts ...broker.Option) broker.Broker {
options := broker.Options{
// Default codec
Codec: json.Marshaler{},
}
for _, o := range opts {
o(&options)
}
addrs := setAddrs(options.Addrs)
client := newClient(addrs, options)
return &mqttBroker{
opts: options,
client: client,
addrs: addrs,
}
}
func (m *mqttBroker) Options() broker.Options {
return m.opts
}
func (m *mqttBroker) Address() string {
return strings.Join(m.addrs, ",")
}
func (m *mqttBroker) Connect() error {
if m.client.IsConnected() {
return nil
}
if t := m.client.Connect(); t.Wait() && t.Error() != nil {
return t.Error()
}
return nil
}
func (m *mqttBroker) Disconnect() error {
if !m.client.IsConnected() {
return nil
}
m.client.Disconnect(0)
return nil
}
func (m *mqttBroker) Init(opts ...broker.Option) error {
if m.client.IsConnected() {
return errors.New("cannot init while connected")
}
for _, o := range opts {
o(&m.opts)
}
m.addrs = setAddrs(m.opts.Addrs)
m.client = newClient(m.addrs, m.opts)
return nil
}
func (m *mqttBroker) Publish(topic string, msg *broker.Message, opts ...broker.PublishOption) error {
if !m.client.IsConnected() {
return errors.New("not connected")
}
b, err := m.opts.Codec.Marshal(msg)
if err != nil {
return err
}
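// Note: unlike Connect and Subscribe, the publish token below is not
// Wait()-ed on, so errors that only surface asynchronously are not
// reported to the caller (fire-and-forget delivery).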
t := m.client.Publish(topic, 1, false, b)
return t.Error()
}
func (m *mqttBroker) Subscribe(topic string, h broker.Handler, opts ...broker.SubscribeOption) (broker.Subscriber, error) {
if !m.client.IsConnected() {
return nil, errors.New("not connected")
}
var options broker.SubscribeOptions
for _, o := range opts {
o(&options)
}
t := m.client.Subscribe(topic, 1, func(c mqtt.Client, mq mqtt.Message) {
var msg broker.Message
if err := m.opts.Codec.Unmarshal(mq.Payload(), &msg); err != nil {
log.Log(err)
return
}
if err := h(&mqttPub{topic: topic, msg: &msg}); err != nil {
log.Log(err)
}
})
if t.Wait() && t.Error() != nil {
return nil, t.Error()
}
return &mqttSub{
opts: options,
client: m.client,
topic: topic,
}, nil
}
func (m *mqttBroker) String() string {
return "mqtt"
}
func NewBroker(opts ...broker.Option) broker.Broker {
return newBroker(opts...)
}<|fim▁end|> | case "ssl":
port = 8883
case "ws": |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Django settings for blog project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# BASE_DIR = "/Users/jmitch/desktop/blog/src/"
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'sm@g)(fbwdh5wc*xe@j++m9rh^uza5se9a57c5ptwkg*b@ki0x'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['tienduong.pythonanywhere.com', '127.0.0.1', '10.169.3.13', '172.20.10.5', '172.20.10.10']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# third party
'crispy_forms',
'markdown_deux',
'pagedown',
'rest_framework',
'django_tables2',
# local apps
'comments',
'posts',
'pingow_api',
]
CRISPY_TEMPLATE_PACK = 'bootstrap3'
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
LOGIN_URL = "/login/"
ROOT_URLCONF = 'blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',<|fim▁hole|> 'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Singapore'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
#'/var/www/static/',
]
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn")
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "media_cdn")<|fim▁end|> | 'DIRS': [os.path.join(BASE_DIR, 'templates')], |
<|file_name|>hash.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Implementation of SipHash 2-4
*
* See: http://131002.net/siphash/
*
* Consider this as a main "general-purpose" hash for all hashtables: it
* runs at good speed (competitive with spooky and city) and permits
* cryptographically strong _keyed_ hashing. Key your hashtables from a
* CPRNG like rand::rng.
*/
use io;
use io::Writer;
use to_bytes::IterBytes;
use uint;
use vec;
/**
* Types that can meaningfully be hashed should implement this.
*
* Note that this trait is likely to change somewhat as it is
* closely related to `to_bytes::IterBytes` and in almost all
* cases presently the two are (and must be) used together.
*
* In general, most types only need to implement `IterBytes`,
* and the implementation of `Hash` below will take care of
* the rest. This is the recommended approach, since constructing
* good keyed hash functions is quite difficult.
*/
pub trait Hash {
/**
* Compute a "keyed" hash of the value implementing the trait,
* taking `k0` and `k1` as "keying" parameters that randomize or
* otherwise perturb the hash function in such a way that a
* hash table built using such "keyed hash functions" cannot
* be made to perform linearly by an attacker controlling the
* hashtable's contents.
*
* In practical terms, we implement this using the SipHash 2-4
* function and require most types to only implement the
* IterBytes trait, that feeds SipHash.
*/
fn hash_keyed(&self, k0: u64, k1: u64) -> u64;
}
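// Illustrative use (mirroring the tests below): any `IterBytes` value gets
// `hash_keyed` from the blanket impl, e.g. for a byte vector:
//
//     let buf: ~[u8] = ~[1u8, 2u8, 3u8];
//     let h = buf.hash_keyed(k0, k1);
//
// where `k0` and `k1` are the two 64-bit halves of the SipHash key.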
// When we have default methods, won't need this.
pub trait HashUtil {
fn hash(&self) -> u64;
}
impl<A:Hash> HashUtil for A {
#[inline(always)]
fn hash(&self) -> u64 { self.hash_keyed(0,0) }
}
/// Streaming hash-functions should implement this.
pub trait Streaming {
fn input(&self, (&const [u8]));
// These can be refactored some when we have default methods.
fn result_bytes(&self) -> ~[u8];
fn result_str(&self) -> ~str;
fn result_u64(&self) -> u64;
fn reset(&self);
}
impl<A:IterBytes> Hash for A {
#[inline(always)]
fn hash_keyed(&self, k0: u64, k1: u64) -> u64 {
unsafe {
let s = &State(k0, k1);
for self.iter_bytes(true) |bytes| {
s.input(bytes);
}
s.result_u64()<|fim▁hole|>
fn hash_keyed_2<A: IterBytes,
B: IterBytes>(a: &A, b: &B, k0: u64, k1: u64) -> u64 {
unsafe {
let s = &State(k0, k1);
for a.iter_bytes(true) |bytes| { s.input(bytes); }
for b.iter_bytes(true) |bytes| { s.input(bytes); }
s.result_u64()
}
}
fn hash_keyed_3<A: IterBytes,
B: IterBytes,
C: IterBytes>(a: &A, b: &B, c: &C, k0: u64, k1: u64) -> u64 {
unsafe {
let s = &State(k0, k1);
for a.iter_bytes(true) |bytes| { s.input(bytes); }
for b.iter_bytes(true) |bytes| { s.input(bytes); }
for c.iter_bytes(true) |bytes| { s.input(bytes); }
s.result_u64()
}
}
fn hash_keyed_4<A: IterBytes,
B: IterBytes,
C: IterBytes,
D: IterBytes>(a: &A, b: &B, c: &C, d: &D, k0: u64, k1: u64)
-> u64 {
unsafe {
let s = &State(k0, k1);
for a.iter_bytes(true) |bytes| { s.input(bytes); }
for b.iter_bytes(true) |bytes| { s.input(bytes); }
for c.iter_bytes(true) |bytes| { s.input(bytes); }
for d.iter_bytes(true) |bytes| { s.input(bytes); }
s.result_u64()
}
}
fn hash_keyed_5<A: IterBytes,
B: IterBytes,
C: IterBytes,
D: IterBytes,
E: IterBytes>(a: &A, b: &B, c: &C, d: &D, e: &E,
k0: u64, k1: u64) -> u64 {
unsafe {
let s = &State(k0, k1);
for a.iter_bytes(true) |bytes| { s.input(bytes); }
for b.iter_bytes(true) |bytes| { s.input(bytes); }
for c.iter_bytes(true) |bytes| { s.input(bytes); }
for d.iter_bytes(true) |bytes| { s.input(bytes); }
for e.iter_bytes(true) |bytes| { s.input(bytes); }
s.result_u64()
}
}
// Implement State as SipState
pub type State = SipState;
#[inline(always)]
pub fn State(k0: u64, k1: u64) -> State {
SipState(k0, k1)
}
#[inline(always)]
pub fn default_state() -> State {
State(0,0)
}
struct SipState {
k0: u64,
k1: u64,
mut length: uint, // how many bytes we've processed
mut v0: u64, // hash state
mut v1: u64,
mut v2: u64,
mut v3: u64,
mut tail: [u8, ..8], // unprocessed bytes
mut ntail: uint, // how many bytes in tail are valid
}
#[inline(always)]
fn SipState(key0: u64, key1: u64) -> SipState {
let state = SipState {
k0 : key0,
k1 : key1,
mut length : 0u,
mut v0 : 0u64,
mut v1 : 0u64,
mut v2 : 0u64,
mut v3 : 0u64,
mut tail : [0u8,0,0,0,0,0,0,0],
mut ntail : 0u,
};
(&state).reset();
state
}
// sadly, these macro definitions can't appear later,
// because they're needed in the following defs;
// this design could be improved.
macro_rules! u8to64_le (
($buf:expr, $i:expr) =>
($buf[0+$i] as u64 |
$buf[1+$i] as u64 << 8 |
$buf[2+$i] as u64 << 16 |
$buf[3+$i] as u64 << 24 |
$buf[4+$i] as u64 << 32 |
$buf[5+$i] as u64 << 40 |
$buf[6+$i] as u64 << 48 |
$buf[7+$i] as u64 << 56)
)
macro_rules! rotl (
($x:expr, $b:expr) =>
(($x << $b) | ($x >> (64 - $b)))
)
macro_rules! compress (
($v0:expr, $v1:expr, $v2:expr, $v3:expr) =>
({
$v0 += $v1; $v1 = rotl!($v1, 13); $v1 ^= $v0;
$v0 = rotl!($v0, 32);
$v2 += $v3; $v3 = rotl!($v3, 16); $v3 ^= $v2;
$v0 += $v3; $v3 = rotl!($v3, 21); $v3 ^= $v0;
$v2 += $v1; $v1 = rotl!($v1, 17); $v1 ^= $v2;
$v2 = rotl!($v2, 32);
})
)
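// compress! performs one SipRound: two add-rotate-xor half-rounds over the
// four 64-bit lanes v0..v3. The 2-4 variant below runs it twice per 8-byte
// message block and four times during finalization (see result_u64).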
impl io::Writer for SipState {
// Methods for io::writer
#[inline(always)]
fn write(&self, msg: &const [u8]) {
let length = msg.len();
self.length += length;
let mut needed = 0u;
if self.ntail != 0 {
needed = 8 - self.ntail;
if length < needed {
let mut t = 0;
while t < length {
self.tail[self.ntail+t] = msg[t];
t += 1;
}
self.ntail += length;
return;
}
let mut t = 0;
while t < needed {
self.tail[self.ntail+t] = msg[t];
t += 1;
}
let m = u8to64_le!(self.tail, 0);
self.v3 ^= m;
compress!(self.v0, self.v1, self.v2, self.v3);
compress!(self.v0, self.v1, self.v2, self.v3);
self.v0 ^= m;
self.ntail = 0;
}
// Buffered tail is now flushed, process new input.
let len = length - needed;
let end = len & (!0x7);
let left = len & 0x7;
let mut i = needed;
while i < end {
let mi = u8to64_le!(msg, i);
self.v3 ^= mi;
compress!(self.v0, self.v1, self.v2, self.v3);
compress!(self.v0, self.v1, self.v2, self.v3);
self.v0 ^= mi;
i += 8;
}
let mut t = 0u;
while t < left {
self.tail[t] = msg[i+t];
t += 1
}
self.ntail = left;
}
fn seek(&self, _x: int, _s: io::SeekStyle) {
fail!();
}
fn tell(&self) -> uint {
self.length
}
fn flush(&self) -> int {
0
}
fn get_type(&self) -> io::WriterType {
io::File
}
}
impl Streaming for SipState {
#[inline(always)]
fn input(&self, buf: &const [u8]) {
self.write(buf);
}
#[inline(always)]
fn result_u64(&self) -> u64 {
let mut v0 = self.v0;
let mut v1 = self.v1;
let mut v2 = self.v2;
let mut v3 = self.v3;
let mut b : u64 = (self.length as u64 & 0xff) << 56;
if self.ntail > 0 { b |= self.tail[0] as u64 << 0; }
if self.ntail > 1 { b |= self.tail[1] as u64 << 8; }
if self.ntail > 2 { b |= self.tail[2] as u64 << 16; }
if self.ntail > 3 { b |= self.tail[3] as u64 << 24; }
if self.ntail > 4 { b |= self.tail[4] as u64 << 32; }
if self.ntail > 5 { b |= self.tail[5] as u64 << 40; }
if self.ntail > 6 { b |= self.tail[6] as u64 << 48; }
v3 ^= b;
compress!(v0, v1, v2, v3);
compress!(v0, v1, v2, v3);
v0 ^= b;
v2 ^= 0xff;
compress!(v0, v1, v2, v3);
compress!(v0, v1, v2, v3);
compress!(v0, v1, v2, v3);
compress!(v0, v1, v2, v3);
return (v0 ^ v1 ^ v2 ^ v3);
}
fn result_bytes(&self) -> ~[u8] {
let h = self.result_u64();
~[(h >> 0) as u8,
(h >> 8) as u8,
(h >> 16) as u8,
(h >> 24) as u8,
(h >> 32) as u8,
(h >> 40) as u8,
(h >> 48) as u8,
(h >> 56) as u8,
]
}
fn result_str(&self) -> ~str {
let r = self.result_bytes();
let mut s = ~"";
for vec::each(r) |b| {
s += uint::to_str_radix(*b as uint, 16u);
}
s
}
#[inline(always)]
fn reset(&self) {
self.length = 0;
self.v0 = self.k0 ^ 0x736f6d6570736575;
self.v1 = self.k1 ^ 0x646f72616e646f6d;
self.v2 = self.k0 ^ 0x6c7967656e657261;
self.v3 = self.k1 ^ 0x7465646279746573;
self.ntail = 0;
}
}
#[test]
pub fn test_siphash() {
let vecs : [[u8, ..8], ..64] = [
[ 0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72, ],
[ 0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74, ],
[ 0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d, ],
[ 0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85, ],
[ 0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf, ],
[ 0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18, ],
[ 0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb, ],
[ 0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab, ],
[ 0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93, ],
[ 0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e, ],
[ 0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a, ],
[ 0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4, ],
[ 0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75, ],
[ 0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14, ],
[ 0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7, ],
[ 0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1, ],
[ 0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f, ],
[ 0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69, ],
[ 0x9c, 0xd3, 0x8d, 0x96, 0xf0, 0xb3, 0xc1, 0x4b, ],
[ 0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb, ],
[ 0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe, ],
[ 0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0, ],
[ 0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93, ],
[ 0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8, ],
[ 0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8, ],
[ 0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc, ],
[ 0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17, ],
[ 0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f, ],
[ 0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde, ],
[ 0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6, ],
[ 0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad, ],
[ 0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32, ],
[ 0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71, ],
[ 0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7, ],
[ 0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12, ],
[ 0xae, 0x97, 0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15, ],
[ 0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31, ],
[ 0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02, ],
[ 0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca, ],
[ 0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a, ],
[ 0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e, ],
[ 0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad, ],
[ 0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18, ],
[ 0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4, ],
[ 0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9, ],
[ 0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9, ],
[ 0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb, ],
[ 0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0, ],
[ 0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6, ],
[ 0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7, ],
[ 0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee, ],
[ 0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1, ],
[ 0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a, ],
[ 0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81, ],
[ 0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f, ],
[ 0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24, ],
[ 0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 0xb7, ],
[ 0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea, ],
[ 0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60, ],
[ 0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66, ],
[ 0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c, ],
[ 0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f, ],
[ 0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5, ],
[ 0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95, ]
];
let k0 = 0x_07_06_05_04_03_02_01_00_u64;
let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08_u64;
let mut buf : ~[u8] = ~[];
let mut t = 0;
let stream_inc = &State(k0,k1);
let stream_full = &State(k0,k1);
fn to_hex_str(r: &[u8, ..8]) -> ~str {
let mut s = ~"";
for vec::each(*r) |b| {
s += uint::to_str_radix(*b as uint, 16u);
}
s
}
while t < 64 {
debug!("siphash test %?", t);
let vec = u8to64_le!(vecs[t], 0);
let out = buf.hash_keyed(k0, k1);
debug!("got %?, expected %?", out, vec);
assert!(vec == out);
stream_full.reset();
stream_full.input(buf);
let f = stream_full.result_str();
let i = stream_inc.result_str();
let v = to_hex_str(&vecs[t]);
debug!("%d: (%s) => inc=%s full=%s", t, v, i, f);
assert!(f == i && f == v);
buf += ~[t as u8];
stream_inc.input(~[t as u8]);
t += 1;
}
}
#[test] #[cfg(target_arch = "arm")]
pub fn test_hash_uint() {
let val = 0xdeadbeef_deadbeef_u64;
assert!((val as u64).hash() != (val as uint).hash());
assert!((val as u32).hash() == (val as uint).hash());
}
#[test] #[cfg(target_arch = "x86_64")]
pub fn test_hash_uint() {
let val = 0xdeadbeef_deadbeef_u64;
assert!((val as u64).hash() == (val as uint).hash());
assert!((val as u32).hash() != (val as uint).hash());
}
#[test] #[cfg(target_arch = "x86")]
pub fn test_hash_uint() {
let val = 0xdeadbeef_deadbeef_u64;
assert!((val as u64).hash() != (val as uint).hash());
assert!((val as u32).hash() == (val as uint).hash());
}
#[test]
pub fn test_hash_idempotent() {
let val64 = 0xdeadbeef_deadbeef_u64;
assert!(val64.hash() == val64.hash());
let val32 = 0xdeadbeef_u32;
assert!(val32.hash() == val32.hash());
}
#[test]
pub fn test_hash_no_bytes_dropped_64() {
let val = 0xdeadbeef_deadbeef_u64;
assert!(val.hash() != zero_byte(val, 0).hash());
assert!(val.hash() != zero_byte(val, 1).hash());
assert!(val.hash() != zero_byte(val, 2).hash());
assert!(val.hash() != zero_byte(val, 3).hash());
assert!(val.hash() != zero_byte(val, 4).hash());
assert!(val.hash() != zero_byte(val, 5).hash());
assert!(val.hash() != zero_byte(val, 6).hash());
assert!(val.hash() != zero_byte(val, 7).hash());
fn zero_byte(val: u64, byte: uint) -> u64 {
assert!(byte < 8);
val & !(0xff << (byte * 8))
}
}
#[test]
pub fn test_hash_no_bytes_dropped_32() {
let val = 0xdeadbeef_u32;
assert!(val.hash() != zero_byte(val, 0).hash());
assert!(val.hash() != zero_byte(val, 1).hash());
assert!(val.hash() != zero_byte(val, 2).hash());
assert!(val.hash() != zero_byte(val, 3).hash());
fn zero_byte(val: u32, byte: uint) -> u32 {
assert!(byte < 4);
val & !(0xff << (byte * 8))
}
}<|fim▁end|> | }
}
} |
<|file_name|>FWebFormPdfServlet.java<|end_file_name|><|fim▁begin|>package org.mo.game.editor.face.apl.logic.form;
import com.lowagie.text.Document;
import com.lowagie.text.DocumentException;
import com.lowagie.text.Element;
import com.lowagie.text.Font;
import com.lowagie.text.HeaderFooter;
import com.lowagie.text.PageSize;
import com.lowagie.text.Paragraph;
import com.lowagie.text.Phrase;
import com.lowagie.text.Rectangle;
import com.lowagie.text.pdf.BaseFont;
import com.lowagie.text.pdf.PdfPTable;
import com.lowagie.text.pdf.PdfWriter;
import java.awt.Color;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.mo.com.lang.FFatalError;
import org.mo.com.lang.RString;
import org.mo.com.logging.ILogger;
import org.mo.com.logging.RLogger;
import org.mo.com.validator.RStringValidator;
import org.mo.com.xml.FXmlNode;
import org.mo.com.xml.IXmlObject;
import org.mo.core.aop.face.ALink;
import org.mo.eng.data.common.ISqlContext;
import org.mo.web.core.servlet.common.IWebServletResponse;
import org.mo.web.core.webform.FWebFormDatasetArgs;
import org.mo.web.core.webform.IWebFormConsole;
import org.mo.web.core.webform.IWebFormDatasetConsole;
import org.mo.web.protocol.context.IWebContext;
public class FWebFormPdfServlet
implements
IWebFormPdfServlet{
private static ILogger _logger = RLogger.find(FWebFormPdfServlet.class);
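// Renders the fetched dataset (dsNode) as a landscape A4 PDF table, using
// the form definition (formNode) for the title and column labels; the
// finished document is returned as a byte array.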
public static byte[] creatPdf(FXmlNode dsNode,
IXmlObject formNode){
// create a Document object
Rectangle rectPageSize = new Rectangle(PageSize.A4);// define an A4 page size
rectPageSize = rectPageSize.rotate();// rotate the A4 page to landscape
Document document = new Document(rectPageSize, 50, 30, 30, 30);// the 4 trailing arguments
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
// set the 4 page margins
try{
// render the generated document into the in-memory buffer
PdfWriter.getInstance(document, buffer);
BaseFont bfChinese;
try{
bfChinese = BaseFont.createFont("STSongStd-Light", "UniGB-UCS2-H", false);
}catch(IOException e){
throw new FFatalError(e);
}
Font fontChinese = new Font(bfChinese, 12, Font.NORMAL, Color.red);
document.open();
// insert a paragraph:
// the module label, used as the report title
Paragraph par = new Paragraph(formNode.innerGet("label"), fontChinese);
document.add(par);
int tableColumns = formNode.children().count();
// define the table that will hold the data
PdfPTable datatable = new PdfPTable(tableColumns); // create a new table
// int headerwidths[] = { 9, 4, 8, 10, 8, 7 }; // percentage widths of the header columns
// datatable.setWidths(headerwidths);
datatable.setWidthPercentage(100);
// set the header cell padding
datatable.getDefaultCell().setPadding(2);
// set the width of the header border lines
datatable.getDefaultCell().setBorderWidth(1);
datatable.getDefaultCell().setGrayFill(0.8f);
datatable.getDefaultCell().setHorizontalAlignment(Element.ALIGN_CENTER);
// fill in the header row below
for(int i = 0; i < tableColumns; i++){<|fim▁hole|> // 设置页码
HeaderFooter footer = new HeaderFooter(new Phrase("页码:", fontChinese), true);
footer.setBorder(Rectangle.NO_BORDER);
document.setFooter(footer);
// end of the page-number setup
// shading parameters for the table rows
int i = 1;
datatable.getDefaultCell().setBorderWidth(1);
for(FXmlNode row : dsNode.nodes()){
if(i % 2 == 1){
// odd rows get a white background
datatable.getDefaultCell().setGrayFill(1.0f);
}
// for each form column, pull the matching value out of the data row
for(int x = 0; x < tableColumns; x++){
String columnName = formNode.children().get(x).innerGet("data_name");
datatable.addCell(new Phrase(row.get(columnName), fontChinese));
}
if(i % 2 == 1){
// reset the fill to light gray
datatable.getDefaultCell().setGrayFill(0.9f);
}
i++;
}
document.add(datatable);// add the finished table to the document
}catch(DocumentException de){
de.printStackTrace();
System.err.println("document: " + de.getMessage());
}finally{
document.close();
}
return buffer.toByteArray();
}
@ALink
protected IWebFormConsole _webformConsole;
@ALink
IWebFormDatasetConsole _webFormDataConsole;
@Override
public void build(IWebContext context,
ISqlContext sqlContext,
IWebServletResponse response){
String formName = context.parameter("form_name");
RStringValidator.checkEmpty(formName, "form_name");
IXmlObject xform = findForm(formName);
// fetch the dataset
FWebFormDatasetArgs args = new FWebFormDatasetArgs(context, sqlContext);
args.setPageSize(-1);
xform.children().get(0).innerGet("label");
args.setForm(xform);
FXmlNode dsNode = _webFormDataConsole.fetchNode(args);
// generate the PDF file
byte[] bytes = creatPdf(dsNode, xform);
_logger.debug(this, "build", "Make form pdf file. (form={0}, pdf size={1})", xform.name(), bytes.length);
response.write(bytes);
}
public IXmlObject findForm(String formName){
IXmlObject xform = null;
if(RString.isNotEmpty(formName)){
xform = _webformConsole.find(formName);
if(null == xform){
throw new FFatalError("Show form is null. (name={0})", formName);
}
}
return xform;
}
}<|fim▁end|> | datatable.addCell(new Phrase(formNode.children().get(i).innerGet("label"), fontChinese));
}
datatable.setHeaderRows(1);
// end of the table header
<|file_name|>info_model.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
<|fim▁hole|>Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtGui import QApplication
from ..info_model import TableInfo, VectorTableInfo, RasterTableInfo
from ..html_elems import HtmlSection, HtmlParagraph, HtmlTable, HtmlTableHeader, HtmlTableCol
class PGTableInfo(TableInfo):
def __init__(self, table):
self.table = table
def generalInfo(self):
ret = []
# if the estimation is less than 100 rows, try to count them - it shouldn't take long time
if self.table.rowCount is None and self.table.estimatedRowCount < 100:
# row count information is not displayed yet, so just block
# table signals to avoid double refreshing (infoViewer->refreshRowCount->tableChanged->infoViewer)
self.table.blockSignals(True)
self.table.refreshRowCount()
self.table.blockSignals(False)
tbl = [
(QApplication.translate("DBManagerPlugin", "Relation type:"),
QApplication.translate("DBManagerPlugin", "View") if self.table.isView else QApplication.translate(
"DBManagerPlugin", "Table")),
(QApplication.translate("DBManagerPlugin", "Owner:"), self.table.owner)
]
if self.table.comment:
tbl.append((QApplication.translate("DBManagerPlugin", "Comment:"), self.table.comment))
tbl.extend([
(QApplication.translate("DBManagerPlugin", "Pages:"), self.table.pages),
(QApplication.translate("DBManagerPlugin", "Rows (estimation):"), self.table.estimatedRowCount )
])
# privileges
# has the user access to this schema?
schema_priv = self.table.database().connector.getSchemaPrivileges(
self.table.schemaName()) if self.table.schema() else None
if schema_priv is None:
pass
elif not schema_priv[1]: # no usage privileges on the schema
tbl.append((QApplication.translate("DBManagerPlugin", "Privileges:"),
QApplication.translate("DBManagerPlugin",
"<warning> This user doesn't have usage privileges for this schema!") ))
else:
table_priv = self.table.database().connector.getTablePrivileges((self.table.schemaName(), self.table.name))
privileges = []
if table_priv[0]:
privileges.append("select")
if self.table.rowCount is not None or self.table.rowCount >= 0:
tbl.append((QApplication.translate("DBManagerPlugin", "Rows (counted):"),
self.table.rowCount if self.table.rowCount is not None else QApplication.translate(
"DBManagerPlugin", 'Unknown (<a href="action:rows/count">find out</a>)')))
if table_priv[1]: privileges.append("insert")
if table_priv[2]: privileges.append("update")
if table_priv[3]: privileges.append("delete")
priv_string = u", ".join(privileges) if len(privileges) > 0 else QApplication.translate("DBManagerPlugin",
'<warning> This user has no privileges!')
tbl.append((QApplication.translate("DBManagerPlugin", "Privileges:"), priv_string ))
ret.append(HtmlTable(tbl))
if schema_priv is not None and schema_priv[1]:
if table_priv[0] and not table_priv[1] and not table_priv[2] and not table_priv[3]:
ret.append(HtmlParagraph(
QApplication.translate("DBManagerPlugin", "<warning> This user has read-only privileges.")))
if not self.table.isView:
if self.table.rowCount is not None:
if abs(self.table.estimatedRowCount - self.table.rowCount) > 1 and \
(self.table.estimatedRowCount > 2 * self.table.rowCount or
self.table.rowCount > 2 * self.table.estimatedRowCount):
ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
"<warning> There's a significant difference between estimated and real row count. "
'Consider running <a href="action:vacuumanalyze/run">VACUUM ANALYZE</a>.')))
# primary key defined?
if not self.table.isView:
if len(filter(lambda fld: fld.primaryKey, self.table.fields())) <= 0:
ret.append(HtmlParagraph(
QApplication.translate("DBManagerPlugin", "<warning> No primary key defined for this table!")))
return ret
def getSpatialInfo(self):
ret = []
info = self.db.connector.getSpatialInfo()
if info is None:
return
tbl = [
(QApplication.translate("DBManagerPlugin", "Library:"), info[0]),
(QApplication.translate("DBManagerPlugin", "Scripts:"), info[3]),
("GEOS:", info[1]),
("Proj:", info[2])
]
ret.append(HtmlTable(tbl))
if info[1] is not None and info[1] != info[2]:
ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
"<warning> Version of installed scripts doesn't match version of released scripts!\n"
"This is probably a result of incorrect PostGIS upgrade.")))
if not self.db.connector.has_geometry_columns:
ret.append(HtmlParagraph(
QApplication.translate("DBManagerPlugin", "<warning> geometry_columns table doesn't exist!\n"
"This table is essential for many GIS applications for enumeration of tables.")))
elif not self.db.connector.has_geometry_columns_access:
ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
"<warning> This user doesn't have privileges to read contents of geometry_columns table!\n"
"This table is essential for many GIS applications for enumeration of tables.")))
return ret
def fieldsDetails(self):
tbl = []
# define the table header
header = (
"#", QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Type"),
QApplication.translate("DBManagerPlugin", "Length"), QApplication.translate("DBManagerPlugin", "Null"),
QApplication.translate("DBManagerPlugin", "Default") )
tbl.append(HtmlTableHeader(header))
# add table contents
for fld in self.table.fields():
char_max_len = fld.charMaxLen if fld.charMaxLen is not None and fld.charMaxLen != -1 else ""
is_null_txt = "N" if fld.notNull else "Y"
# make primary key field underlined
attrs = {"class": "underline"} if fld.primaryKey else None
name = HtmlTableCol(fld.name, attrs)
tbl.append((fld.num, name, fld.type2String(), char_max_len, is_null_txt, fld.default2String()))
return HtmlTable(tbl, {"class": "header"})
def triggersDetails(self):
if self.table.triggers() is None or len(self.table.triggers()) <= 0:
return None
ret = []
tbl = []
# define the table header
header = (
QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Function"),
QApplication.translate("DBManagerPlugin", "Type"), QApplication.translate("DBManagerPlugin", "Enabled") )
tbl.append(HtmlTableHeader(header))
# add table contents
for trig in self.table.triggers():
name = u'%(name)s (<a href="action:trigger/%(name)s/%(action)s">%(action)s</a>)' % {"name": trig.name,
"action": "delete"}
(enabled, action) = (QApplication.translate("DBManagerPlugin", "Yes"), "disable") if trig.enabled else (
QApplication.translate("DBManagerPlugin", "No"), "enable")
txt_enabled = u'%(enabled)s (<a href="action:trigger/%(name)s/%(action)s">%(action)s</a>)' % {
"name": trig.name, "action": action, "enabled": enabled}
tbl.append((name, trig.function, trig.type2String(), txt_enabled))
ret.append(HtmlTable(tbl, {"class": "header"}))
ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
'<a href="action:triggers/enable">Enable all triggers</a> / <a href="action:triggers/disable">Disable all triggers</a>')))
return ret
def rulesDetails(self):
if self.table.rules() is None or len(self.table.rules()) <= 0:
return None
tbl = []
# define the table header
header = (
QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Definition") )
tbl.append(HtmlTableHeader(header))
# add table contents
for rule in self.table.rules():
name = u'%(name)s (<a href="action:rule/%(name)s/%(action)s">%(action)s</a>)' % {"name": rule.name,
"action": "delete"}
tbl.append((name, rule.definition))
return HtmlTable(tbl, {"class": "header"})
def getTableInfo(self):
ret = TableInfo.getTableInfo(self)
# rules
rules_details = self.rulesDetails()
if rules_details is not None:
ret.append(HtmlSection(QApplication.translate("DBManagerPlugin", 'Rules'), rules_details))
return ret
class PGVectorTableInfo(PGTableInfo, VectorTableInfo):
def __init__(self, table):
VectorTableInfo.__init__(self, table)
PGTableInfo.__init__(self, table)
def spatialInfo(self):
return VectorTableInfo.spatialInfo(self)
class PGRasterTableInfo(PGTableInfo, RasterTableInfo):
def __init__(self, table):
RasterTableInfo.__init__(self, table)
PGTableInfo.__init__(self, table)
def spatialInfo(self):
return RasterTableInfo.spatialInfo(self)<|fim▁end|> | """
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS |
<|file_name|>apm_process_paraview_data.py<|end_file_name|><|fim▁begin|>r"""
Description: Generates 2-D data maps from OpenFoam data saved by paraview
as a CSV file. The data has to be saved as point data, and the following fields
are expected: p, points:0->2, u:0->2. An aperture map is the second main input
and is used to generate the interpolation coordinates as well as to convert
the flow velocities into volumetric flow rates. This script assumes the OpenFoam
simulation was performed on a geometry symmetric about the X-Z plane.
For usage information run: ``apm_process_paraview_data -h``
| Written By: Matthew Stadelman
| Date Written: 2016/09/29
| Last Modified: 2017/04/23
|
"""
import argparse
from argparse import RawDescriptionHelpFormatter as RawDesc
import os
import scipy as sp
from scipy.interpolate import griddata
from apmapflow import _get_logger, set_main_logger_level, DataField
# setting up logger
set_main_logger_level('info')<|fim▁hole|>voxel_size = None
base_name = None
# creating arg parser
parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawDesc)
# adding arguments
parser.add_argument('-v', '--verbose', action='store_true',
help='debug messages are printed to the screen')
parser.add_argument('-o', '--output-dir',
type=os.path.realpath, default=os.getcwd(),
help='''outputs file to the specified
directory, sub-directories are created as needed''')
parser.add_argument('--rho', type=float, default=1000,
help='fluid density for kinematic pressure conversion')
parser.add_argument('data_file', type=os.path.realpath,
help='paraview CSV data file')
parser.add_argument('map_file', type=os.path.realpath,
help='matching aperture map used for OpenFoam simulation')
parser.add_argument('voxel_size', type=float,
help='voxel to meter conversion factor of aperture map')
parser.add_argument('avg_fact', type=float,
help='''horizontal averaging factor of aperture map''')
parser.add_argument('base_name', nargs='?', default=None,
help='''base name to save fields as, i.e. base_name + "-p-map.txt",
defaults to the name of the CSV file''')
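# example invocation (file names and values below are illustrative only):
# apm_process_paraview_data paraview-export.csv aperture-map.txt \
# 2.68e-5 10 -o ./processed-maps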
def main():
r"""
Processes commandline args and runs script
"""
global avg_fact, voxel_size, base_name
#
args = parser.parse_args()
if args.verbose:
set_main_logger_level('debug')
#
# these will be command-line args
para_infile = args.data_file
aper_infile = args.map_file
avg_fact = args.avg_fact
voxel_size = args.voxel_size
#
base_name = args.base_name
if base_name is None:
base_name = os.path.splitext(os.path.basename(para_infile))[0]
base_name = os.path.join(args.output_dir, base_name)
#
aper_map, data_dict = read_data_files(para_infile, aper_infile)
map_coords, data_coords = generate_coordinate_arrays(aper_map, data_dict)
save_data_maps(map_coords, data_coords, aper_map, data_dict, args.rho)
def read_data_files(para_file, map_file):
r"""
Reads in the paraview data file and aperture map file.
"""
#
# reading aperture map
logger.info('reading aperture map...')
aper_map = DataField(map_file)
#
# reading first line of paraview file to get column names
logger.info('reading paraview data file')
with open(para_file, 'r') as file:
cols = file.readline()
cols = cols.strip().replace('"', '').lower()
cols = cols.split(',')
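# a typical (hypothetical) paraview export header looks like:
# "p","points:0","points:1","points:2","u:0","u:1","u:2"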
#
# reading entire dataset and splitting into column vectors
data = sp.loadtxt(para_file, delimiter=',', dtype=float, skiprows=1)
data_dict = {}
for i, col in enumerate(cols):
data_dict[col] = data[:, i]
#
return aper_map, data_dict
def generate_coordinate_arrays(aper_map, para_data_dict):
r"""
Generates the coordinate arrays to use in data interpolation for converting
paraview point data into a 2-D data map.
"""
#
# generating XYZ coordinates from map to interpolate to
logger.info('calculating aperture map cell center coordinates...')
temp = sp.arange(aper_map.data_map.size, dtype=int)
temp = sp.unravel_index(temp, aper_map.data_map.shape[::-1])
map_coords = sp.zeros((aper_map.data_map.size, 3), dtype=float)
#
# half voxel added to make map points be cell centers
map_coords[:, 0] = temp[0] * avg_fact * voxel_size + voxel_size/2.0
map_coords[:, 2] = temp[1] * avg_fact * voxel_size + voxel_size/2.0
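# worked example with illustrative values avg_fact=10, voxel_size=2.68e-5:
# column index 3 -> x = 3 * 10 * 2.68e-5 + 2.68e-5/2 ~= 8.17e-4 m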
#
# pulling XYZ coordinates from the data file
logger.info('processing data file data for coordinates...')
data_coords = sp.zeros((para_data_dict['points:0'].shape[0], 3))
data_coords[:, 0] = para_data_dict['points:0']
data_coords[:, 1] = para_data_dict['points:1']
data_coords[:, 2] = para_data_dict['points:2']
#
return map_coords, data_coords
def save_data_maps(map_coords, data_coords, aper_map, data_dict, density):
r"""
Converts the raw paraview point data into a 2-D data distribution and
saves the file by appending to the base_name.
"""
#
# generating p field
logger.info('generating and saving pressure field...')
field = data_dict['p'] * density # openFoam outputs kinematic pressure
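# kinematic pressure carries units of m^2/s^2, so multiplying by the
# density (kg/m^3) recovers pressure in Pa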
field = griddata(data_coords, field, map_coords, method='nearest')
field = sp.reshape(field, aper_map.data_map.shape[::-1])
sp.savetxt(base_name+'-p-map.txt', field.T, delimiter='\t')
#
# generating Ux -> Qx field
logger.info('generating and saving Qx field...')
field = data_dict['u:0']
field = griddata(data_coords, field, map_coords, method='nearest')
field = sp.reshape(field, aper_map.data_map.shape[::-1])
field = field * aper_map.data_map.T * voxel_size**2
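# velocity (m/s) times the local aperture cross-section: the map value is
# the aperture in voxels, so aperture * voxel_size**2 assumes one voxel of
# width in the symmetry direction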
sp.savetxt(base_name+'-qx-map.txt', field.T, delimiter='\t')
#
# generating Uz -> Qz field
logger.info('generating and saving Qz field...')
field = data_dict['u:2']
field = griddata(data_coords, field, map_coords, method='nearest')
field = sp.reshape(field, aper_map.data_map.shape[::-1])
field = field * aper_map.data_map.T * voxel_size**2
sp.savetxt(base_name+'-qz-map.txt', field.T, delimiter='\t')
#
# generating Um -> Qm field
logger.info('generating and saving Q magnitude field...')
field = sp.sqrt(data_dict['u:0'] ** 2 + data_dict['u:2'] ** 2)
field = griddata(data_coords, field, map_coords, method='nearest')
field = sp.reshape(field, aper_map.data_map.shape[::-1])
field = field * aper_map.data_map.T * voxel_size**2
sp.savetxt(base_name+'-qm-map.txt', field.T, delimiter='\t')<|fim▁end|> | logger = _get_logger('apmapflow.scripts')
# setting a few convenience globals
avg_fact = None |
<|file_name|>treemap.src.js<|end_file_name|><|fim▁begin|>/**
* @license Highcharts JS v7.1.1 (2019-04-09)
*
* (c) 2014-2019 Highsoft AS
* Authors: Jon Arild Nygard / Oystein Moseng
*
* License: www.highcharts.com/license
*/
'use strict';
(function (factory) {
if (typeof module === 'object' && module.exports) {
factory['default'] = factory;
module.exports = factory;
} else if (typeof define === 'function' && define.amd) {
define('highcharts/modules/treemap', ['highcharts'], function (Highcharts) {
factory(Highcharts);
factory.Highcharts = Highcharts;
return factory;
});
} else {
factory(typeof Highcharts !== 'undefined' ? Highcharts : undefined);
}
}(function (Highcharts) {
var _modules = Highcharts ? Highcharts._modules : {};
function _registerModule(obj, path, args, fn) {
if (!obj.hasOwnProperty(path)) {
obj[path] = fn.apply(null, args);
}
}
_registerModule(_modules, 'mixins/tree-series.js', [_modules['parts/Globals.js']], function (H) {
var extend = H.extend,
isArray = H.isArray,
isBoolean = function (x) {
return typeof x === 'boolean';
},
isFn = function (x) {
return typeof x === 'function';
},
isObject = H.isObject,
isNumber = H.isNumber,
merge = H.merge,
pick = H.pick;
// TODO Combine buildTree and buildNode with setTreeValues
// TODO Remove logic from Treemap and make it utilize this mixin.
var setTreeValues = function setTreeValues(tree, options) {
var before = options.before,
idRoot = options.idRoot,
mapIdToNode = options.mapIdToNode,
nodeRoot = mapIdToNode[idRoot],
levelIsConstant = (
isBoolean(options.levelIsConstant) ?
options.levelIsConstant :
true
),
points = options.points,
point = points[tree.i],
optionsPoint = point && point.options || {},
childrenTotal = 0,
children = [],
value;
extend(tree, {
levelDynamic: tree.level - (levelIsConstant ? 0 : nodeRoot.level),
name: pick(point && point.name, ''),
visible: (
idRoot === tree.id ||
(isBoolean(options.visible) ? options.visible : false)
)
});
if (isFn(before)) {
tree = before(tree, options);
}
// First give the children some values
tree.children.forEach(function (child, i) {
var newOptions = extend({}, options);
extend(newOptions, {
index: i,
siblings: tree.children.length,
visible: tree.visible
});
child = setTreeValues(child, newOptions);
children.push(child);
if (child.visible) {
childrenTotal += child.val;
}
});
tree.visible = childrenTotal > 0 || tree.visible;
// Set the values
value = pick(optionsPoint.value, childrenTotal);
extend(tree, {
children: children,
childrenTotal: childrenTotal,
isLeaf: tree.visible && !childrenTotal,
val: value
});
return tree;
};
var getColor = function getColor(node, options) {
var index = options.index,
mapOptionsToLevel = options.mapOptionsToLevel,
parentColor = options.parentColor,
parentColorIndex = options.parentColorIndex,
series = options.series,
colors = options.colors,
siblings = options.siblings,
points = series.points,
getColorByPoint,
chartOptionsChart = series.chart.options.chart,
point,
level,
colorByPoint,
colorIndexByPoint,
color,
colorIndex;
function variation(color) {
var colorVariation = level && level.colorVariation;
if (colorVariation) {
if (colorVariation.key === 'brightness') {
return H.color(color).brighten(
colorVariation.to * (index / siblings)
).get();
}
}
return color;
}
if (node) {
point = points[node.i];
level = mapOptionsToLevel[node.level] || {};
getColorByPoint = point && level.colorByPoint;
if (getColorByPoint) {
colorIndexByPoint = point.index % (colors ?
colors.length :
chartOptionsChart.colorCount
);
colorByPoint = colors && colors[colorIndexByPoint];
}
// Select either point color, level color or inherited color.
if (!series.chart.styledMode) {
color = pick(
point && point.options.color,
level && level.color,
colorByPoint,
parentColor && variation(parentColor),
series.color
);
}
colorIndex = pick(
point && point.options.colorIndex,
level && level.colorIndex,
colorIndexByPoint,
parentColorIndex,
options.colorIndex
);
}
return {
color: color,
colorIndex: colorIndex
};
};
/**
* Creates a map from level number to its given options.
*
* @private
* @function getLevelOptions
*
* @param {object} params
* Object containing parameters.
* - `defaults` Object containing default options. The default options
* are merged with the userOptions to get the final options for a
* specific level.
* - `from` The lowest level number.
* - `levels` User options from series.levels.
* - `to` The highest level number.
*
* @return {Highcharts.Dictionary<object>}
* Returns a map from level number to its given options.
*/
var getLevelOptions = function getLevelOptions(params) {
var result = null,
defaults,
converted,
i,
from,
to,
levels;
if (isObject(params)) {
result = {};
from = isNumber(params.from) ? params.from : 1;
levels = params.levels;
converted = {};
defaults = isObject(params.defaults) ? params.defaults : {};
if (isArray(levels)) {
converted = levels.reduce(function (obj, item) {
var level,
levelIsConstant,
options;
if (isObject(item) && isNumber(item.level)) {
options = merge({}, item);
levelIsConstant = (
isBoolean(options.levelIsConstant) ?
options.levelIsConstant :
defaults.levelIsConstant
);
// Delete redundant properties.
delete options.levelIsConstant;
delete options.level;
// Calculate which level these options apply to.
level = item.level + (levelIsConstant ? 0 : from - 1);
if (isObject(obj[level])) {
extend(obj[level], options);
} else {
obj[level] = options;
}
}
return obj;
}, {});
}
to = isNumber(params.to) ? params.to : 1;
for (i = 0; i <= to; i++) {
result[i] = merge(
{},
defaults,
isObject(converted[i]) ? converted[i] : {}
);
}
}
return result;
};
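// Illustrative example: getLevelOptions({ from: 1, to: 2,
// defaults: { colorByPoint: false },
// levels: [{ level: 2, layoutAlgorithm: 'strip' }] }) returns a map where
// levels 0-2 all get { colorByPoint: false } and level 2 additionally
// gets layoutAlgorithm: 'strip'.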
/**
* Update the rootId property on the series. Also makes sure that it is
* accessible to exporting.
*
* @private
* @function updateRootId
*
* @param {object} series
* The series to operate on.
*
* @return {string}
* Returns the resulting rootId after update.
*/
var updateRootId = function (series) {
var rootId,
options;
if (isObject(series)) {
// Get the series options.
options = isObject(series.options) ? series.options : {};
// Calculate the rootId.
rootId = pick(series.rootNode, options.rootId, '');
// Set rootId on series.userOptions to pick it up in exporting.
if (isObject(series.userOptions)) {
series.userOptions.rootId = rootId;
}
// Set rootId on series to pick it up on next update.
series.rootNode = rootId;
}
return rootId;
};
var result = {
getColor: getColor,
getLevelOptions: getLevelOptions,
setTreeValues: setTreeValues,
updateRootId: updateRootId
};
return result;
});
_registerModule(_modules, 'mixins/draw-point.js', [], function () {
var isFn = function (x) {
return typeof x === 'function';
};
/**
* Handles the drawing of a component.
* Can be used for any type of component that reserves the graphic property, and
* provides a shouldDraw on its context.
*
* @private
* @function draw
*
* @param {object} params
* Parameters.
*
* TODO: add type checking.
* TODO: export this function to enable usage
*/
var draw = function draw(params) {
var component = this,
graphic = component.graphic,
animatableAttribs = params.animatableAttribs,
onComplete = params.onComplete,
css = params.css,
renderer = params.renderer;
if (component.shouldDraw()) {
if (!graphic) {
component.graphic = graphic =
renderer[params.shapeType](params.shapeArgs).add(params.group);
}
graphic
.css(css)
.attr(params.attribs)
.animate(
animatableAttribs,
params.isNew ? false : undefined,
onComplete
);
} else if (graphic) {
var destroy = function () {
component.graphic = graphic = graphic.destroy();
if (isFn(onComplete)) {
onComplete();
}
};
// animate only runs complete callback if something was animated.
if (Object.keys(animatableAttribs).length) {
graphic.animate(animatableAttribs, undefined, function () {
destroy();
});
} else {
destroy();
}
}
};
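// A minimal sketch of the params shape expected by draw (names taken from
// the property reads above; the concrete values are illustrative only):
// component.draw({
// animatableAttribs: { width: 50, height: 30 },
// attribs: { fill: '#7cb5ec' },
// css: {},
// group: series.group,
// renderer: chart.renderer,
// shapeArgs: { x: 0, y: 0, width: 50, height: 30 },
// shapeType: 'rect'
// });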
/**
* An extended version of draw customized for points.
* It calls additional methods that are expected when rendering a point.
*
* @param {object} params Parameters
*/
var drawPoint = function drawPoint(params) {
var point = this,
attribs = params.attribs = params.attribs || {};
// Assigning class in dot notation does not go well in IE8
// eslint-disable-next-line dot-notation
attribs['class'] = point.getClassName();
// Call draw to render component
draw.call(point, params);
};
return drawPoint;
});
_registerModule(_modules, 'modules/treemap.src.js', [_modules['parts/Globals.js'], _modules['mixins/tree-series.js'], _modules['mixins/draw-point.js']], function (H, mixinTreeSeries, drawPoint) {
/* *
* (c) 2014-2019 Highsoft AS
*
* Authors: Jon Arild Nygard / Oystein Moseng
*
* License: www.highcharts.com/license
*/
var seriesType = H.seriesType,
seriesTypes = H.seriesTypes,
addEvent = H.addEvent,
merge = H.merge,
extend = H.extend,
error = H.error,
defined = H.defined,
noop = H.noop,
fireEvent = H.fireEvent,
getColor = mixinTreeSeries.getColor,
getLevelOptions = mixinTreeSeries.getLevelOptions,
isArray = H.isArray,
isBoolean = function (x) {
return typeof x === 'boolean';
},
isNumber = H.isNumber,
isObject = H.isObject,
isString = H.isString,
pick = H.pick,
Series = H.Series,
stableSort = H.stableSort,
color = H.Color,
eachObject = function (list, func, context) {
context = context || this;
H.objectEach(list, function (val, key) {
func.call(context, val, key, list);
});
},
// @todo find correct name for this function.
// @todo Similar to reduce, this function is likely redundant
recursive = function (item, func, context) {
var next;
context = context || this;
next = func.call(context, item);
if (next !== false) {
recursive(next, func, context);
}
},
updateRootId = mixinTreeSeries.updateRootId;
/**
* @private
* @class
* @name Highcharts.seriesTypes.treemap
*
* @augments Highcharts.Series
*/
seriesType(
'treemap',
'scatter'
/**
* A treemap displays hierarchical data using nested rectangles. The data
* can be laid out in varying ways depending on options.
*
* @sample highcharts/demo/treemap-large-dataset/
* Treemap
*
* @extends plotOptions.scatter
* @excluding marker, jitter
* @product highcharts
* @optionparent plotOptions.treemap
*/
, {
/**
* When enabled the user can click on a point which is a parent and
* zoom in on its children. Deprecated and replaced by
* [allowTraversingTree](#plotOptions.treemap.allowTraversingTree).
*
* @sample {highcharts} highcharts/plotoptions/treemap-allowdrilltonode/
* Enabled
*
* @deprecated
* @type {boolean}
* @default false
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.allowDrillToNode
*/
/**
* When enabled the user can click on a point which is a parent and
* zoom in on its children.
*
* @sample {highcharts} highcharts/plotoptions/treemap-allowtraversingtree/
* Enabled
*
* @since 7.0.3
* @product highcharts
*/
allowTraversingTree: false,
animationLimit: 250,
/**
* When the series contains fewer points than the crop threshold, all
* points are drawn, even if the points fall outside the visible plot
* area at the current zoom. The advantage of drawing all points
* (including markers and columns), is that animation is performed on
* updates. On the other hand, when the series contains more points than
* the crop threshold, the series data is cropped to only contain points
* that fall within the plot area. The advantage of cropping away
* invisible points is to increase performance on large series.
*
* @type {number}
* @default 300
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.cropThreshold
*/
/**
* Fires on a request for change of root node for the tree, before the
* update is made. An event object is passed to the function, containing
* additional properties `newRootId`, `previousRootId`, `redraw` and
* `trigger`.
*
* @type {function}
* @default undefined
* @sample {highcharts} highcharts/plotoptions/treemap-events-setrootnode/
* Alert update information on setRootNode event.
* @since 7.0.3
* @product highcharts
* @apioption plotOptions.treemap.events.setRootNode
*/
/**
* This option decides if the user can interact with the parent nodes
* or just the leaf nodes. When this option is undefined, it defaults to
* true. However, when allowTraversingTree is true, it defaults to
* false.
*
* @sample {highcharts} highcharts/plotoptions/treemap-interactbyleaf-false/
* False
* @sample {highcharts} highcharts/plotoptions/treemap-interactbyleaf-true-and-allowtraversingtree/
* InteractByLeaf and allowTraversingTree is true
*
* @type {boolean}
* @since 4.1.2
* @product highcharts
* @apioption plotOptions.treemap.interactByLeaf
*/
/**
* The sort index of the point inside the treemap level.
*
* @sample {highcharts} highcharts/plotoptions/treemap-sortindex/
* Sort by years
*
* @type {number}
* @since 4.1.10
* @product highcharts
* @apioption plotOptions.treemap.sortIndex
*/
/**
* When using automatic point colors pulled from the `options.colors`
* collection, this option determines whether the chart should receive
* one color per series or one color per point.
*
* @see [series colors](#plotOptions.treemap.colors)
*
* @type {boolean}
* @default false
* @since 2.0
* @product highcharts
* @apioption plotOptions.treemap.colorByPoint
*/
/**
* A series specific or series type specific color set to apply instead
* of the global [colors](#colors) when
* [colorByPoint](#plotOptions.treemap.colorByPoint) is true.
*
* @type {Array<Highcharts.ColorString|Highcharts.GradientColorObject|Highcharts.PatternObject>}
* @since 3.0
* @product highcharts
* @apioption plotOptions.treemap.colors
*/
/**
* Whether to display this series type or specific series item in the
* legend.
*/
showInLegend: false,
/**
* @ignore-option
*/
marker: false,
colorByPoint: false,
/**
* @since 4.1.0
*/
dataLabels: {
/** @ignore-option */
defer: false,
/** @ignore-option */
enabled: true,
/** @ignore-option */
formatter: function () {
var point = this && this.point ? this.point : {},
name = isString(point.name) ? point.name : '';
return name;
},
/** @ignore-option */
inside: true,
/** @ignore-option */
verticalAlign: 'middle'
},
tooltip: {
headerFormat: '',
pointFormat: '<b>{point.name}</b>: {point.value}<br/>'
},
/**
* Whether to ignore hidden points when the layout algorithm runs.
* If `false`, hidden points will leave open spaces.
*
* @since 5.0.8
*/
ignoreHiddenPoint: true,
/**
* This option decides which algorithm is used for setting position
* and dimensions of the points.
*
* @see [How to write your own algorithm](https://www.highcharts.com/docs/chart-and-series-types/treemap)
*
* @sample {highcharts} highcharts/plotoptions/treemap-layoutalgorithm-sliceanddice/
* SliceAndDice by default
* @sample {highcharts} highcharts/plotoptions/treemap-layoutalgorithm-stripes/
* Stripes
* @sample {highcharts} highcharts/plotoptions/treemap-layoutalgorithm-squarified/
* Squarified
* @sample {highcharts} highcharts/plotoptions/treemap-layoutalgorithm-strip/
* Strip
*
* @since 4.1.0
* @validvalue ["sliceAndDice", "stripes", "squarified", "strip"]
*/
layoutAlgorithm: 'sliceAndDice',
/**
* Defines which direction the layout algorithm will start drawing.
*
* @since 4.1.0
* @validvalue ["vertical", "horizontal"]
*/
layoutStartingDirection: 'vertical',
/**
* Enabling this option will make the treemap alternate the drawing
* direction between vertical and horizontal. The next level's starting
* direction will always be the opposite of the previous.
*
* @sample {highcharts} highcharts/plotoptions/treemap-alternatestartingdirection-true/
* Enabled
*
* @since 4.1.0
*/
alternateStartingDirection: false,
/**
* Used together with the levels and allowTraversingTree options. When
* set to false, the first level visible is treated as level one, which
* is dynamic when traversing the tree. Otherwise the level will be the
* same as in the tree structure.
*
* @since 4.1.0
*/
levelIsConstant: true,
/**
* Options for the button appearing when drilling down in a treemap.
* Deprecated and replaced by
* [traverseUpButton](#plotOptions.treemap.traverseUpButton).
*
* @deprecated
*/
drillUpButton: {
/**
* The position of the button.
*
* @deprecated
*/
position: {
/**
* Vertical alignment of the button.
*
* @deprecated
* @type {Highcharts.VerticalAlignValue}
* @default top
* @product highcharts
* @apioption plotOptions.treemap.drillUpButton.position.verticalAlign
*/
/**
* Horizontal alignment of the button.
*
* @deprecated
* @type {Highcharts.AlignValue}
*/
align: 'right',
/**
* Horizontal offset of the button.
*
* @deprecated
*/
x: -10,
/**
* Vertical offset of the button.
*
* @deprecated
*/
y: 10
}
},
/**
* Options for the button appearing when traversing down in a treemap.
*/
traverseUpButton: {
/**
* The position of the button.
*/
position: {
/**
* Vertical alignment of the button.
*
* @type {Highcharts.VerticalAlignValue}
* @default top
* @product highcharts
* @apioption plotOptions.treemap.traverseUpButton.position.verticalAlign
*/
/**
* Horizontal alignment of the button.
*
* @type {Highcharts.AlignValue}
*/
align: 'right',
/**
* Horizontal offset of the button.
*/
x: -10,
/**
* Vertical offset of the button.
*/
y: 10
}
},
/**
* Set options on specific levels. Takes precedence over series options,
* but not point options.
*
* @sample {highcharts} highcharts/plotoptions/treemap-levels/
* Styling dataLabels and borders
* @sample {highcharts} highcharts/demo/treemap-with-levels/
* Different layoutAlgorithm
*
* @type {Array<*>}
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.levels
*/
/**
* Can set a `borderColor` on all points which lie on the same level.
*
* @type {Highcharts.ColorString}
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.levels.borderColor
*/
/**
* Set the dash style of the border of all the points which lie on the
* level. See <a href="#plotoptions.scatter.dashstyle">
* plotOptions.scatter.dashStyle</a> for possible options.
*
* @type {string}
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.levels.borderDashStyle
*/
/**
* Can set the borderWidth on all points which lie on the same level.
*
* @type {number}
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.levels.borderWidth
*/
/**
* Can set a color on all points which lie on the same level.
*
* @type {Highcharts.ColorString|Highcharts.GradientColorObject|Highcharts.PatternObject}
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.levels.color
*/
/**
* A configuration object to define how the color of a child varies from
* the parent's color. The variation is distributed among the children
* of a node. For example, when setting brightness, the brightness change
* will range from the parent's original brightness on the first child,
* to the amount set in the `to` setting on the last node. This allows a
* gradient-like color scheme that sets children apart from each other
* while highlighting the grouping on treemaps and sectors on sunburst
* charts.
*
* @sample highcharts/demo/sunburst/
* Sunburst with color variation
*
* @since 6.0.0
* @product highcharts
* @apioption plotOptions.treemap.levels.colorVariation
*/
/**
* The key of a color variation. Currently supports `brightness` only.
*
* @type {string}
* @since 6.0.0
* @product highcharts
* @validvalue ["brightness"]
* @apioption plotOptions.treemap.levels.colorVariation.key
*/
/**
* The ending value of a color variation. The last sibling will receive
* this value.
*
* @type {number}
* @since 6.0.0
* @product highcharts
* @apioption plotOptions.treemap.levels.colorVariation.to
*/
/**
* Can set the options of dataLabels on each point which lies on the
* level. See
* [plotOptions.treemap.dataLabels](#plotOptions.treemap.dataLabels) for
* possible values.
*
* @type {object}
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.levels.dataLabels
*/
/**
* Can set the layoutAlgorithm option on a specific level.
*
* @type {string}
* @since 4.1.0
* @product highcharts
* @validvalue ["sliceAndDice", "stripes", "squarified", "strip"]
* @apioption plotOptions.treemap.levels.layoutAlgorithm
*/
/**
* Can set the layoutStartingDirection option on a specific level.
*
* @type {string}
* @since 4.1.0
* @product highcharts
* @validvalue ["vertical", "horizontal"]
* @apioption plotOptions.treemap.levels.layoutStartingDirection
*/
<|fim▁hole|> * object.
*
* @sample {highcharts} highcharts/plotoptions/treemap-levels/
* Styling of both levels
*
* @type {number}
* @since 4.1.0
* @product highcharts
* @apioption plotOptions.treemap.levels.level
*/
// Presentational options
/**
* The color of the border surrounding each tree map item.
*
* @type {Highcharts.ColorString}
*/
borderColor: '#e6e6e6',
/**
* The width of the border surrounding each tree map item.
*/
borderWidth: 1,
/**
* The opacity of a point in a treemap. When a point has children, the
* visibility of the children is determined by the opacity.
*
* @since 4.2.4
*/
opacity: 0.15,
/**
* A wrapper object for all the series options in specific states.
*
* @extends plotOptions.heatmap.states
*/
states: {
/**
* Options for the hovered series
*
* @extends plotOptions.heatmap.states.hover
* @excluding halo
*/
hover: {
/**
* The border color for the hovered state.
*/
borderColor: '#999999',
/**
* Brightness for the hovered point. Defaults to 0 if the
* heatmap series is loaded first, otherwise 0.1.
*
* @type {number}
* @default undefined
*/
brightness: seriesTypes.heatmap ? 0 : 0.1,
/**
* @extends plotOptions.heatmap.states.hover.halo
*/
halo: false,
/**
* The opacity of a point in a treemap. When a point has children,
* the visibility of the children is determined by the opacity.
*
* @since 4.2.4
*/
opacity: 0.75,
/**
* The shadow option for hovered state.
*/
shadow: false
}
}
// Prototype members
}, {
pointArrayMap: ['value'],
directTouch: true,
optionalAxis: 'colorAxis',
getSymbol: noop,
parallelArrays: ['x', 'y', 'value', 'colorValue'],
colorKey: 'colorValue', // Point color option key
trackerGroups: ['group', 'dataLabelsGroup'],
/**
* Creates an object map from parent id to children's indexes.
*
* @private
* @function Highcharts.Series#getListOfParents
*
* @param {Highcharts.SeriesTreemapDataOptions} data
* List of points set in options.
*
* @param {Array<string>} existingIds
* List of all point ids.
*
* @return {object}
* Map from parent id to children indexes in data.
*/
getListOfParents: function (data, existingIds) {
var arr = isArray(data) ? data : [],
ids = isArray(existingIds) ? existingIds : [],
listOfParents = arr.reduce(function (prev, curr, i) {
var parent = pick(curr.parent, '');
if (prev[parent] === undefined) {
prev[parent] = [];
}
prev[parent].push(i);
return prev;
}, {
'': [] // Root of tree
});
// If parent does not exist, hoist parent to root of tree.
eachObject(listOfParents, function (children, parent, list) {
if ((parent !== '') && (ids.indexOf(parent) === -1)) {
children.forEach(function (child) {
list[''].push(child);
});
delete list[parent];
}
});
return listOfParents;
},
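// Illustrative example: data = [{ id: 'A' }, { id: 'B', parent: 'A' },
// { id: 'C', parent: 'missing' }] with existingIds = ['A', 'B', 'C']
// returns { '': [0, 2], 'A': [1] } - the orphan point 'C' is hoisted to
// the root of the tree.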
// Creates a tree structured object from the series points
getTree: function () {
var series = this,
allIds = this.data.map(function (d) {
return d.id;
}),
parentList = series.getListOfParents(this.data, allIds);
series.nodeMap = [];
return series.buildNode('', -1, 0, parentList, null);
},
// Define hasData function for non-cartesian series.
// Returns true if the series has points at all.
hasData: function () {
return !!this.processedXData.length; // != 0
},
init: function (chart, options) {
var series = this,
colorSeriesMixin = H.colorSeriesMixin;
// If color series logic is loaded, add some properties
if (H.colorSeriesMixin) {
this.translateColors = colorSeriesMixin.translateColors;
this.colorAttribs = colorSeriesMixin.colorAttribs;
this.axisTypes = colorSeriesMixin.axisTypes;
}
// Handle deprecated options.
addEvent(series, 'setOptions', function (event) {
var options = event.userOptions;
if (
defined(options.allowDrillToNode) &&
!defined(options.allowTraversingTree)
) {
options.allowTraversingTree = options.allowDrillToNode;
delete options.allowDrillToNode;
}
if (
defined(options.drillUpButton) &&
!defined(options.traverseUpButton)
) {
options.traverseUpButton = options.drillUpButton;
delete options.drillUpButton;
}
});
Series.prototype.init.call(series, chart, options);
if (series.options.allowTraversingTree) {
addEvent(series, 'click', series.onClickDrillToNode);
}
},
buildNode: function (id, i, level, list, parent) {
var series = this,
children = [],
point = series.points[i],
height = 0,
node,
child;
// Actions
((list[id] || [])).forEach(function (i) {
child = series.buildNode(
series.points[i].id,
i,
(level + 1),
list,
id
);
height = Math.max(child.height + 1, height);
children.push(child);
});
node = {
id: id,
i: i,
children: children,
height: height,
level: level,
parent: parent,
visible: false // @todo move this to better location
};
series.nodeMap[node.id] = node;
if (point) {
point.node = node;
}
return node;
},
setTreeValues: function (tree) {
var series = this,
options = series.options,
idRoot = series.rootNode,
mapIdToNode = series.nodeMap,
nodeRoot = mapIdToNode[idRoot],
levelIsConstant = (
isBoolean(options.levelIsConstant) ?
options.levelIsConstant :
true
),
childrenTotal = 0,
children = [],
val,
point = series.points[tree.i];
// First give the children some values
tree.children.forEach(function (child) {
child = series.setTreeValues(child);
children.push(child);
if (!child.ignore) {
childrenTotal += child.val;
}
});
// Sort the children
stableSort(children, function (a, b) {
return a.sortIndex - b.sortIndex;
});
// Set the values
val = pick(point && point.options.value, childrenTotal);
if (point) {
point.value = val;
}
extend(tree, {
children: children,
childrenTotal: childrenTotal,
// Ignore this node if point is not visible
ignore: !(pick(point && point.visible, true) && (val > 0)),
isLeaf: tree.visible && !childrenTotal,
levelDynamic: (
tree.level - (levelIsConstant ? 0 : nodeRoot.level)
),
name: pick(point && point.name, ''),
sortIndex: pick(point && point.sortIndex, -val),
val: val
});
return tree;
},
/**
* Recursive function which calculates the area for all children of a
* node.
*
* @private
* @function Highcharts.Series#calculateChildrenAreas
*
* @param {object} parent
* The node which is parent to the children.
*
* @param {object} area
* The rectangular area of the parent.
*/
calculateChildrenAreas: function (parent, area) {
var series = this,
options = series.options,
mapOptionsToLevel = series.mapOptionsToLevel,
level = mapOptionsToLevel[parent.level + 1],
algorithm = pick(
(
series[level &&
level.layoutAlgorithm] &&
level.layoutAlgorithm
),
options.layoutAlgorithm
),
alternate = options.alternateStartingDirection,
childrenValues = [],
children;
// Collect all children which should be included
children = parent.children.filter(function (n) {
return !n.ignore;
});
if (level && level.layoutStartingDirection) {
area.direction = level.layoutStartingDirection === 'vertical' ?
0 :
1;
}
childrenValues = series[algorithm](area, children);
children.forEach(function (child, index) {
var values = childrenValues[index];
child.values = merge(values, {
val: child.childrenTotal,
direction: (alternate ? 1 - area.direction : area.direction)
});
child.pointValues = merge(values, {
x: (values.x / series.axisRatio),
width: (values.width / series.axisRatio)
});
// If node has children, then call method recursively
if (child.children.length) {
series.calculateChildrenAreas(child, child.values);
}
});
},
setPointValues: function () {
var series = this,
xAxis = series.xAxis,
yAxis = series.yAxis;
series.points.forEach(function (point) {
var node = point.node,
values = node.pointValues,
x1,
x2,
y1,
y2,
crispCorr = 0;
// Get the crisp correction in classic mode. For this to work in
// styled mode, we would need to first add the shape (without x,
// y, width and height), then read the rendered stroke width
// using point.graphic.strokeWidth(), then modify and apply the
// shapeArgs. This applies also to column series, but the
// downside is performance and code complexity.
if (!series.chart.styledMode) {
crispCorr = (
(series.pointAttribs(point)['stroke-width'] || 0) % 2
) / 2;
}
// Points which are ignored have no values.
if (values && node.visible) {
x1 = Math.round(
xAxis.translate(values.x, 0, 0, 0, 1)
) - crispCorr;
x2 = Math.round(
xAxis.translate(values.x + values.width, 0, 0, 0, 1)
) - crispCorr;
y1 = Math.round(
yAxis.translate(values.y, 0, 0, 0, 1)
) - crispCorr;
y2 = Math.round(
yAxis.translate(values.y + values.height, 0, 0, 0, 1)
) - crispCorr;
// Set point values
point.shapeArgs = {
x: Math.min(x1, x2),
y: Math.min(y1, y2),
width: Math.abs(x2 - x1),
height: Math.abs(y2 - y1)
};
point.plotX =
point.shapeArgs.x + (point.shapeArgs.width / 2);
point.plotY =
point.shapeArgs.y + (point.shapeArgs.height / 2);
} else {
// Reset visibility
delete point.plotX;
delete point.plotY;
}
});
},
// Set the node's color recursively, from the parent down.
setColorRecursive: function (
node,
parentColor,
colorIndex,
index,
siblings
) {
var series = this,
chart = series && series.chart,
colors = chart && chart.options && chart.options.colors,
colorInfo,
point;
if (node) {
colorInfo = getColor(node, {
colors: colors,
index: index,
mapOptionsToLevel: series.mapOptionsToLevel,
parentColor: parentColor,
parentColorIndex: colorIndex,
series: series,
siblings: siblings
});
point = series.points[node.i];
if (point) {
point.color = colorInfo.color;
point.colorIndex = colorInfo.colorIndex;
}
// Do it all again with the children
(node.children || []).forEach(function (child, i) {
series.setColorRecursive(
child,
colorInfo.color,
colorInfo.colorIndex,
i,
node.children.length
);
});
}
},
algorithmGroup: function (h, w, d, p) {
this.height = h;
this.width = w;
this.plot = p;
this.direction = d;
this.startDirection = d;
this.total = 0;
this.nW = 0;
this.lW = 0;
this.nH = 0;
this.lH = 0;
this.elArr = [];
this.lP = {
total: 0,
lH: 0,
nH: 0,
lW: 0,
nW: 0,
nR: 0,
lR: 0,
aspectRatio: function (w, h) {
return Math.max((w / h), (h / w));
}
};
this.addElement = function (el) {
this.lP.total = this.elArr[this.elArr.length - 1];
this.total = this.total + el;
if (this.direction === 0) {
// Calculate last point old aspect ratio
this.lW = this.nW;
this.lP.lH = this.lP.total / this.lW;
this.lP.lR = this.lP.aspectRatio(this.lW, this.lP.lH);
// Calculate last point new aspect ratio
this.nW = this.total / this.height;
this.lP.nH = this.lP.total / this.nW;
this.lP.nR = this.lP.aspectRatio(this.nW, this.lP.nH);
} else {
// Calculate last point old aspect ratio
this.lH = this.nH;
this.lP.lW = this.lP.total / this.lH;
this.lP.lR = this.lP.aspectRatio(this.lP.lW, this.lH);
// Calculate last point new aspect ratio
this.nH = this.total / this.width;
this.lP.nW = this.lP.total / this.nH;
this.lP.nR = this.lP.aspectRatio(this.lP.nW, this.nH);
}
this.elArr.push(el);
};
this.reset = function () {
this.nW = 0;
this.lW = 0;
this.elArr = [];
this.total = 0;
};
},
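// Worked example of the aspect ratio bookkeeping above: aspectRatio(4, 1)
// is 4 and aspectRatio(2, 2) is 1, so the squarified layout keeps adding
// elements to a row while the last point's new ratio (nR) does not exceed
// its old ratio (lR), i.e. while the cells stay closer to squares.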
algorithmCalcPoints: function (
directionChange, last, group, childrenArea
) {
var pX,
pY,
pW,
pH,
gW = group.lW,
gH = group.lH,
plot = group.plot,
keep,
i = 0,
end = group.elArr.length - 1;
if (last) {
gW = group.nW;
gH = group.nH;
} else {
keep = group.elArr[group.elArr.length - 1];
}
group.elArr.forEach(function (p) {
if (last || (i < end)) {
if (group.direction === 0) {
pX = plot.x;
pY = plot.y;
pW = gW;
pH = p / pW;
} else {
pX = plot.x;
pY = plot.y;
pH = gH;
pW = p / pH;
}
childrenArea.push({
x: pX,
y: pY,
width: pW,
height: H.correctFloat(pH)
});
if (group.direction === 0) {
plot.y = plot.y + pH;
} else {
plot.x = plot.x + pW;
}
}
i = i + 1;
});
// Reset variables
group.reset();
if (group.direction === 0) {
group.width = group.width - gW;
} else {
group.height = group.height - gH;
}
plot.y = plot.parent.y + (plot.parent.height - group.height);
plot.x = plot.parent.x + (plot.parent.width - group.width);
if (directionChange) {
group.direction = 1 - group.direction;
}
// If not last, then add uncalculated element
if (!last) {
group.addElement(keep);
}
},
algorithmLowAspectRatio: function (directionChange, parent, children) {
var childrenArea = [],
series = this,
pTot,
plot = {
x: parent.x,
y: parent.y,
parent: parent
},
direction = parent.direction,
i = 0,
end = children.length - 1,
group = new this.algorithmGroup( // eslint-disable-line new-cap
parent.height,
parent.width,
direction,
plot
);
// Loop through and calculate all areas
children.forEach(function (child) {
pTot =
(parent.width * parent.height) * (child.val / parent.val);
group.addElement(pTot);
if (group.lP.nR > group.lP.lR) {
series.algorithmCalcPoints(
directionChange,
false,
group,
childrenArea,
plot
);
}
// If last child, then calculate all remaining areas
if (i === end) {
series.algorithmCalcPoints(
directionChange,
true,
group,
childrenArea,
plot
);
}
i = i + 1;
});
return childrenArea;
},
algorithmFill: function (directionChange, parent, children) {
var childrenArea = [],
pTot,
direction = parent.direction,
x = parent.x,
y = parent.y,
width = parent.width,
height = parent.height,
pX,
pY,
pW,
pH;
children.forEach(function (child) {
pTot =
(parent.width * parent.height) * (child.val / parent.val);
pX = x;
pY = y;
if (direction === 0) {
pH = height;
pW = pTot / pH;
width = width - pW;
x = x + pW;
} else {
pW = width;
pH = pTot / pW;
height = height - pH;
y = y + pH;
}
childrenArea.push({
x: pX,
y: pY,
width: pW,
height: pH
});
if (directionChange) {
direction = 1 - direction;
}
});
return childrenArea;
},
strip: function (parent, children) {
return this.algorithmLowAspectRatio(false, parent, children);
},
squarified: function (parent, children) {
return this.algorithmLowAspectRatio(true, parent, children);
},
sliceAndDice: function (parent, children) {
return this.algorithmFill(true, parent, children);
},
stripes: function (parent, children) {
return this.algorithmFill(false, parent, children);
},
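// Worked example for algorithmFill (sliceAndDice): a parent of
// { x: 0, y: 0, width: 100, height: 100, val: 10, direction: 0 } with two
// children of val 6 and 4 yields the areas
// { x: 0, y: 0, width: 60, height: 100 } and
// { x: 60, y: 0, width: 40, height: 100 }.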
translate: function () {
var series = this,
options = series.options,
// NOTE: updateRootId modifies series.
rootId = updateRootId(series),
rootNode,
pointValues,
seriesArea,
tree,
val;
// Call prototype function
Series.prototype.translate.call(series);
// @todo Only if series.isDirtyData is true
tree = series.tree = series.getTree();
rootNode = series.nodeMap[rootId];
series.renderTraverseUpButton(rootId);
series.mapOptionsToLevel = getLevelOptions({
from: rootNode.level + 1,
levels: options.levels,
to: tree.height,
defaults: {
levelIsConstant: series.options.levelIsConstant,
colorByPoint: options.colorByPoint
}
});
if (
rootId !== '' &&
(!rootNode || !rootNode.children.length)
) {
series.setRootNode('', false);
rootId = series.rootNode;
rootNode = series.nodeMap[rootId];
}
// Parents of the root node are visible by default
recursive(series.nodeMap[series.rootNode], function (node) {
var next = false,
p = node.parent;
node.visible = true;
if (p || p === '') {
next = series.nodeMap[p];
}
return next;
});
// Children of the root node are visible by default
recursive(
series.nodeMap[series.rootNode].children,
function (children) {
var next = false;
children.forEach(function (child) {
child.visible = true;
if (child.children.length) {
next = (next || []).concat(child.children);
}
});
return next;
}
);
series.setTreeValues(tree);
// Calculate plotting values.
series.axisRatio = (series.xAxis.len / series.yAxis.len);
series.nodeMap[''].pointValues = pointValues =
{ x: 0, y: 0, width: 100, height: 100 };
series.nodeMap[''].values = seriesArea = merge(pointValues, {
width: (pointValues.width * series.axisRatio),
direction: (
options.layoutStartingDirection === 'vertical' ? 0 : 1
),
val: tree.val
});
series.calculateChildrenAreas(tree, seriesArea);
// Logic for point colors
if (series.colorAxis) {
series.translateColors();
} else if (!options.colorByPoint) {
series.setColorRecursive(series.tree);
}
// Update axis extremes according to the root node.
if (options.allowTraversingTree) {
val = rootNode.pointValues;
series.xAxis.setExtremes(val.x, val.x + val.width, false);
series.yAxis.setExtremes(val.y, val.y + val.height, false);
series.xAxis.setScale();
series.yAxis.setScale();
}
// Assign values to points.
series.setPointValues();
},
/**
* Extend drawDataLabels with logic to handle custom options related to
* the treemap series:
*
* - Points which are not leaf nodes have dataLabels disabled by
* default.
*
* - Options set on series.levels is merged in.
*
* - Width of the dataLabel is set to match the width of the point
* shape.
*
* @private
* @function Highcharts.Series#drawDataLabels
*/
drawDataLabels: function () {
var series = this,
mapOptionsToLevel = series.mapOptionsToLevel,
points = series.points.filter(function (n) {
return n.node.visible;
}),
options,
level;
points.forEach(function (point) {
level = mapOptionsToLevel[point.node.level];
// Set options to new object to avoid problems with scope
options = { style: {} };
// If not a leaf, then label should be disabled as default
if (!point.node.isLeaf) {
options.enabled = false;
}
// If options for level exists, include them as well
if (level && level.dataLabels) {
options = merge(options, level.dataLabels);
series._hasPointLabels = true;
}
// Set dataLabel width to the width of the point shape.
if (point.shapeArgs) {
options.style.width = point.shapeArgs.width;
if (point.dataLabel) {
point.dataLabel.css({
width: point.shapeArgs.width + 'px'
});
}
}
// Merge custom options with point options
point.dlOptions = merge(options, point.options.dataLabels);
});
Series.prototype.drawDataLabels.call(this);
},
// Override the alignment method by setting z index
alignDataLabel: function (point, dataLabel, labelOptions) {
var style = labelOptions.style;
// #8160: Prevent the label from exceeding the point's
// boundaries in treemaps by applying ellipsis overflow.
// The issue was happening when datalabel's text contained a
// long sequence of characters without a whitespace.
if (
!H.defined(style.textOverflow) &&
dataLabel.text &&
dataLabel.getBBox().width > dataLabel.text.textWidth
) {
dataLabel.css({
textOverflow: 'ellipsis',
// unit (px) is required when useHTML is true
width: style.width += 'px'
});
}
seriesTypes.column.prototype.alignDataLabel.apply(this, arguments);
if (point.dataLabel) {
// point.node.zIndex could be undefined (#6956)
point.dataLabel.attr({ zIndex: (point.node.zIndex || 0) + 1 });
}
},
// Get presentational attributes
pointAttribs: function (point, state) {
var series = this,
mapOptionsToLevel = (
isObject(series.mapOptionsToLevel) ?
series.mapOptionsToLevel :
{}
),
level = point && mapOptionsToLevel[point.node.level] || {},
options = this.options,
attr,
stateOptions = (state && options.states[state]) || {},
className = (point && point.getClassName()) || '',
opacity;
// Set attributes by precedence. Point trumps level trumps series.
// Stroke width uses pick because it can be 0.
attr = {
'stroke':
(point && point.borderColor) ||
level.borderColor ||
stateOptions.borderColor ||
options.borderColor,
'stroke-width': pick(
point && point.borderWidth,
level.borderWidth,
stateOptions.borderWidth,
options.borderWidth
),
'dashstyle':
(point && point.borderDashStyle) ||
level.borderDashStyle ||
stateOptions.borderDashStyle ||
options.borderDashStyle,
'fill': (point && point.color) || this.color
};
// Hide levels above the current view
if (className.indexOf('highcharts-above-level') !== -1) {
attr.fill = 'none';
attr['stroke-width'] = 0;
// Nodes with children that accept interaction
} else if (
className.indexOf('highcharts-internal-node-interactive') !== -1
) {
opacity = pick(stateOptions.opacity, options.opacity);
attr.fill = color(attr.fill).setOpacity(opacity).get();
attr.cursor = 'pointer';
// Hide nodes that have children
} else if (className.indexOf('highcharts-internal-node') !== -1) {
attr.fill = 'none';
} else if (state) {
// Brighten and hoist the hover nodes
attr.fill = color(attr.fill)
.brighten(stateOptions.brightness)
.get();
}
return attr;
},
// Override drawPoints
drawPoints: function () {
var series = this,
chart = series.chart,
renderer = chart.renderer,
points = series.points,
styledMode = chart.styledMode,
options = series.options,
shadow = styledMode ? {} : options.shadow,
borderRadius = options.borderRadius,
withinAnimationLimit =
chart.pointCount < options.animationLimit,
allowTraversingTree = options.allowTraversingTree;
points.forEach(function (point) {
var levelDynamic = point.node.levelDynamic,
animate = {},
attr = {},
css = {},
groupKey = 'level-group-' + levelDynamic,
hasGraphic = !!point.graphic,
shouldAnimate = withinAnimationLimit && hasGraphic,
shapeArgs = point.shapeArgs;
// Don't bother with calculating styling if the point is not drawn
if (point.shouldDraw()) {
if (borderRadius) {
attr.r = borderRadius;
}
merge(
true, // Extend object
// Which object to extend
shouldAnimate ? animate : attr,
// Add shapeArgs to animate/attr if graphic exists
hasGraphic ? shapeArgs : {},
// Add style attribs if !styleMode
styledMode ?
{} :
series.pointAttribs(
point, point.selected && 'select'
)
);
// In styled mode apply point.color. Use CSS, otherwise the
// fill used in the style sheet will take precedence over
// the fill attribute.
if (series.colorAttribs && styledMode) {
// Heatmap is loaded
extend(css, series.colorAttribs(point));
}
if (!series[groupKey]) {
series[groupKey] = renderer.g(groupKey)
.attr({
// @todo Set the zIndex based upon the number of
// levels, instead of using 1000
zIndex: 1000 - levelDynamic
})
.add(series.group);
}
}
// Draw the point
point.draw({
animatableAttribs: animate,
attribs: attr,
css: css,
group: series[groupKey],
renderer: renderer,
shadow: shadow,
shapeArgs: shapeArgs,
shapeType: 'rect'
});
// If setRootNode is allowed, set a point cursor on clickables &
// add drillId to point
if (allowTraversingTree && point.graphic) {
point.drillId = options.interactByLeaf ?
series.drillToByLeaf(point) :
series.drillToByGroup(point);
}
});
},
// Add drilling on the suitable points
onClickDrillToNode: function (event) {
var series = this,
point = event.point,
drillId = point && point.drillId;
// If a drill id is returned, add click event and cursor.
if (isString(drillId)) {
point.setState(''); // Remove hover
series.setRootNode(drillId, true, { trigger: 'click' });
}
},
/**
* Finds the drill id for a parent node. Returns false if point should
* not have a click event.
*
* @private
* @function Highcharts.Series#drillToByGroup
*
* @param {object} point
*
* @return {boolean|string}
* Drill to id or false when point should not have a click
* event.
*/
drillToByGroup: function (point) {
var series = this,
drillId = false;
if ((point.node.level - series.nodeMap[series.rootNode].level) ===
1 &&
!point.node.isLeaf
) {
drillId = point.id;
}
return drillId;
},
/**
* Finds the drill id for a leaf node. Returns false if point should not
* have a click event
*
* @private
* @function Highcharts.Series#drillToByLeaf
*
* @param {object} point
*
* @return {boolean|string}
* Drill to id or false when point should not have a click
* event.
*/
drillToByLeaf: function (point) {
var series = this,
drillId = false,
nodeParent;
if ((point.node.parent !== series.rootNode) &&
point.node.isLeaf
) {
nodeParent = point.node;
while (!drillId) {
nodeParent = series.nodeMap[nodeParent.parent];
if (nodeParent.parent === series.rootNode) {
drillId = nodeParent.id;
}
}
}
return drillId;
},
drillUp: function () {
var series = this,
node = series.nodeMap[series.rootNode];
if (node && isString(node.parent)) {
series.setRootNode(
node.parent,
true,
{ trigger: 'traverseUpButton' }
);
}
},
// TODO remove this function at a suitable version.
drillToNode: function (id, redraw) {
error(
'WARNING: treemap.drillToNode has been renamed to treemap.' +
'setRootNode, and will be removed in the next major version.'
);
this.setRootNode(id, redraw);
},
/**
* Sets a new root node for the series.
*
* @private
* @function Highcharts.Series#setRootNode
*
* @param {string} id The id of the new root node.
* @param {boolean} [redraw=true] Whether to redraw the chart or not.
* @param {object} [eventArguments] Arguments to be accessed in
* event handler.
* @param {string} [eventArguments.newRootId] Id of the new root.
* @param {string} [eventArguments.previousRootId] Id of the previous
* root.
* @param {boolean} [eventArguments.redraw] Whether to redraw the
* chart after.
* @param {object} [eventArguments.series] The series to update the root
* of.
* @param {string} [eventArguments.trigger] The action which
* triggered the event. Undefined if the setRootNode is called
* directly.
*/
setRootNode: function (id, redraw, eventArguments) {
var series = this,
eventArgs = extend({
newRootId: id,
previousRootId: series.rootNode,
redraw: pick(redraw, true),
series: series
}, eventArguments);
/**
* The default functionality of the setRootNode event.
*
* @private
* @param {object} args The event arguments.
* @param {string} args.newRootId Id of the new root.
* @param {string} args.previousRootId Id of the previous root.
* @param {boolean} args.redraw Whether to redraw the chart after.
* @param {object} args.series The series to update the root of.
* @param {string} [args.trigger=undefined] The action which
* triggered the event. Undefined if the setRootNode is called
* directly.
*/
var defaultFn = function (args) {
var series = args.series;
// Store previous and new root ids on the series.
series.idPreviousRoot = args.previousRootId;
series.rootNode = args.newRootId;
// Redraw the chart
series.isDirty = true; // Force redraw
if (args.redraw) {
series.chart.redraw();
}
};
// Fire setRootNode event.
fireEvent(series, 'setRootNode', eventArgs, defaultFn);
},
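// Typical usage, mirroring onClickDrillToNode above:
// series.setRootNode('node-id', true, { trigger: 'click' });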
renderTraverseUpButton: function (rootId) {
var series = this,
nodeMap = series.nodeMap,
node = nodeMap[rootId],
name = node.name,
buttonOptions = series.options.traverseUpButton,
backText = pick(buttonOptions.text, name, '< Back'),
attr,
states;
if (rootId === '') {
if (series.drillUpButton) {
series.drillUpButton = series.drillUpButton.destroy();
}
} else if (!this.drillUpButton) {
attr = buttonOptions.theme;
states = attr && attr.states;
this.drillUpButton = this.chart.renderer.button(
backText,
null,
null,
function () {
series.drillUp();
},
attr,
states && states.hover,
states && states.select
)
.addClass('highcharts-drillup-button')
.attr({
align: buttonOptions.position.align,
zIndex: 7
})
.add()
.align(
buttonOptions.position,
false,
buttonOptions.relativeTo || 'plotBox'
);
} else {
this.drillUpButton.placed = false;
this.drillUpButton.attr({
text: backText
})
.align();
}
},
buildKDTree: noop,
drawLegendSymbol: H.LegendSymbolMixin.drawRectangle,
getExtremes: function () {
// Get the extremes from the value data
Series.prototype.getExtremes.call(this, this.colorValueData);
this.valueMin = this.dataMin;
this.valueMax = this.dataMax;
// Get the extremes from the y data
Series.prototype.getExtremes.call(this);
},
getExtremesFromAll: true,
bindAxes: function () {
var treeAxis = {
endOnTick: false,
gridLineWidth: 0,
lineWidth: 0,
min: 0,
dataMin: 0,
minPadding: 0,
max: 100,
dataMax: 100,
maxPadding: 0,
startOnTick: false,
title: null,
tickPositions: []
};
Series.prototype.bindAxes.call(this);
H.extend(this.yAxis.options, treeAxis);
H.extend(this.xAxis.options, treeAxis);
},
/**
* Workaround for `inactive` state. Since `series.opacity` option is
* already reserved, don't use that state at all by disabling
* `inactiveOtherPoints` and not inheriting states by points.
*
* @private
*/
setState: function (state) {
this.options.inactiveOtherPoints = true;
Series.prototype.setState.call(this, state, false);
this.options.inactiveOtherPoints = false;
},
utils: {
recursive: recursive
}
// Point class
}, {
draw: drawPoint,
getClassName: function () {
var className = H.Point.prototype.getClassName.call(this),
series = this.series,
options = series.options;
// Above the current level
if (this.node.level <= series.nodeMap[series.rootNode].level) {
className += ' highcharts-above-level';
} else if (
!this.node.isLeaf &&
!pick(options.interactByLeaf, !options.allowTraversingTree)
) {
className += ' highcharts-internal-node-interactive';
} else if (!this.node.isLeaf) {
className += ' highcharts-internal-node';
}
return className;
},
/**
* A tree point is valid if it has an id too; assume it may be a parent
* item.
*
* @private
* @function Highcharts.Point#isValid
*/
isValid: function () {
return this.id || isNumber(this.value);
},
setState: function (state) {
H.Point.prototype.setState.call(this, state);
// Graphic does not exist when point is not visible.
if (this.graphic) {
this.graphic.attr({
zIndex: state === 'hover' ? 1 : 0
});
}
},
setVisible: seriesTypes.pie.prototype.pointClass.prototype.setVisible,
shouldDraw: function () {
var point = this;
return isNumber(point.plotY) && point.y !== null;
}
}
);
/**
* A `treemap` series. If the [type](#series.treemap.type) option is
* not specified, it is inherited from [chart.type](#chart.type).
*
* @extends series,plotOptions.treemap
* @excluding dataParser, dataURL, stack
* @product highcharts
* @apioption series.treemap
*/
/**
* An array of data points for the series. For the `treemap` series
* type, points can be given in the following ways:
*
* 1. An array of numerical values. In this case, the numerical values will be
* interpreted as `value` options. Example:
* ```js
* data: [0, 5, 3, 5]
* ```
*
* 2. An array of objects with named values. The following snippet shows only a
* few settings, see the complete options set below. If the total number of
* data points exceeds the series'
* [turboThreshold](#series.treemap.turboThreshold),
* this option is not available.
* ```js
* data: [{
* value: 9,
* name: "Point2",
* color: "#00FF00"
* }, {
* value: 6,
* name: "Point1",
* color: "#FF00FF"
* }]
* ```
*
* @sample {highcharts} highcharts/chart/reflow-true/
* Numerical values
* @sample {highcharts} highcharts/series/data-array-of-objects/
* Config objects
*
* @type {Array<number|null|*>}
* @extends series.heatmap.data
* @excluding x, y
* @product highcharts
* @apioption series.treemap.data
*/
/**
* The value of the point, resulting in a relative area of the point
* in the treemap.
*
* @type {number|null}
* @product highcharts
* @apioption series.treemap.data.value
*/
/**
* Serves a purpose only if a `colorAxis` object is defined in the chart
* options. This value will decide which color the point gets from the
* scale of the colorAxis.
*
* @type {number}
* @since 4.1.0
* @product highcharts
* @apioption series.treemap.data.colorValue
*/
/**
* Only for treemap. Use this option to build a tree structure. The
 * value should be the id of the point which is the parent. If no point
 * has a matching id, or this option is undefined, then the parent will
* be set to the root.
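 *
 * A minimal sketch (the ids here are hypothetical):
 * ```js
 * data: [{
 *     id: 'top',
 *     name: 'Top'
 * }, {
 *     parent: 'top',
 *     value: 5
 * }, {
 *     parent: 'top',
 *     value: 3
 * }]
 * ```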
*
* @sample {highcharts} highcharts/point/parent/
* Point parent
* @sample {highcharts} highcharts/demo/treemap-with-levels/
* Example where parent id is not matching
*
* @type {string}
* @since 4.1.0
* @product highcharts
* @apioption series.treemap.data.parent
*/
});
_registerModule(_modules, 'masters/modules/treemap.src.js', [], function () {
});
}));<|fim▁end|> | /**
* Decides which level takes effect from the options set in the levels |
<|file_name|>bigint.go<|end_file_name|><|fim▁begin|>package main
import (
"math/big"<|fim▁hole|>)
func main() {
i := big.NewInt(2)
j := big.NewInt(2)
k := big.NewInt(8)
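	// Exp sets the receiver to i**j mod k (2**2 mod 8 = 4) and returns it,
	// so m and i point at the same big.Int.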
m := i.Exp(i, j, k)
fmt.Printf("%+v,%+v,%+v,%+v\n", i, j, k, m)
// println(i, j, k, m)
fmt.Printf("%v\n", i.Add(i, j))
}<|fim▁end|> | "fmt" |
<|file_name|>nsiqcppstyle_rulemanager.py<|end_file_name|><|fim▁begin|># Copyright (c) 2009 NHN Inc. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of NHN Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os #@UnusedImport
import sys #@UnusedImport
import sre_compile
from nsiqcppstyle_util import * #@UnusedWildImport
class RuleManager :
def __init__(self, runtimePath) :
self.availRuleNames = []
basePath = os.path.join(runtimePath, "rules")
ruleFiles = os.listdir(basePath)
rulePattern = sre_compile.compile("^(.*)\.py$")
for eachRuleFile in ruleFiles :
if os.path.isfile(os.path.join(basePath, eachRuleFile)) :
ruleMatch = rulePattern.match(eachRuleFile)
if ruleMatch != None and eachRuleFile.find("__init__") == -1 :
ruleName = ruleMatch.group(1)
self.availRuleNames.append(ruleName)
self.availRuleCount = len(self.availRuleNames)
self.availRuleModules = {}
self.loadedRule = []
self.rules = []<|fim▁hole|> self.functionNameRules = []
self.functionScopeRules = []
self.typeNameRules = []
self.typeScopeRules = []
self.lineRules = []
self.fileEndRules = []
self.fileStartRules = []
self.projectRules = []
self.rollBackImporter = None
# self.LoadAllRules()
def LoadRules(self, checkingRuleNames, printRule = True):
"""
        Load rules. It resets previously loaded rules before loading the new ones.
"""
self.ResetRules()
self.ResetRegisteredRules()
if self.rollBackImporter != None :
self.rollBackImporter.uninstall()
self.rollBackImporter = RollbackImporter()
if printRule :
print "======================================================================================"
for ruleName in checkingRuleNames :
count = self.availRuleNames.count(ruleName)
if count == 0 :
print "%s does not exist or incompatible." % ruleName
continue
else :
if printRule :
print " - ", ruleName, "is applied."
ruleModule = __import__("rules."+ruleName)
self.loadedRule.append(ruleModule)
if len(self.loadedRule) == 0 :
print " No Rule is specified. Please configure rules in filefilter.txt."
if printRule :
print "======================================================================================"
def ResetRules(self):
self.loadedRule = []
############################################################################
# Rule Runner
############################################################################
def RunPreprocessRule(self, lexer, contextStack):
""" Run rules which runs in the preprecessor blocks """
for preprocessRule in self.preprocessRules :
data = lexer.Backup()
preprocessRule(lexer, contextStack)
lexer.Restore(data)
def RunFunctionNameRule(self, lexer, functionFullName, decl, contextStack, functionContext) :
""" Run rules which runs on the function name """
for eachFunctionNameRule in self.functionNameRules :
data = lexer.Backup()
eachFunctionNameRule(lexer, functionFullName, decl, contextStack, functionContext)
lexer.Restore(data)
def RunFunctionScopeRule(self, lexer, contextStack):
""" Run rules which runs in the function blocks """
for eachFunctionScopeRule in self.functionScopeRules :
data = lexer.Backup()
eachFunctionScopeRule(lexer, contextStack)
lexer.Restore(data)
def RunTypeNameRule(self, lexer, typeName, typeFullName, decl, contextStack, typeContext):
""" Run rules which runs on the type names """
for typeNameRule in self.typeNameRules :
data = lexer.Backup()
typeNameRule(lexer, typeName, typeFullName, decl, contextStack, typeContext)
lexer.Restore(data)
def RunTypeScopeRule(self, lexer, contextStack):
""" Run rules which runs in the type blocks """
for typeScopeRule in self.typeScopeRules :
data = lexer.Backup()
typeScopeRule(lexer, contextStack)
lexer.Restore(data)
def RunRule(self, lexer, contextStack):
""" Run rules which runs in any tokens """
for rule in self.rules :
data = lexer.Backup()
rule(lexer, contextStack)
lexer.Restore(data)
def RunLineRule(self, lexer, line, lineno):
""" Run rules which runs in each lines. """
for lineRule in self.lineRules :
data = lexer.Backup()
lineRule(lexer, line, lineno)
lexer.Restore(data)
def RunFileEndRule(self, lexer, filename, dirname):
""" Run rules which runs at the end of files. """
for fileEndRule in self.fileEndRules :
data = lexer.Backup()
fileEndRule(lexer, filename, dirname)
lexer.Restore(data)
def RunFileStartRule(self, lexer, filename, dirname):
""" Run rules which runs at the start of files. """
for fileStartRule in self.fileStartRules :
data = lexer.Backup()
fileStartRule(lexer, filename, dirname)
lexer.Restore(data)
def RunProjectRules(self, targetName):
""" Run rules which runs once a project. """
for projectRule in self.projectRules :
projectRule(targetName)
############################################################################
    # Rule Register Methods
############################################################################
def ResetRegisteredRules(self):
""" Reset all registered rules. """
del self.functionNameRules[:]
del self.functionScopeRules[:]
del self.lineRules[:]
del self.rules[:]
del self.typeNameRules[:]
del self.typeScopeRules[:]
del self.fileStartRules[:]
del self.fileEndRules[:]
del self.projectRules[:]
del self.preprocessRules[:]
def AddPreprocessRule(self, preprocessRule):
""" Add rule which runs in preprocess statements """
self.preprocessRules.append(preprocessRule)
def AddFunctionScopeRule(self, functionScopeRule):
""" Add rule which runs in function scope """
self.functionScopeRules.append(functionScopeRule)
def AddFunctionNameRule(self, functionRule):
""" Add rule on the function name place"""
self.functionNameRules.append(functionRule)
def AddLineRule(self, lineRule):
""" Add rule on the each line """
self.lineRules.append(lineRule)
def AddRule(self, rule):
""" Add rule on any token """
self.rules.append(rule)
def AddTypeNameRule(self, typeNameRule):
""" Add rule on any type (class / struct / union / namesapce / enum) """
self.typeNameRules.append(typeNameRule)
def AddTypeScopeRule(self, typeScopeRule):
""" Add rule on the any type definition scope """
self.typeScopeRules.append(typeScopeRule)
def AddFileEndRule(self, fileEndRule):
"""
Add rule on the file end
        The added rule should be a function with the prototype "def RunRule(lexer, filename, dirname)".
        lexer is the lexer used to analyze the source; it points to the end token of the source.
filename is the filename analyzed.
dirname is the file directory.
"""
self.fileEndRules.append(fileEndRule)
def AddFileStartRule(self, fileStartRule):
"""
Add rule on the file start
        The added rule should be a function with the prototype "def RunRule(lexer, filename, dirname)".
        lexer is the lexer used to analyze the source; it points to the start token of the source.
filename is the filename analyzed.
dirname is the file directory.
"""
self.fileStartRules.append(fileStartRule)
def AddProjectRules(self, projectRule):
"""
Add rule on the project
        The added rule should be a function with the prototype "def RunRule(targetName)".
targetName is the analysis target directory.
"""
self.projectRules.append(projectRule)
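    # Example (hypothetical rule module): a file under rules/ typically defines
    # a checker function and registers it with the global manager on import:
    #
    #     def RunRule(lexer, line, lineno):
    #         pass  # inspect the line and report violations here
    #
    #     ruleManager.AddLineRule(RunRule)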
class RollbackImporter:
def __init__(self):
"Creates an instance and installs as the global importer"
self.previousModules = sys.modules.copy()
self.realImport = __builtins__["__import__"]
__builtins__["__import__"] = self._import
self.newModules = {}
def _import(self, name, globals=None, locals=None, fromlist=[]):
result = apply(self.realImport, (name, globals, locals, fromlist))
if name.find("rules") != -1 :
self.newModules[name] = 1
return result
def uninstall(self):
for modname in self.newModules.keys():
if modname.find("rules") != -1 :
if not self.previousModules.has_key(modname):
# Force reload when modname next imported
del(sys.modules[modname])
__builtins__["__import__"] = self.realImport
ruleManager = RuleManager(GetRuntimePath())<|fim▁end|> | self.preprocessRules = [] |
<|file_name|>list_store.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use ffi;
use glib::translate::*;
use glib::{Type, ToValue, Value};
use libc::c_int;
use std::ptr;
use ListStore;
use TreeIter;
impl ListStore {
pub fn new(column_types: &[Type]) -> ListStore {
assert_initialized_main_thread!();
unsafe {
let mut column_types = column_types.iter().map(|t| t.to_glib()).collect::<Vec<_>>();
from_glib_full(
ffi::gtk_list_store_newv(column_types.len() as c_int,
column_types.as_mut_ptr()))
}
}
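    // Sketch of intended use (hypothetical values), matching the signature
    // below: create a store with its column types, then append a row by
    // (column index, value) pairs:
    //
    //     let store = ListStore::new(&[Type::String, Type::U32]);
    //     store.insert_with_values(None, &[0, 1], &[&"name", &42u32]);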
pub fn insert_with_values(&self, position: Option<u32>, columns: &[u32], values: &[&ToValue])
-> TreeIter {
unsafe {
assert!(position.unwrap_or(0) <= i32::max_value() as u32);
assert!(columns.len() == values.len());
let n_columns = ffi::gtk_tree_model_get_n_columns(self.to_glib_none().0) as u32;
assert!(columns.len() <= n_columns as usize);
for (&column, value) in columns.iter().zip(values.iter()) {
assert!(column < n_columns);
let type_ = from_glib(
ffi::gtk_tree_model_get_column_type(self.to_glib_none().0, column as c_int));
assert!(Value::type_transformable(value.to_value_type(), type_));
}
let mut iter = TreeIter::uninitialized();
ffi::gtk_list_store_insert_with_valuesv(self.to_glib_none().0,
iter.to_glib_none_mut().0,
position.map_or(-1, |n| n as c_int),
mut_override(columns.as_ptr() as *const c_int),
values.to_glib_none().0,
columns.len() as c_int);
iter
}
}
pub fn reorder(&self, new_order: &[u32]) {
unsafe {
let count = ffi::gtk_tree_model_iter_n_children(self.to_glib_none().0, ptr::null_mut());
let safe_count = count as usize == new_order.len();
debug_assert!(safe_count,
"Incorrect `new_order` slice length. Expected `{}`, found `{}`.",
count,
new_order.len());
let safe_values = new_order.iter()
.max()
.map_or(true, |&max| {
let max = max as i32;
max >= 0 && max < count
});
debug_assert!(safe_values,
"Some `new_order` slice values are out of range. Maximum safe value: \
`{}`. The slice contents: `{:?}`",
count - 1,
new_order);
if safe_count && safe_values {
ffi::gtk_list_store_reorder(self.to_glib_none().0,
mut_override(new_order.as_ptr() as *const c_int));
}
}
}
pub fn set(&self, iter: &TreeIter, columns: &[u32], values: &[&ToValue]) {
unsafe {
assert!(columns.len() == values.len());<|fim▁hole|> let type_ = from_glib(
ffi::gtk_tree_model_get_column_type(self.to_glib_none().0, column as c_int));
assert!(Value::type_transformable(value.to_value_type(), type_));
}
ffi::gtk_list_store_set_valuesv(self.to_glib_none().0,
mut_override(iter.to_glib_none().0),
mut_override(columns.as_ptr() as *const c_int),
values.to_glib_none().0,
columns.len() as c_int);
}
}
pub fn set_value(&self, iter: &TreeIter, column: u32, value: &Value) {
unsafe {
let columns = ffi::gtk_tree_model_get_n_columns(self.to_glib_none().0);
assert!(column < columns as u32);
let type_ = from_glib(
ffi::gtk_tree_model_get_column_type(self.to_glib_none().0, column as c_int));
assert!(Value::type_transformable(value.type_(), type_));
ffi::gtk_list_store_set_value(self.to_glib_none().0,
mut_override(iter.to_glib_none().0), column as c_int,
mut_override(value.to_glib_none().0));
}
}
}<|fim▁end|> | let n_columns = ffi::gtk_tree_model_get_n_columns(self.to_glib_none().0) as u32;
assert!(columns.len() <= n_columns as usize);
for (&column, value) in columns.iter().zip(values.iter()) {
assert!(column < n_columns); |
<|file_name|>main.py<|end_file_name|><|fim▁begin|># HRGRN WebServices
# Copyright (C) 2016 Xinbin Dai, Irina Belyaeva
# This file is part of HRGRN WebServices API.
#
# HRGRN API is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# HRGRN API is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with HRGRN API. If not, see <http://www.gnu.org/licenses/>.
"""
Main Module
"""
import json
import requests
import logging
import timer as timer
from requests.exceptions import ConnectionError
from requests import Session
import service as svc
import request_handler as rh
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
# This function acts as a list endpoint
def list(args):
session = Session()
# get service url
svc_url = svc.get_svc_base_url()
params = {'listall': 'T', 'format':'json'}
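    # query parameters: ask the service for the full listing ('listall=T') as JSON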
try:
with timer.Timer() as t:
log.info("Service URL:" + svc_url)
# execute request
response = rh.build_payload(svc_url, params, session)
log.debug(response)
if (response):
for item in response:
print json.dumps(item, indent=3)
print '---'
else:
raise Exception("Response cannot be null!")
except ValueError as e:<|fim▁hole|> log.error(error_msg, exc_info=True)
raise Exception(error_msg)
except requests.exceptions.HTTPError as e:
error_msg = "HTTPError Exception:" + e.message
log.error(error_msg, exc_info=True)
raise Exception(error_msg)
except ConnectionError as e:
error_msg = "ConnectionError Exception:" + e.message
log.error(error_msg, exc_info=True)
raise Exception(error_msg)
except Exception as e:
error_msg = "GenericError Exception:" + e.message
log.error(error_msg, exc_info=True)
raise Exception(error_msg)
finally:
log.info('Request took %.03f sec.' % t.interval)<|fim▁end|> | error_msg = "ValueError Exception:" + e.message |
<|file_name|>test_memoize_1.py<|end_file_name|><|fim▁begin|>import argparse
import parsl
from parsl.app.app import python_app
from parsl.tests.configs.local_threads import config
@python_app(cache=True)
def random_uuid(x, cache=True):
import uuid
return str(uuid.uuid4())
def test_python_memoization(n=2):
"""Testing python memoization disable
"""
x = random_uuid(0)
print(x.result())
for i in range(0, n):
foo = random_uuid(0)
print(foo.result())<|fim▁hole|>
if __name__ == '__main__':
parsl.clear()
parsl.load(config)
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--count", default="10",
help="Count of apps to launch")
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
x = test_python_memoization(n=4)<|fim▁end|> | assert foo.result() == x.result(), "Memoized results were not used" |
<|file_name|>test_output.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import pytest
# pylint: disable=attribute-defined-outside-init
class TestOutput(object):
@pytest.fixture(autouse=True)
def init(self, ssh_audit):
self.Output = ssh_audit.Output
self.OutputBuffer = ssh_audit.OutputBuffer
def test_output_buffer_no_lines(self, output_spy):
output_spy.begin()
with self.OutputBuffer() as obuf:
pass
assert output_spy.flush() == []
output_spy.begin()
with self.OutputBuffer() as obuf:
pass
obuf.flush()
assert output_spy.flush() == []
def test_output_buffer_no_flush(self, output_spy):
output_spy.begin()
with self.OutputBuffer():
print(u'abc')
assert output_spy.flush() == []
def test_output_buffer_flush(self, output_spy):
output_spy.begin()
with self.OutputBuffer() as obuf:
print(u'abc')
print()
print(u'def')
obuf.flush()
assert output_spy.flush() == [u'abc', u'', u'def']
def test_output_defaults(self):
out = self.Output()
        # defaults: batch off, colors on, minimum level 'info'
assert out.batch is False
assert out.colors is True
assert out.minlevel == 'info'
def test_output_colors(self, output_spy):
out = self.Output()
# test without colors
out.colors = False
output_spy.begin()
out.info('info color')
assert output_spy.flush() == [u'info color']
output_spy.begin()
out.head('head color')
assert output_spy.flush() == [u'head color']
output_spy.begin()
out.good('good color')
assert output_spy.flush() == [u'good color']
output_spy.begin()
out.warn('warn color')
assert output_spy.flush() == [u'warn color']
output_spy.begin()
out.fail('fail color')
assert output_spy.flush() == [u'fail color']
if not out.colors_supported:
return
# test with colors
out.colors = True
output_spy.begin()
out.info('info color')
assert output_spy.flush() == [u'info color']
output_spy.begin()
out.head('head color')
assert output_spy.flush() == [u'\x1b[0;36mhead color\x1b[0m']
output_spy.begin()
out.good('good color')
assert output_spy.flush() == [u'\x1b[0;32mgood color\x1b[0m']
output_spy.begin()
out.warn('warn color')
assert output_spy.flush() == [u'\x1b[0;33mwarn color\x1b[0m']
output_spy.begin()
out.fail('fail color')
assert output_spy.flush() == [u'\x1b[0;31mfail color\x1b[0m']
def test_output_sep(self, output_spy):
out = self.Output()
output_spy.begin()
out.sep()
out.sep()
out.sep()
assert output_spy.flush() == [u'', u'', u'']
def test_output_levels(self):
out = self.Output()
assert out.getlevel('info') == 0
assert out.getlevel('good') == 0
assert out.getlevel('warn') == 1
assert out.getlevel('fail') == 2
assert out.getlevel('unknown') > 2
def test_output_minlevel_property(self):
out = self.Output()
out.minlevel = 'info'
assert out.minlevel == 'info'
out.minlevel = 'good'
assert out.minlevel == 'info'
out.minlevel = 'warn'
assert out.minlevel == 'warn'
out.minlevel = 'fail'
assert out.minlevel == 'fail'
out.minlevel = 'invalid level'
assert out.minlevel == 'unknown'
def test_output_minlevel(self, output_spy):
out = self.Output()
# visible: all
out.minlevel = 'info'
output_spy.begin()
out.info('info color')
out.head('head color')
out.good('good color')
out.warn('warn color')
out.fail('fail color')
assert len(output_spy.flush()) == 5
# visible: head, warn, fail
out.minlevel = 'warn'
output_spy.begin()
out.info('info color')
out.head('head color')
out.good('good color')
out.warn('warn color')
out.fail('fail color')
assert len(output_spy.flush()) == 3
# visible: head, fail
out.minlevel = 'fail'
output_spy.begin()
out.info('info color')
out.head('head color')
out.good('good color')
out.warn('warn color')
out.fail('fail color')
assert len(output_spy.flush()) == 2
# visible: head
out.minlevel = 'invalid level'
output_spy.begin()
out.info('info color')
out.head('head color')
out.good('good color')
out.warn('warn color')
out.fail('fail color')
assert len(output_spy.flush()) == 1
def test_output_batch(self, output_spy):
out = self.Output()
# visible: all
output_spy.begin()
out.minlevel = 'info'
out.batch = False
out.info('info color')
out.head('head color')
out.good('good color')
out.warn('warn color')
out.fail('fail color')
assert len(output_spy.flush()) == 5
# visible: all except head
output_spy.begin()
out.minlevel = 'info'
out.batch = True<|fim▁hole|> out.head('head color')
out.good('good color')
out.warn('warn color')
out.fail('fail color')
assert len(output_spy.flush()) == 4<|fim▁end|> | out.info('info color') |
<|file_name|>312_test_fractions.py<|end_file_name|><|fim▁begin|>"""Tests for Lib/fractions.py."""
from decimal import Decimal
from test.support import run_unittest
import math
import numbers
import operator
import fractions
import unittest
from copy import copy, deepcopy
from pickle import dumps, loads
F = fractions.Fraction
gcd = fractions.gcd
class DummyFloat(object):
"""Dummy float class for testing comparisons with Fractions"""
def __init__(self, value):
if not isinstance(value, float):
raise TypeError("DummyFloat can only be initialized from float")
self.value = value
def _richcmp(self, other, op):
if isinstance(other, numbers.Rational):
return op(F.from_float(self.value), other)
elif isinstance(other, DummyFloat):
return op(self.value, other.value)
else:
return NotImplemented
def __eq__(self, other): return self._richcmp(other, operator.eq)
def __le__(self, other): return self._richcmp(other, operator.le)
def __lt__(self, other): return self._richcmp(other, operator.lt)
def __ge__(self, other): return self._richcmp(other, operator.ge)
def __gt__(self, other): return self._richcmp(other, operator.gt)
# shouldn't be calling __float__ at all when doing comparisons
def __float__(self):
assert False, "__float__ should not be invoked for comparisons"
# same goes for subtraction
def __sub__(self, other):
assert False, "__sub__ should not be invoked for comparisons"
__rsub__ = __sub__
class DummyRational(object):
"""Test comparison of Fraction with a naive rational implementation."""
def __init__(self, num, den):
g = gcd(num, den)
self.num = num // g
self.den = den // g
def __eq__(self, other):
if isinstance(other, fractions.Fraction):
return (self.num == other._numerator and
self.den == other._denominator)
else:
return NotImplemented
def __lt__(self, other):
return(self.num * other._denominator < self.den * other._numerator)
def __gt__(self, other):
return(self.num * other._denominator > self.den * other._numerator)
def __le__(self, other):
return(self.num * other._denominator <= self.den * other._numerator)
def __ge__(self, other):
return(self.num * other._denominator >= self.den * other._numerator)
# this class is for testing comparisons; conversion to float
# should never be used for a comparison, since it loses accuracy
def __float__(self):
assert False, "__float__ should not be invoked"
class GcdTest(unittest.TestCase):
def testMisc(self):
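        # fractions.gcd takes the sign of its second argument, or of the
        # first one when the second is zero.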
self.assertEquals(0, gcd(0, 0))
self.assertEquals(1, gcd(1, 0))
self.assertEquals(-1, gcd(-1, 0))
self.assertEquals(1, gcd(0, 1))
self.assertEquals(-1, gcd(0, -1))
self.assertEquals(1, gcd(7, 1))
self.assertEquals(-1, gcd(7, -1))
self.assertEquals(1, gcd(-23, 15))
self.assertEquals(12, gcd(120, 84))
self.assertEquals(-12, gcd(84, -120))
def _components(r):
return (r.numerator, r.denominator)
class FractionTest(unittest.TestCase):
def assertTypedEquals(self, expected, actual):
"""Asserts that both the types and values are the same."""
self.assertEquals(type(expected), type(actual))
self.assertEquals(expected, actual)
def assertRaisesMessage(self, exc_type, message,
callable, *args, **kwargs):
"""Asserts that callable(*args, **kwargs) raises exc_type(message)."""
try:
callable(*args, **kwargs)
except exc_type as e:
self.assertEquals(message, str(e))
else:
self.fail("%s not raised" % exc_type.__name__)
def testInit(self):
self.assertEquals((0, 1), _components(F()))
self.assertEquals((7, 1), _components(F(7)))
self.assertEquals((7, 3), _components(F(F(7, 3))))
self.assertEquals((-1, 1), _components(F(-1, 1)))
self.assertEquals((-1, 1), _components(F(1, -1)))
self.assertEquals((1, 1), _components(F(-2, -2)))
self.assertEquals((1, 2), _components(F(5, 10)))
self.assertEquals((7, 15), _components(F(7, 15)))
self.assertEquals((10**23, 1), _components(F(10**23)))
self.assertEquals((3, 77), _components(F(F(3, 7), 11)))
self.assertEquals((-9, 5), _components(F(2, F(-10, 9))))
self.assertEquals((2486, 2485), _components(F(F(22, 7), F(355, 113))))
self.assertRaisesMessage(ZeroDivisionError, "Fraction(12, 0)",
F, 12, 0)
self.assertRaises(TypeError, F, 1.5)
self.assertRaises(TypeError, F, 1.5 + 3j)
self.assertRaises(TypeError, F, "3/2", 3)
self.assertRaises(TypeError, F, 3, 0j)
self.assertRaises(TypeError, F, 3, 1j)
def testFromString(self):
self.assertEquals((5, 1), _components(F("5")))
self.assertEquals((3, 2), _components(F("3/2")))
self.assertEquals((3, 2), _components(F(" \n +3/2")))
self.assertEquals((-3, 2), _components(F("-3/2 ")))
self.assertEquals((13, 2), _components(F(" 013/02 \n ")))
self.assertEquals((16, 5), _components(F(" 3.2 ")))
self.assertEquals((-16, 5), _components(F(" -3.2 ")))
self.assertEquals((-3, 1), _components(F(" -3. ")))
self.assertEquals((3, 5), _components(F(" .6 ")))
self.assertEquals((1, 3125), _components(F("32.e-5")))
self.assertEquals((1000000, 1), _components(F("1E+06")))
self.assertEquals((-12300, 1), _components(F("-1.23e4")))
self.assertEquals((0, 1), _components(F(" .0e+0\t")))
self.assertEquals((0, 1), _components(F("-0.000e0")))
self.assertRaisesMessage(
ZeroDivisionError, "Fraction(3, 0)",
F, "3/0")
self.assertRaisesMessage(
ValueError, "Invalid literal for Fraction: '3/'",
F, "3/")
self.assertRaisesMessage(
ValueError, "Invalid literal for Fraction: '/2'",
F, "/2")
self.assertRaisesMessage(
ValueError, "Invalid literal for Fraction: '3 /2'",
F, "3 /2")
self.assertRaisesMessage(
# Denominators don't need a sign.
ValueError, "Invalid literal for Fraction: '3/+2'",
F, "3/+2")
self.assertRaisesMessage(
# Imitate float's parsing.
ValueError, "Invalid literal for Fraction: '+ 3/2'",
F, "+ 3/2")
self.assertRaisesMessage(
# Avoid treating '.' as a regex special character.
ValueError, "Invalid literal for Fraction: '3a2'",
F, "3a2")
self.assertRaisesMessage(
# Don't accept combinations of decimals and rationals.
ValueError, "Invalid literal for Fraction: '3/7.2'",<|fim▁hole|> self.assertRaisesMessage(
# Don't accept combinations of decimals and rationals.
ValueError, "Invalid literal for Fraction: '3.2/7'",
F, "3.2/7")
self.assertRaisesMessage(
# Allow 3. and .3, but not .
ValueError, "Invalid literal for Fraction: '.'",
F, ".")
def testImmutable(self):
r = F(7, 3)
r.__init__(2, 15)
self.assertEquals((7, 3), _components(r))
self.assertRaises(AttributeError, setattr, r, 'numerator', 12)
self.assertRaises(AttributeError, setattr, r, 'denominator', 6)
self.assertEquals((7, 3), _components(r))
# But if you _really_ need to:
r._numerator = 4
r._denominator = 2
self.assertEquals((4, 2), _components(r))
# Which breaks some important operations:
self.assertNotEquals(F(4, 2), r)
def testFromFloat(self):
self.assertRaises(TypeError, F.from_float, 3+4j)
self.assertEquals((10, 1), _components(F.from_float(10)))
bigint = 1234567890123456789
self.assertEquals((bigint, 1), _components(F.from_float(bigint)))
self.assertEquals((0, 1), _components(F.from_float(-0.0)))
self.assertEquals((10, 1), _components(F.from_float(10.0)))
self.assertEquals((-5, 2), _components(F.from_float(-2.5)))
self.assertEquals((99999999999999991611392, 1),
_components(F.from_float(1e23)))
self.assertEquals(float(10**23), float(F.from_float(1e23)))
self.assertEquals((3602879701896397, 1125899906842624),
_components(F.from_float(3.2)))
self.assertEquals(3.2, float(F.from_float(3.2)))
inf = 1e1000
nan = inf - inf
self.assertRaisesMessage(
TypeError, "Cannot convert inf to Fraction.",
F.from_float, inf)
self.assertRaisesMessage(
TypeError, "Cannot convert -inf to Fraction.",
F.from_float, -inf)
self.assertRaisesMessage(
TypeError, "Cannot convert nan to Fraction.",
F.from_float, nan)
def testFromDecimal(self):
self.assertRaises(TypeError, F.from_decimal, 3+4j)
self.assertEquals(F(10, 1), F.from_decimal(10))
self.assertEquals(F(0), F.from_decimal(Decimal("-0")))
self.assertEquals(F(5, 10), F.from_decimal(Decimal("0.5")))
self.assertEquals(F(5, 1000), F.from_decimal(Decimal("5e-3")))
self.assertEquals(F(5000), F.from_decimal(Decimal("5e3")))
self.assertEquals(1 - F(1, 10**30),
F.from_decimal(Decimal("0." + "9" * 30)))
self.assertRaisesMessage(
TypeError, "Cannot convert Infinity to Fraction.",
F.from_decimal, Decimal("inf"))
self.assertRaisesMessage(
TypeError, "Cannot convert -Infinity to Fraction.",
F.from_decimal, Decimal("-inf"))
self.assertRaisesMessage(
TypeError, "Cannot convert NaN to Fraction.",
F.from_decimal, Decimal("nan"))
self.assertRaisesMessage(
TypeError, "Cannot convert sNaN to Fraction.",
F.from_decimal, Decimal("snan"))
def testLimitDenominator(self):
rpi = F('3.1415926535897932')
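        # 355/113 is the classic best rational approximation of pi with
        # denominator at most 10000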
self.assertEqual(rpi.limit_denominator(10000), F(355, 113))
self.assertEqual(-rpi.limit_denominator(10000), F(-355, 113))
self.assertEqual(rpi.limit_denominator(113), F(355, 113))
self.assertEqual(rpi.limit_denominator(112), F(333, 106))
self.assertEqual(F(201, 200).limit_denominator(100), F(1))
self.assertEqual(F(201, 200).limit_denominator(101), F(102, 101))
self.assertEqual(F(0).limit_denominator(10000), F(0))
def testConversions(self):
self.assertTypedEquals(-1, math.trunc(F(-11, 10)))
self.assertTypedEquals(-2, math.floor(F(-11, 10)))
self.assertTypedEquals(-1, math.ceil(F(-11, 10)))
self.assertTypedEquals(-1, math.ceil(F(-10, 10)))
self.assertTypedEquals(-1, int(F(-11, 10)))
self.assertTypedEquals(0, round(F(-1, 10)))
self.assertTypedEquals(0, round(F(-5, 10)))
self.assertTypedEquals(-2, round(F(-15, 10)))
self.assertTypedEquals(-1, round(F(-7, 10)))
self.assertEquals(False, bool(F(0, 1)))
self.assertEquals(True, bool(F(3, 2)))
self.assertTypedEquals(0.1, float(F(1, 10)))
# Check that __float__ isn't implemented by converting the
# numerator and denominator to float before dividing.
self.assertRaises(OverflowError, float, int('2'*400+'7'))
self.assertAlmostEquals(2.0/3,
float(F(int('2'*400+'7'), int('3'*400+'1'))))
self.assertTypedEquals(0.1+0j, complex(F(1,10)))
def testRound(self):
self.assertTypedEquals(F(-200), round(F(-150), -2))
self.assertTypedEquals(F(-200), round(F(-250), -2))
self.assertTypedEquals(F(30), round(F(26), -1))
self.assertTypedEquals(F(-2, 10), round(F(-15, 100), 1))
self.assertTypedEquals(F(-2, 10), round(F(-25, 100), 1))
def testArithmetic(self):
self.assertEquals(F(1, 2), F(1, 10) + F(2, 5))
self.assertEquals(F(-3, 10), F(1, 10) - F(2, 5))
self.assertEquals(F(1, 25), F(1, 10) * F(2, 5))
self.assertEquals(F(1, 4), F(1, 10) / F(2, 5))
self.assertTypedEquals(2, F(9, 10) // F(2, 5))
self.assertTypedEquals(10**23, F(10**23, 1) // F(1))
self.assertEquals(F(2, 3), F(-7, 3) % F(3, 2))
self.assertEquals(F(8, 27), F(2, 3) ** F(3))
self.assertEquals(F(27, 8), F(2, 3) ** F(-3))
self.assertTypedEquals(2.0, F(4) ** F(1, 2))
z = pow(F(-1), F(1, 2))
self.assertAlmostEquals(z.real, 0)
self.assertEquals(z.imag, 1)
def testMixedArithmetic(self):
self.assertTypedEquals(F(11, 10), F(1, 10) + 1)
self.assertTypedEquals(1.1, F(1, 10) + 1.0)
self.assertTypedEquals(1.1 + 0j, F(1, 10) + (1.0 + 0j))
self.assertTypedEquals(F(11, 10), 1 + F(1, 10))
self.assertTypedEquals(1.1, 1.0 + F(1, 10))
self.assertTypedEquals(1.1 + 0j, (1.0 + 0j) + F(1, 10))
self.assertTypedEquals(F(-9, 10), F(1, 10) - 1)
self.assertTypedEquals(-0.9, F(1, 10) - 1.0)
self.assertTypedEquals(-0.9 + 0j, F(1, 10) - (1.0 + 0j))
self.assertTypedEquals(F(9, 10), 1 - F(1, 10))
self.assertTypedEquals(0.9, 1.0 - F(1, 10))
self.assertTypedEquals(0.9 + 0j, (1.0 + 0j) - F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) * 1)
self.assertTypedEquals(0.1, F(1, 10) * 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) * (1.0 + 0j))
self.assertTypedEquals(F(1, 10), 1 * F(1, 10))
self.assertTypedEquals(0.1, 1.0 * F(1, 10))
self.assertTypedEquals(0.1 + 0j, (1.0 + 0j) * F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) / 1)
self.assertTypedEquals(0.1, F(1, 10) / 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) / (1.0 + 0j))
self.assertTypedEquals(F(10, 1), 1 / F(1, 10))
self.assertTypedEquals(10.0, 1.0 / F(1, 10))
self.assertTypedEquals(10.0 + 0j, (1.0 + 0j) / F(1, 10))
self.assertTypedEquals(0, F(1, 10) // 1)
self.assertTypedEquals(0, F(1, 10) // 1.0)
self.assertTypedEquals(10, 1 // F(1, 10))
self.assertTypedEquals(10**23, 10**22 // F(1, 10))
self.assertTypedEquals(10, 1.0 // F(1, 10))
self.assertTypedEquals(F(1, 10), F(1, 10) % 1)
self.assertTypedEquals(0.1, F(1, 10) % 1.0)
self.assertTypedEquals(F(0, 1), 1 % F(1, 10))
self.assertTypedEquals(0.0, 1.0 % F(1, 10))
# No need for divmod since we don't override it.
# ** has more interesting conversion rules.
self.assertTypedEquals(F(100, 1), F(1, 10) ** -2)
self.assertTypedEquals(F(100, 1), F(10, 1) ** 2)
self.assertTypedEquals(0.1, F(1, 10) ** 1.0)
self.assertTypedEquals(0.1 + 0j, F(1, 10) ** (1.0 + 0j))
self.assertTypedEquals(4 , 2 ** F(2, 1))
z = pow(-1, F(1, 2))
self.assertAlmostEquals(0, z.real)
self.assertEquals(1, z.imag)
self.assertTypedEquals(F(1, 4) , 2 ** F(-2, 1))
self.assertTypedEquals(2.0 , 4 ** F(1, 2))
self.assertTypedEquals(0.25, 2.0 ** F(-2, 1))
self.assertTypedEquals(1.0 + 0j, (1.0 + 0j) ** F(1, 10))
def testMixingWithDecimal(self):
# Decimal refuses mixed comparisons.
self.assertRaisesMessage(
TypeError,
"unsupported operand type(s) for +: 'Fraction' and 'Decimal'",
operator.add, F(3,11), Decimal('3.1415926'))
self.assertNotEquals(F(5, 2), Decimal('2.5'))
def testComparisons(self):
self.assertTrue(F(1, 2) < F(2, 3))
self.assertFalse(F(1, 2) < F(1, 2))
self.assertTrue(F(1, 2) <= F(2, 3))
self.assertTrue(F(1, 2) <= F(1, 2))
self.assertFalse(F(2, 3) <= F(1, 2))
self.assertTrue(F(1, 2) == F(1, 2))
self.assertFalse(F(1, 2) == F(1, 3))
self.assertFalse(F(1, 2) != F(1, 2))
self.assertTrue(F(1, 2) != F(1, 3))
def testComparisonsDummyRational(self):
self.assertTrue(F(1, 2) == DummyRational(1, 2))
self.assertTrue(DummyRational(1, 2) == F(1, 2))
self.assertFalse(F(1, 2) == DummyRational(3, 4))
self.assertFalse(DummyRational(3, 4) == F(1, 2))
self.assertTrue(F(1, 2) < DummyRational(3, 4))
self.assertFalse(F(1, 2) < DummyRational(1, 2))
self.assertFalse(F(1, 2) < DummyRational(1, 7))
self.assertFalse(F(1, 2) > DummyRational(3, 4))
self.assertFalse(F(1, 2) > DummyRational(1, 2))
self.assertTrue(F(1, 2) > DummyRational(1, 7))
self.assertTrue(F(1, 2) <= DummyRational(3, 4))
self.assertTrue(F(1, 2) <= DummyRational(1, 2))
self.assertFalse(F(1, 2) <= DummyRational(1, 7))
self.assertFalse(F(1, 2) >= DummyRational(3, 4))
self.assertTrue(F(1, 2) >= DummyRational(1, 2))
self.assertTrue(F(1, 2) >= DummyRational(1, 7))
self.assertTrue(DummyRational(1, 2) < F(3, 4))
self.assertFalse(DummyRational(1, 2) < F(1, 2))
self.assertFalse(DummyRational(1, 2) < F(1, 7))
self.assertFalse(DummyRational(1, 2) > F(3, 4))
self.assertFalse(DummyRational(1, 2) > F(1, 2))
self.assertTrue(DummyRational(1, 2) > F(1, 7))
self.assertTrue(DummyRational(1, 2) <= F(3, 4))
self.assertTrue(DummyRational(1, 2) <= F(1, 2))
self.assertFalse(DummyRational(1, 2) <= F(1, 7))
self.assertFalse(DummyRational(1, 2) >= F(3, 4))
self.assertTrue(DummyRational(1, 2) >= F(1, 2))
self.assertTrue(DummyRational(1, 2) >= F(1, 7))
def testComparisonsDummyFloat(self):
x = DummyFloat(1./3.)
y = F(1, 3)
self.assertTrue(x != y)
self.assertTrue(x < y or x > y)
self.assertFalse(x == y)
self.assertFalse(x <= y and x >= y)
self.assertTrue(y != x)
self.assertTrue(y < x or y > x)
self.assertFalse(y == x)
self.assertFalse(y <= x and y >= x)
def testMixedLess(self):
self.assertTrue(2 < F(5, 2))
self.assertFalse(2 < F(4, 2))
self.assertTrue(F(5, 2) < 3)
self.assertFalse(F(4, 2) < 2)
self.assertTrue(F(1, 2) < 0.6)
self.assertFalse(F(1, 2) < 0.4)
self.assertTrue(0.4 < F(1, 2))
self.assertFalse(0.5 < F(1, 2))
self.assertFalse(float('inf') < F(1, 2))
self.assertTrue(float('-inf') < F(0, 10))
self.assertFalse(float('nan') < F(-3, 7))
self.assertTrue(F(1, 2) < float('inf'))
self.assertFalse(F(17, 12) < float('-inf'))
self.assertFalse(F(144, -89) < float('nan'))
def testMixedLessEqual(self):
self.assertTrue(0.5 <= F(1, 2))
self.assertFalse(0.6 <= F(1, 2))
self.assertTrue(F(1, 2) <= 0.5)
self.assertFalse(F(1, 2) <= 0.4)
self.assertTrue(2 <= F(4, 2))
self.assertFalse(2 <= F(3, 2))
self.assertTrue(F(4, 2) <= 2)
self.assertFalse(F(5, 2) <= 2)
self.assertFalse(float('inf') <= F(1, 2))
self.assertTrue(float('-inf') <= F(0, 10))
self.assertFalse(float('nan') <= F(-3, 7))
self.assertTrue(F(1, 2) <= float('inf'))
self.assertFalse(F(17, 12) <= float('-inf'))
self.assertFalse(F(144, -89) <= float('nan'))
def testBigFloatComparisons(self):
# Because 10**23 can't be represented exactly as a float:
self.assertFalse(F(10**23) == float(10**23))
# The first test demonstrates why these are important.
self.assertFalse(1e23 < float(F(math.trunc(1e23) + 1)))
self.assertTrue(1e23 < F(math.trunc(1e23) + 1))
self.assertFalse(1e23 <= F(math.trunc(1e23) - 1))
self.assertTrue(1e23 > F(math.trunc(1e23) - 1))
self.assertFalse(1e23 >= F(math.trunc(1e23) + 1))
def testBigComplexComparisons(self):
self.assertFalse(F(10**23) == complex(10**23))
self.assertTrue(F(10**23) > complex(10**23))
self.assertFalse(F(10**23) <= complex(10**23))
def testMixedEqual(self):
self.assertTrue(0.5 == F(1, 2))
self.assertFalse(0.6 == F(1, 2))
self.assertTrue(F(1, 2) == 0.5)
self.assertFalse(F(1, 2) == 0.4)
self.assertTrue(2 == F(4, 2))
self.assertFalse(2 == F(3, 2))
self.assertTrue(F(4, 2) == 2)
self.assertFalse(F(5, 2) == 2)
self.assertFalse(F(5, 2) == float('nan'))
self.assertFalse(float('nan') == F(3, 7))
self.assertFalse(F(5, 2) == float('inf'))
self.assertFalse(float('-inf') == F(2, 5))
def testStringification(self):
self.assertEquals("Fraction(7, 3)", repr(F(7, 3)))
self.assertEquals("Fraction(6283185307, 2000000000)",
repr(F('3.1415926535')))
self.assertEquals("Fraction(-1, 100000000000000000000)",
repr(F(1, -10**20)))
self.assertEquals("7/3", str(F(7, 3)))
self.assertEquals("7", str(F(7, 1)))
def testHash(self):
self.assertEquals(hash(2.5), hash(F(5, 2)))
self.assertEquals(hash(10**50), hash(F(10**50)))
self.assertNotEquals(hash(float(10**23)), hash(F(10**23)))
def testApproximatePi(self):
# Algorithm borrowed from
# http://docs.python.org/lib/decimal-recipes.html
three = F(3)
lasts, t, s, n, na, d, da = 0, three, 3, 1, 0, 0, 24
while abs(s - lasts) > F(1, 10**9):
lasts = s
n, na = n+na, na+8
d, da = d+da, da+32
t = (t * n) / d
s += t
self.assertAlmostEquals(math.pi, s)
def testApproximateCos1(self):
# Algorithm borrowed from
# http://docs.python.org/lib/decimal-recipes.html
x = F(1)
i, lasts, s, fact, num, sign = 0, 0, F(1), 1, 1, 1
while abs(s - lasts) > F(1, 10**9):
lasts = s
i += 2
fact *= i * (i-1)
num *= x * x
sign *= -1
s += num / fact * sign
self.assertAlmostEquals(math.cos(1), s)
def test_copy_deepcopy_pickle(self):
r = F(13, 7)
self.assertEqual(r, loads(dumps(r)))
self.assertEqual(id(r), id(copy(r)))
self.assertEqual(id(r), id(deepcopy(r)))
def test_slots(self):
# Issue 4998
r = F(13, 7)
self.assertRaises(AttributeError, setattr, r, 'a', 10)
def test_main():
run_unittest(FractionTest, GcdTest)
if __name__ == '__main__':
test_main()<|fim▁end|> | F, "3/7.2") |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>/*
Copyright 2020 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"knative.dev/pkg/signals"
"knative.dev/eventing/pkg/adapter/mtping"
"knative.dev/eventing/pkg/adapter/v2"
)
const (
component = "pingsource-mt-adapter"
)
func main() {
sctx := signals.NewContext()
	// When cancelling the adapter too close to the minute, there is
	// a risk of losing events due to either the delay of starting a new pod
	// or the passive pod becoming active (when HA is enabled and replicas > 1).
	// So when receiving a SIGTERM signal, delay the cancellation of the adapter,
	// which under the covers delays the release of the lease.
ctx := mtping.NewDelayingContext(sctx, mtping.GetNoShutDownAfterValue())
<|fim▁hole|><|fim▁end|> | ctx = adapter.WithController(ctx, mtping.NewController)
ctx = adapter.WithHAEnabled(ctx)
adapter.MainWithContext(ctx, component, mtping.NewEnvConfig, mtping.NewAdapter)
} |
<|file_name|>Decorations (32x32).tsx<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="UTF-8"?>
<tileset version="1.5" tiledversion="1.7.1" name="Decorations (32x32)" tilewidth="32" tileheight="32" tilecount="42" columns="7">
<image source="Decorations (32x32).png" width="224" height="192"/>
<tile id="9">
<objectgroup draworder="index" id="2">
<object id="1" type="collision" x="0" y="0" width="32" height="10"/>
</objectgroup>
</tile>
<tile id="10">
<objectgroup draworder="index" id="2">
<object id="1" type="collision" x="0" y="0" width="32" height="10"/>
</objectgroup>
</tile>
<tile id="11">
<objectgroup draworder="index" id="2">
<object id="1" type="collision" x="0" y="0" width="32" height="10"/>
</objectgroup>
</tile>
<tile id="12">
<objectgroup draworder="index" id="2">
<object id="1" type="collision" x="0" y="0" width="32" height="10"/>
</objectgroup>
</tile>
<tile id="16">
<objectgroup draworder="index" id="2">
<object id="1" type="collision" x="0" y="0" width="32" height="15"/>
</objectgroup>
</tile>
<tile id="17">
<objectgroup draworder="index" id="2">
<object id="1" type="collision" x="0" y="0" width="32" height="15"/>
</objectgroup><|fim▁hole|> <objectgroup draworder="index" id="2">
<object id="1" type="collision" x="0" y="0" width="32" height="15"/>
</objectgroup>
</tile>
<tile id="19">
<objectgroup draworder="index" id="2">
<object id="1" type="collision" x="0" y="0" width="32" height="15"/>
</objectgroup>
</tile>
<tile id="25"/>
</tileset><|fim▁end|> | </tile>
<tile id="18"> |
<|file_name|>FlankTest.py<|end_file_name|><|fim▁begin|>import unittest
import numpy as np
from collections import OrderedDict
from gtrackcore.metadata import GenomeInfo
from gtrackcore.track.core.GenomeRegion import GenomeRegion
from gtrackcore.track.format.TrackFormat import TrackFormat
from gtrackcore.track_operations.operations.Flank import Flank
from gtrackcore.track_operations.TrackContents import TrackContents
from gtrackcore.test.track_operations.OperationTest import createTrackView
class FlankTest(unittest.TestCase):
def setUp(self):
self.chr1 = (GenomeRegion('hg19', 'chr1', 0,
GenomeInfo.GENOMES['hg19']['size']['chr1']))
self.chromosomes = (GenomeRegion('hg19', c, 0, l)
for c, l in
GenomeInfo.GENOMES['hg19']['size'].iteritems())
def _runFlankSegmentsTest(self, starts, ends, expStarts, expEnds,
nrBP, after=True, before=True):
"""
Run a test on the creation of a Flank track from a segmented track.
        The test expects segments to be present in chr1 only;
        all other chromosomes need to be of size zero.
        :param starts: Array of starts in track.
        :param ends: Array of ends in track.
        :param expStarts: Expected starts of flanks.
        :param expEnds: Expected ends of flanks.
        :param nrBP: INT. Size of flank in base pairs.
        :param after: Boolean. Create flanks after the segment ends.
        :param before: Boolean. Create flanks before the segment starts.
:return:
"""
track = self._createTrackContent(starts, ends)
f = Flank(track)
# Result track type is Segments as default
f.setFlankSize(nrBP)
f.setAfter(after)
f.setBefore(before)
tc = f()
for (k, v) in tc.getTrackViews().items():
print expStarts
print v.startsAsNumpyArray()
print expEnds
print v.endsAsNumpyArray()
if cmp(k, self.chr1) == 0:
# All test tracks are in chr1
self.assertTrue(np.array_equal(v.startsAsNumpyArray(),
expStarts))
self.assertTrue(np.array_equal(v.endsAsNumpyArray(), expEnds))
else:
                # Tests that all tracks not in chr1 have a size of 0.
self.assertEqual(v.startsAsNumpyArray().size, 0)
self.assertEqual(v.endsAsNumpyArray().size, 0)
def _createTrackContent(self, starts, ends):
"""
        Create a track content object from a start/end list pair.
        Helper method used in testing. This method will create an hg19 track with
data in chromosome 1 only.
:param starts: List of track start positions
:param ends: List of track end positions
:return: A TrackContent object
"""
starts = np.array(starts)
ends = np.array(ends)
tv = createTrackView(region=self.chr1, startList=starts, endList=ends,
allow_overlap=False)<|fim▁hole|> d[self.chr1] = tv
return TrackContents('hg19', d)
# **** Points tests ****
# **** Segments tests ****
def testFlankSimpleBefore(self):
"""
Simple single segment before.
:return: None
"""
self._runFlankSegmentsTest(starts=[100], ends=[150], expStarts=[50],
expEnds=[100], nrBP=50, after=False,
before=True)
def testFlankSimpleAfter(self):
"""
Simple single segment after.
:return: None
"""
self._runFlankSegmentsTest(starts=[100], ends=[150], expStarts=[150],
expEnds=[200], nrBP=50, after=True,
before=False)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | d = OrderedDict() |
<|file_name|>crf.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package crf
# Module caffe2.python.crf
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, recurrent, model_helper, brew
import numpy as np
'''
Due to a limitation in RecurrentNetworkOp, this layer only supports batch_size=1
In order to support batch_size > 1, we will have to implement the CRFUnit
and its gradient in C++ and handle the different batches there.
'''
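# Minimal usage sketch (hypothetical blob names): stack the CRF loss on top of
# per-token unary scores produced by an upstream network, then train as usual:
#
#   model = model_helper.ModelHelper(name='tagger')
#   crf = CRFWithLoss(model, num_classes=10)
#   loss = crf.crf_loss(predictions, labels)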
class CRFWithLoss(object):
def __init__(self, model, num_classes, transitions_blob=None):
self.model = model
self.num_classes = num_classes
self.num_classes_padded = num_classes + 2 # After adding BOS and EOS
if not transitions_blob:
transitions_blob = self.model.param_init_net.UniformFill(
[],
[core.ScopedBlobReference('crf_transitions')],
shape=[self.num_classes_padded, self.num_classes_padded],
min=-1.0,
max=1.0
)
self.transitions = transitions_blob
self.model.params.append(self.transitions)
def crf_loss(self, predictions, labels, seq_lengths=None):
# Since the transitions matrix is a shared parameter, need to
# take a snapshot of it at the beginning since it can be updated
        # in between the operators that use it when doing parallel updates
transitions_snapshot = self.model.net.Copy(
self.transitions, core.ScopedBlobReference('transitions_snapshot')
)
# Compute best path unary score from the logits
path_unary_score = self._gather_entries_sum(
predictions, labels, self.num_classes
)
# Append BOS and EOS entries to the predictions and labels
predictions = self._pad_predictions(predictions)
labels = self._pad_labels(labels)
# Compute best path binary scores from the transitions matrix
path_binary_score = self._path_binary_scores(
labels, transitions_snapshot, seq_lengths
)
path_total_score = self.model.net.Add(
[path_binary_score, path_unary_score],
core.ScopedBlobReference('path_total')
)
# Compute all paths score
zero_index = self.model.param_init_net.ConstantFill(
[], shape=[1], value=0
)
initial_state = self.model.net.Gather(
[predictions, zero_index],
core.ScopedBlobReference('rnn_initial'),
dense_gradient=True
)
input_data, _ = self.model.net.RemovePadding(
[predictions],
padding_width=1,
end_padding_width=0,
outputs=2,
)
input_data = self.model.net.ExpandDims(
[input_data],
core.ScopedBlobReference('rnn_input_data'),
dims=[1]
)
# Due to a bug in RecurrentNetworkGradientOp, we need to copy the
# transitions blob before sending it to the recurrent network
transitions_copy = self.model.net.Copy(
transitions_snapshot, core.ScopedBlobReference('transitions_copy')
)
all_paths_scores = self._crf_forward(
input_data, initial_state, transitions_copy
)
loss = self.model.net.Sub(
[all_paths_scores, path_total_score],
core.ScopedBlobReference('crf_loss')
)
return loss
def _pad_predictions(self, predictions):
        # This function will introduce two labels, for beginning of sequence
        # and end of sequence, and make the necessary updates to the
        # predictions blob
        low_score = -1000.0  # An arbitrary very low number
b_scores = np.array(
[[low_score] * self.num_classes + [0, low_score]]
).astype(np.float32)
e_scores = np.array(
[[low_score] * self.num_classes + [low_score, 0]]
).astype(np.float32)
b_scores = self.model.param_init_net.GivenTensorFill(
[], "b_scores", shape=[1, self.num_classes_padded], values=b_scores
)
e_scores = self.model.param_init_net.GivenTensorFill(
[], "e_scores", shape=[1, self.num_classes_padded], values=e_scores
)
zero_index = self.model.net.ConstantFill(
[], shape=[1, ], value=0
)
length = self.model.net.Gather(
[self.model.net.Shape([predictions]), zero_index],
)
length = self.model.net.Cast(length, to='int32')
t_range = self.model.net.LengthsRangeFill(length)
padding = self.model.net.ConstantFill([t_range], value=low_score)
padding = self.model.net.ExpandDims(padding, dims=[1])
padded_predictions, _ = self.model.net.Concat(
[predictions, padding, padding],
outputs=2,
axis=1
)
padded_predictions_concat, _ = self.model.net.Concat(
[b_scores, padded_predictions, e_scores],
outputs=2,
axis=0
)
return padded_predictions_concat
def _pad_labels(self, labels):
bos_i = self.num_classes
eos_i = self.num_classes + 1
bos_i_b = self.model.param_init_net.ConstantFill(
[], shape=[1], value=bos_i
)
eos_i_b = self.model.param_init_net.ConstantFill(
[], shape=[1], value=eos_i
)
labels = self.model.net.Cast([labels], to='int64')
padded_labels, _ = self.model.net.Concat(
[bos_i_b, labels, eos_i_b],
axis=0,
outputs=2
)
return padded_labels
def _path_binary_scores(self, labels, transitions, seq_lengths=None):
column_ids, _ = self.model.net.RemovePadding(
[labels],
outputs=2,
padding_width=1,
end_padding_width=0
)
row_ids, _ = self.model.net.RemovePadding(
[labels],
outputs=2,
padding_width=0,
end_padding_width=1
)
# Since there is no multi-dimensional gather, I flatten the matrix to
# a 1-d vector and transform the ids to (row_ids * num_columns +
# column_ids) and do gather in 1-d
num_columns_blob = self.model.net.ConstantFill(
[row_ids],
value=self.num_classes_padded,
)
flattened_ids = self.model.net.Mul([row_ids, num_columns_blob])
flattened_ids = self.model.net.Add([flattened_ids, column_ids])
flattened_transitions = self.model.net.FlattenToVec([transitions])
entries = self.model.net.Gather(
[flattened_transitions, flattened_ids],
dense_gradient=True
)
return self.model.ReduceFrontSum(entries)
def _gather_entries_sum(self, in_data, indices, index_size):
indices = self.model.net.Cast([indices], to='int64')
index_size_blob = self.model.param_init_net.ConstantFill(
[],
shape=[1],
value=index_size,
)
query_one_hot = self.model.net.OneHot(
[indices, index_size_blob]
)
flattend_query = self.model.net.FlattenToVec(query_one_hot)
flattend_data = self.model.net.FlattenToVec(in_data)
query_scores = self.model.net.DotProduct(
[flattend_query, flattend_data]
)
final_sum = self.model.net.ReduceFrontSum([query_scores])
return final_sum
def _crf_forward(
self,
input_blob,
initial_state,
transitions_copy,
seq_lengths=None
):
# Build the RNN net and get the last timestep output
out_last = self.build_crf_net(
input_blob, initial_state, transitions_copy
)
out_last, _ = self.model.net.Reshape(
[out_last],
outputs=2,
shape=(self.num_classes_padded,)
)
zero_segment_id = self.model.param_init_net.ConstantFill(
[],
value=0,
shape=[self.num_classes_padded],
dtype=core.DataType.INT32,
)
        # Compute the accumulated total score of all the paths
accum_score = self.model.net.SortedSegmentRangeLogSumExp(
[out_last, zero_segment_id]
)
accum_score, _ = self.model.net.Reshape(
accum_score,
outputs=2,
shape=()
)
return accum_score
def build_crf_net(self, input_blob, initial_state, transitions):
'''
Adds the crf_net recurrent operator to the model.
        model: model_helper.ModelHelper object that new operators would be
        added to
input_blob: the input sequence in a format T x N x D
        where T is sequence size, N - batch size and D - input dimension
##Only supports batch-size 1##
seq_lengths: blob containing sequence lengths (unused)
'''
scope = 'crf_net'
def s(name):
''
# We have to manually scope due to our internal/external blob
# relationships.
return "{}/{}".format(str(scope), str(name))
step_model = model_helper.ModelHelper(name='crf_step',
param_model=self.model)
input_t, cell_t_prev, _ = (
step_model.net.AddExternalInputs(
core.ScopedBlobReference('input_t'),
core.ScopedBlobReference('cell_t_prev'),
transitions
)
)
zero_segment_id = step_model.param_init_net.ConstantFill(
[],
[s('zero_segment_id')],
value=0,
shape=[self.num_classes_padded],
dtype=core.DataType.INT32,
)
# A hack to bypass model cloning for test
step_model.param_init_net.AddExternalOutput(zero_segment_id)
""" the CRF step """
# Do tile
prev_transpose = brew.transpose(
step_model,
cell_t_prev,
[s('prev_transpose')],
axes=(0, 2, 1),<|fim▁hole|> [s('prev_tiled')],
tiles=self.num_classes_padded,
axis=2,
)
input_t_tiled = step_model.net.Tile(
input_t,
[s('input_t_tiled')],
tiles=self.num_classes_padded,
axis=1,
)
input_with_prev = step_model.net.Add(
[prev_tiled, input_t_tiled],
[s('input_with_prev')]
)
all_with_transitions = step_model.net.Add(
[input_with_prev, transitions],
[s('prev_with_transitions')],
broadcast=1,
use_grad_hack=1,
)
all_with_transitions_reshaped, _ = step_model.net.Reshape(
all_with_transitions,
[s('all_with_transitions_reshaped'), s('all_with_transitions_orig')],
shape=(self.num_classes_padded, self.num_classes_padded)
)
cell_t = step_model.net.SortedSegmentRangeLogSumExp(
[all_with_transitions_reshaped, zero_segment_id],
[s('cell_t')],
)
step_model.net.AddExternalOutputs(cell_t)
""" recurrent network """
cell_input_blob = initial_state
out_all, out_last = recurrent.recurrent_net(
net=self.model.net,
cell_net=step_model.net,
inputs=[(input_t, input_blob)],
initial_cell_inputs=[
(cell_t_prev, cell_input_blob),
],
links={
cell_t_prev: cell_t,
},
scope=scope,
outputs_with_grads=(1,)
)
return out_last
def update_predictions(self, classes):
def crf_update_predictions_op(inputs, outputs):
# This operator computes the best path of classes by performing
# Viterbi decoding, then updates the predictions so that the tag
# on the best path has the highest score among the others
predictions = inputs[0].data
transitions = inputs[1].data
predictions_shape = inputs[0].shape
outputs[0].reshape(predictions_shape)
trellis = np.zeros(predictions_shape)
backpointers = np.zeros(predictions_shape, dtype=np.int32)
trellis[0] = predictions[0]
for t in range(1, predictions_shape[0]):
v = np.expand_dims(trellis[t - 1], 1) + transitions
trellis[t] = predictions[t] + np.max(v, 0)
backpointers[t] = np.argmax(v, 0)
viterbi = [np.argmax(trellis[-1])]
for bp in reversed(backpointers[1:]):
viterbi.append(bp[viterbi[-1]])
viterbi.reverse()
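# Hedged worked example of the backtrace above: if trellis[-1] = [1.2, 3.4, 0.5]
# the last tag is argmax -> 1; each backpointers[t] row maps "best tag at t"
# to its predecessor at t - 1, so repeatedly indexing bp[viterbi[-1]] while
# walking backwards recovers the full optimal path.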
new_predictions = np.zeros(predictions_shape)
old_bests = []
for i, w_predictions in enumerate(predictions):
# Get the current tag with the maximum score
new_predictions[i] = predictions[i]
old_best = np.argmax(w_predictions)
old_bests.append(old_best)
# Swap the scores of the current best tag and the tag on the
# Viterbi path
w_predictions[viterbi[i]], w_predictions[old_best] = \
w_predictions[old_best], w_predictions[viterbi[i]]
new_predictions[i] = w_predictions
# Remove the BOS and EOS entries from the predictions matrix
orig_predictions = new_predictions[1:-1, 0:-2]
outputs[0].reshape(orig_predictions.shape)
outputs[0].data[...] = orig_predictions
padded_classes = self._pad_predictions(classes)
new_classes = self.model.net.Python(crf_update_predictions_op)(
[padded_classes, self.transitions],
core.ScopedBlobReference('post_crf_classes')
)
return new_classes<|fim▁end|> | )
prev_tiled = step_model.net.Tile(
prev_transpose, |
<|file_name|>route.js<|end_file_name|><|fim▁begin|><|fim▁hole|>import resetScroll from 'radio4000/mixins/reset-scroll'
export default Route.extend(resetScroll, {})<|fim▁end|> | import Route from '@ember/routing/route' |
<|file_name|>ClientAutoDetectionDiscoveryTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.impl.spi.impl.discovery;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
@RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ClientAutoDetectionDiscoveryTest extends HazelcastTestSupport {
@After
public void tearDown() {
Hazelcast.shutdownAll();
}
@Test
public void defaultDiscovery() {
Hazelcast.newHazelcastInstance();
Hazelcast.newHazelcastInstance();
HazelcastInstance client = HazelcastClient.newHazelcastClient();
assertClusterSizeEventually(2, client);
}
@Test
public void autoDetectionDisabled() {
Config config = new Config();
config.getNetworkConfig().getJoin().getAutoDetectionConfig().setEnabled(false);
Hazelcast.newHazelcastInstance(config);
Hazelcast.newHazelcastInstance(config);
ClientConfig clientConfig = new ClientConfig();
clientConfig.getNetworkConfig().getAutoDetectionConfig().setEnabled(false);
HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
// uses 127.0.0.1 and finds only one standalone member
assertClusterSizeEventually(1, client);
}
@Test
public void autoDetectionNotUsedWhenOtherDiscoveryEnabled() {
Config config = new Config();
config.getNetworkConfig().setPort(5710);
config.getNetworkConfig().getJoin().getAutoDetectionConfig().setEnabled(false);
Hazelcast.newHazelcastInstance(config);
ClientConfig clientConfig = new ClientConfig();
clientConfig.getNetworkConfig().addAddress("127.0.0.1:5710");
HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
assertClusterSizeEventually(1, client);<|fim▁hole|>}<|fim▁end|> | } |
<|file_name|>ag-grid-demo.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit,ViewChild } from '@angular/core';
import { AppService } from '../../app.service';
import {SimpleDataHttpPageComponent} from '../../shared/simple-data-table/simple-data-http-page.component';
@Component({<|fim▁hole|> rowData: any[];
columnDefs: any[];
constructor(private appService: AppService) {
this.appService.titleEventEmitter.emit("ag-grid示例");
this.columnDefs = [
{ headerName: "Make", field: "make", width: 300 },
{ headerName: "Model", field: "model", width: 300 },
{ headerName: "Price", field: "price", width: 300 },
{ headerName: "aaa", field: "aaa", width: 500 }
];
this.rowData = [
{ make: "Toyota", model: "Celica", price: 35000, aaa: '789456' },
{ make: "Ford", model: "Mondeo", price: 32000, aaa: '0123456' },
{ make: "Porsche", model: "Boxter", price: 72000, aaa: '123456' }
]
}
}<|fim▁end|> | selector: 'c-ag-grid-demo',
templateUrl: './ag-grid-demo.component.html'
})
export class AgGridDemoComponent { |
<|file_name|>RDKitSVPanel.cc<|end_file_name|><|fim▁begin|>//
// file RDKitSVPanel.cc
// David Cosgrove
// AstraZeneca
// 20th June 2014
//
#include "RDKitSVPanel.H"
#include "MolDisplay2DWidget.H"<|fim▁hole|>#include <QLayout>
#include <QSlider>
using namespace std;
namespace RDKitSV {
// ****************************************************************************
RDKitSVPanel::RDKitSVPanel(bool left_slider, QWidget *parent, Qt::WindowFlags f)
: QWidget(parent, f) {
build_widget(left_slider);
}
// ****************************************************************************
void RDKitSVPanel::set_molecules(const vector<RDKit::ROMOL_SPTR> &new_mols,
const vector<vector<int>> &highlight_atoms) {
#ifdef NOTYET
cout << "RDKitSVPanel::set_molecules : " << new_mols.size() << endl;
#endif
mols_ = new_mols;
highlight_atoms_ = highlight_atoms;
if (highlight_atoms_.size() != mols_.size()) {
highlight_atoms_.clear();
}
if (mols_.empty()) {
mol_slider_->setDisabled(true);
} else {
mol_slider_->setEnabled(true);
mol_slider_->setRange(0, mols_.size() - 1);
mol_slider_->setValue(0);
}
slot_slider_changed(); // to force picture update
}
// ****************************************************************************
void RDKitSVPanel::set_label(const QString &new_label) {
label_->setText(new_label);
label_->setWordWrap(true);
label_->setAlignment(Qt::AlignHCenter | Qt::AlignVCenter);
if (new_label.isEmpty()) {
label_->hide();
} else {
label_->show();
}
}
// ****************************************************************************
void RDKitSVPanel::build_widget(bool left_slider) {
QHBoxLayout *hbox = new QHBoxLayout;
mol_draw_ = new RDKit::MolDisplay2DWidget;
mol_slider_ = new QSlider;
connect(mol_slider_, &QSlider::valueChanged, this,
&RDKitSVPanel::slot_slider_changed);
mol_slider_->setPageStep(1);
if (left_slider) {
hbox->addWidget(mol_slider_);
hbox->addWidget(mol_draw_, 1);
} else {
hbox->addWidget(mol_draw_, 1);
hbox->addWidget(mol_slider_);
}
label_ = new QLabel;
QVBoxLayout *vbox = new QVBoxLayout;
vbox->addLayout(hbox, 1);
vbox->addWidget(label_);
setLayout(vbox);
label_->hide();
}
// ****************************************************************************
void RDKitSVPanel::slot_slider_changed() {
if (mols_.empty()) {
mol_draw_->set_display_mol(RDKit::ROMOL_SPTR());
} else {
int mol_num = mol_slider_->value();
mol_draw_->set_display_mol(mols_[mol_num]);
if (!highlight_atoms_.empty()) {
mol_draw_->set_selected_atoms(highlight_atoms_[mol_num]);
}
}
}
} // namespace RDKitSV<|fim▁end|> |
#include <QLabel> |
<|file_name|>grpc_asyncio.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.aiplatform_v1beta1.types import index
from google.cloud.aiplatform_v1beta1.types import index_service
from google.longrunning import operations_pb2 # type: ignore
from .base import IndexServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import IndexServiceGrpcTransport
class IndexServiceGrpcAsyncIOTransport(IndexServiceTransport):
"""gRPC AsyncIO backend transport for IndexService.
A service for creating and managing Vertex AI's Index
resources.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "aiplatform.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
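# Hedged usage sketch (endpoint and scope shown are assumptions, not taken
# from this file):
#
#     channel = IndexServiceGrpcAsyncIOTransport.create_channel(
#         "aiplatform.googleapis.com",
#         scopes=["https://www.googleapis.com/auth/cloud-platform"],
#     )
#
# The resulting aio.Channel can then be handed to __init__ via ``channel=``.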
def __init__(
self,
*,
host: str = "aiplatform.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def create_index(
self,
) -> Callable[
[index_service.CreateIndexRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create index method over gRPC.
Creates an Index.
Returns:
Callable[[~.CreateIndexRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_index" not in self._stubs:
self._stubs["create_index"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.IndexService/CreateIndex",
request_serializer=index_service.CreateIndexRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_index"]
@property
def get_index(
self,
) -> Callable[[index_service.GetIndexRequest], Awaitable[index.Index]]:
r"""Return a callable for the get index method over gRPC.<|fim▁hole|>
Gets an Index.
Returns:
Callable[[~.GetIndexRequest],
Awaitable[~.Index]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_index" not in self._stubs:
self._stubs["get_index"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.IndexService/GetIndex",
request_serializer=index_service.GetIndexRequest.serialize,
response_deserializer=index.Index.deserialize,
)
return self._stubs["get_index"]
@property
def list_indexes(
self,
) -> Callable[
[index_service.ListIndexesRequest], Awaitable[index_service.ListIndexesResponse]
]:
r"""Return a callable for the list indexes method over gRPC.
Lists Indexes in a Location.
Returns:
Callable[[~.ListIndexesRequest],
Awaitable[~.ListIndexesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_indexes" not in self._stubs:
self._stubs["list_indexes"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.IndexService/ListIndexes",
request_serializer=index_service.ListIndexesRequest.serialize,
response_deserializer=index_service.ListIndexesResponse.deserialize,
)
return self._stubs["list_indexes"]
@property
def update_index(
self,
) -> Callable[
[index_service.UpdateIndexRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the update index method over gRPC.
Updates an Index.
Returns:
Callable[[~.UpdateIndexRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_index" not in self._stubs:
self._stubs["update_index"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.IndexService/UpdateIndex",
request_serializer=index_service.UpdateIndexRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_index"]
@property
def delete_index(
self,
) -> Callable[
[index_service.DeleteIndexRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the delete index method over gRPC.
Deletes an Index. An Index can only be deleted when all its
[DeployedIndexes][google.cloud.aiplatform.v1beta1.Index.deployed_indexes]
had been undeployed.
Returns:
Callable[[~.DeleteIndexRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_index" not in self._stubs:
self._stubs["delete_index"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.IndexService/DeleteIndex",
request_serializer=index_service.DeleteIndexRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_index"]
__all__ = ("IndexServiceGrpcAsyncIOTransport",)<|fim▁end|> | |
<|file_name|>_ResourceManager.js<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////
// Copyright © 2015 Esri. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////
define([
'dojo/_base/declare',
'dojo/_base/lang',
'dojo/_base/array',
'dojo/Deferred',
'dojo/promise/all',
'esri/lang',
'jimu/portalUrlUtils',
'./table/_FeatureTable',
// './_RelationshipTable',
'./utils'
], function(declare, lang, array, Deferred, all,
esriLang, portalUrlUtils,
_FeatureTable,/* _RelationshipTable,*/ attrUtils) {
return declare(null, {
_activeLayerInfoId: null,
_activeRelationshipKey: null,
nls: null,
config: null,
map: null,
//FeatureTable
_delayedLayerInfos: [],
_layerInfosFromMap: [],
featureTableSet: {},
//RelationshipTable
// one layer may be have multiple relationships, so we use key-value to store relationships
relationshipsSet: {},
relationshipTableSet: {},
currentRelationshipKey: null,
constructor: function(params) {
this.map = params && params.map;
this.nls = params && params.nls;
this._delayedLayerInfos = [];
this._layerInfosFromMap = [];
this.featureTableSet = {};
this.relationshipsSet = {};
this.relationshipTableSet = {};
this.currentRelationshipKey = null;
},
setConfig: function(tableConfig) {
this.config = lang.clone(tableConfig || {});
},
setMap: function(map) {
this.map = map;
},
updateLayerInfoResources: function(updateConfig) {
var def = new Deferred();
attrUtils.readConfigLayerInfosFromMap(this.map, false, true)
.then(lang.hitch(this, function(layerInfos) {
this._layerInfosFromMap = layerInfos;
this._processDelayedLayerInfos();
if (updateConfig) {
if (this.config.layerInfos.length === 0) {
// if no config only display visible layers
var configLayerInfos = attrUtils.getConfigInfosFromLayerInfos(layerInfos);
this.config.layerInfos = array.filter(configLayerInfos, function(layer) {
return layer.show;
});
} else {
// filter layer from current map and show property of layerInfo is true
this.config.layerInfos = array.filter(
lang.delegate(this.config.layerInfos),
lang.hitch(this, function(layerInfo) {
var mLayerInfo = this._getLayerInfoById(layerInfo.id);
return layerInfo.show && mLayerInfo &&
(layerInfo.name = mLayerInfo.name || mLayerInfo.title);
}));
}
}
def.resolve();
}), function(err) {
def.reject(err);
});
return def;
},
isEmpty: function() {
return this.config && this.config.layerInfos && this.config.layerInfos.length <= 0;
},
getConfigInfos: function() {
return lang.clone(this.config.layerInfos);
},
addLayerInfo: function(newLayerInfo) {
if (this._layerInfosFromMap.length === 0) {
this._delayedLayerInfos.push(newLayerInfo);
} else if (this._layerInfosFromMap.length > 0 &&
!this._getLayerInfoById(newLayerInfo.id)) {
this._layerInfosFromMap.push(newLayerInfo); // _layerInfosFromMap read from map
}
},
addConfigInfo: function(newLayerInfo) {
if (!this._getConfigInfoById(newLayerInfo.id)) {
var info = attrUtils.getConfigInfoFromLayerInfo(newLayerInfo);
this.config.layerInfos.push({
id: info.id,
name: info.name,
layer: {
url: info.layer.url,
fields: info.layer.fields
}
});
}
},
removeLayerInfo: function(infoId) {
var _clayerInfo = this._getLayerInfoById(infoId);
var pos = this._layerInfosFromMap.indexOf(_clayerInfo);
this._layerInfosFromMap.splice(pos, 1);
},
removeConfigInfo: function(infoId) {
if (lang.getObject('config.layerInfos', false, this)) {
var len = this.config.layerInfos.length;
for (var i = 0; i < len; i++) {
if (this.config.layerInfos[i].id === infoId) {
if (this.featureTableSet[infoId]) {
this.featureTableSet[infoId].destroy();
delete this.featureTableSet[infoId];
}
this.config.layerInfos.splice(i, 1);
break;
}
}
}
},
getQueryTable: function(tabId, enabledMatchingMap, hideExportButton) {
var def = new Deferred();
this._activeLayerInfoId = tabId;
if (!this.featureTableSet[tabId]) {
this._getQueryTableInfo(tabId).then(lang.hitch(this, function(queryTableInfo) {
if (!queryTableInfo) {
def.resolve(null);
return;
}
var activeLayerInfo = queryTableInfo.layerInfo;
var layerObject = queryTableInfo.layerObject;
var tableInfo = queryTableInfo.tableInfo;
// prevent creating a duplicate table
// for asynchronous requests in both queryTable and queryRelationTable
if (this.featureTableSet[tabId]) {
def.resolve({
isSupportQuery: tableInfo.isSupportQuery,
table: this.featureTableSet[tabId]
});
return;
}
if (lang.getObject('isSupportQuery', false, tableInfo)) {
var configInfo = this._getConfigInfoById(tabId);
if (!configInfo) {
this.addConfigInfo(activeLayerInfo);
configInfo = this._getConfigInfoById(tabId);
}
var configFields = lang.getObject('layer.fields', false, configInfo);
var layerFields = layerObject && layerObject.fields;
// remove fields not exist in layerObject.fields
configInfo.layer.fields = this._clipValidFields(
configFields,
layerFields
);
var table = new _FeatureTable({
map: this.map,
matchingMap: enabledMatchingMap,
hideExportButton: hideExportButton,
layerInfo: activeLayerInfo,
configedInfo: configInfo,
nls: this.nls
});
this.featureTableSet[tabId] = table;
def.resolve({
isSupportQuery: tableInfo.isSupportQuery,
table: table
});
} else {
def.resolve({
isSupportQuery: false
});
}
}), function(err) {
def.reject(err);
});
} else {
def.resolve({
isSupportQuery: true,
table: this.featureTableSet[tabId]
});
}
return def;
},
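// Hedged note: getQueryTable memoizes one _FeatureTable per layer id and
// re-checks featureTableSet after the async step, since overlapping calls
// for the same tab could otherwise race and build the table twice.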
getRelationTable: function(originalInfoId, key, enabledMatchingMap, hideExportButton) {
var def = new Deferred();
var currentShip = this.relationshipsSet[key];
this._activeRelationshipKey = key;
if (currentShip) {
var originalInfo = this._getLayerInfoById(originalInfoId);
var layerInfoId = lang.getObject('shipInfo.id', false, currentShip);
this.getQueryTable(layerInfoId, enabledMatchingMap, hideExportButton)
.then(lang.hitch(this, function(tableResult) {
if (tableResult && tableResult.table) {
var table = tableResult.table;
table.set('relatedOriginalInfo', originalInfo);
table.set('relationship', currentShip);
}
def.resolve(tableResult);
}), lang.hitch(function() {
def.resolve(null);
}));
} else {
def.resolve(null);
}
return def;
},
removeRelationTable: function(relationShipKey) {
if (this.relationshipTableSet[relationShipKey]) {
this.relationshipTableSet[relationShipKey].destroy();
this.relationshipTableSet[relationShipKey] = null;
}
},
getCurrentTable: function(key) {
return this.featureTableSet[key] || this.relationshipTableSet[key];
},
collectRelationShips: function(layerInfo, relatedTableInfos) {
this._collectRelationShips(layerInfo, layerInfo.layerObject, relatedTableInfos);
},
getConfigInfoById: function(id) {
return this._getConfigInfoById(id);
},
getLayerInfoById: function(id) {
return this._getLayerInfoById(id);
},
getRelationshipsByInfoId: function(id) {
var ships = [];
for (var p in this.relationshipsSet) {
var ship = this.relationshipsSet[p];
if (ship._layerInfoId === id) {
ships.push(ship);
}
}
return ships;
},
empty: function() {
this._delayedLayerInfos = [];
this._layerInfosFromMap = [];
this.featureTableSet = {};
for (var p in this.relationshipsSet) {
var ship = this.relationshipsSet[p];
ship.shipInfo = null;
}
this.relationshipsSet = {};
this.relationshipTableSet = {};
this.currentRelationshipKey = null;
this.config = null;
this.map = null;
this.nls = null;
},
_processDelayedLayerInfos: function() { // must be invoked after this._layerInfosFromMap is initialized
if (this._delayedLayerInfos.length > 0) {
array.forEach(this._delayedLayerInfos, lang.hitch(this, function(delayedLayerInfo) {
if (!this._getLayerInfoById(delayedLayerInfo && delayedLayerInfo.id) &&
this.map && this.map.getLayer(delayedLayerInfo.id)) {
this._layerInfosFromMap.push(delayedLayerInfo);
}
}));
this._delayedLayerInfos = [];
}
},
_getLayerInfoById: function(layerId) {
for (var i = 0, len = this._layerInfosFromMap.length; i < len; i++) {
if (this._layerInfosFromMap[i] && this._layerInfosFromMap[i].id === layerId) {
return this._layerInfosFromMap[i];
}
}
},
_getConfigInfoById: function(id) {
if (!lang.getObject('layerInfos.length', false, this.config)) {
return null;
}
for (var i = 0, len = this.config.layerInfos.length; i < len; i++) {
var configInfo = this.config.layerInfos[i];
if (configInfo && configInfo.id === id) {
return configInfo;
}
}
return null;
},
_getQueryTableInfo: function(tabId) {
var def = new Deferred();
var activeLayerInfo = this._getLayerInfoById(tabId);
if (!activeLayerInfo) {
console.error("no activeLayerInfo!");
def.reject(new Error());
} else {
var defs = [];
var hasUrl = activeLayerInfo.getUrl();
defs.push(activeLayerInfo.getLayerObject());
defs.push(activeLayerInfo.getSupportTableInfo());
if (hasUrl) {
defs.push(activeLayerInfo.getRelatedTableInfoArray());
}
all(defs).then(lang.hitch(this, function(results) {
if (this._activeLayerInfoId !== tabId || !results) {
def.resolve(null);
return;
}
var layerObject = results[0];
var tableInfo = results[1];
var relatedTableInfos = hasUrl ? results[2] : [];
if (esriLang.isDefined(relatedTableInfos) && esriLang.isDefined(layerObject) &&
relatedTableInfos.length > 0) {
this._collectRelationShips(activeLayerInfo, layerObject, relatedTableInfos);
}
def.resolve({
layerInfo: activeLayerInfo,
layerObject: layerObject,
tableInfo: tableInfo
});
}), function(err) {
def.reject(err);
});
}
return def;
},
_collectRelationShips: function(layerInfo, layerObject, relatedTableInfos) {
var ships = layerObject.relationships;
if (ships && ships.length > 0 && layerObject && layerObject.url) {
var layerUrl = layerObject.url;
var parts = layerUrl.split('/');
array.forEach(ships, lang.hitch(this, function(ship) {
parts[parts.length - 1] = ship.relatedTableId;
var relationUrl = parts.join('/');
var tableInfos = array.filter(relatedTableInfos, lang.hitch(this, function(tableInfo) {
var tableInfoUrl = tableInfo.getUrl();
return esriLang.isDefined(tableInfoUrl) && esriLang.isDefined(relationUrl) &&
(portalUrlUtils.removeProtocol(tableInfoUrl.toString().toLowerCase()) ===
portalUrlUtils.removeProtocol(relationUrl.toString().toLowerCase()));
}));
if (tableInfos && tableInfos.length > 0) {
ship.shipInfo = tableInfos[0];
}
var relKey = layerInfo.id + '_' + ship.name + '_' + ship.id;
ship._relKey = relKey;
ship._layerInfoId = layerInfo.id;
if (!this.relationshipsSet[relKey]) {
this.relationshipsSet[relKey] = ship;
this.relationshipsSet[relKey].objectIdField = layerObject.objectIdField;
}
}));
}
},
_getLayerInfoLabel: function(layerInfo) {
var label = layerInfo.name || layerInfo.title;
return label;
},
_getLayerInfoId: function(layerInfo) {
return layerInfo && layerInfo.id || "";
},
_clipValidFields: function(sFields, rFields) {
if (!(sFields && sFields.length)) {
return rFields || [];
}
if (!(rFields && rFields.length)) {
return sFields;
}
var validFields = [];
for (var i = 0, len = sFields.length; i < len; i++) {
var sf = sFields[i];
for (var j = 0, len2 = rFields.length; j < len2; j++) {
var rf = rFields[j];
if (rf.name === sf.name) {
validFields.push(sf);
break;
}
}
}
return validFields;
}
});
});<|fim▁end|> | // You may obtain a copy of the License at
// |
<|file_name|>bindings-app.js<|end_file_name|><|fim▁begin|>br.test.GwtTestRunner.initialize();
describe("Dashboard App", function() {
fixtures("brjs.dashboard.app.testing.DashboardFixtureFactory");
it("displays the screens that are visible", function() {
given("dash.loaded = true");
and("dash.model.appsScreen.visible = true");
and("dash.model.appDetailScreen.visible = true");<|fim▁hole|> and("dash.view.(#appDetailScreen).isVisible = true");
and("dash.view.(#releaseNoteScreen).isVisible = true");
});
it("hides the screens that are not visible", function() {
given("dash.loaded = true");
and("dash.model.appsScreen.visible = false");
and("dash.model.appDetailScreen.visible = false");
and("dash.model.releaseNoteScreen.visible = false");
then("dash.view.(#appsScreen).isVisible = false");
and("dash.view.(#appDetailScreen).isVisible = false");
and("dash.view.(#releaseNoteScreen).isVisible = false");
});
});<|fim▁end|> | and("dash.model.releaseNoteScreen.visible = true");
then("dash.view.(#appsScreen).isVisible = true"); |
<|file_name|>pause.go<|end_file_name|><|fim▁begin|>// +build linux
package main
import "github.com/urfave/cli"
var pauseCommand = cli.Command{
Name: "pause",
Usage: "pause suspends all processes inside the container",
ArgsUsage: `<container-id>
Where "<container-id>" is the name for the instance of the container to be
paused. `,
Description: `The pause command suspends all processes in the instance of the container.
Use runc list to identify instances of containers and their current status.`,
Action: func(context *cli.Context) error {
if err := checkArgs(context, 1, exactArgs); err != nil {
return err
}
container, err := getContainer(context)
if err != nil {
return err
}
if err := container.Pause(); err != nil {
return err
}
return nil
},
}
var resumeCommand = cli.Command{
Name: "resume",
Usage: "resumes all processes that have been previously paused",
ArgsUsage: `<container-id>
Where "<container-id>" is the name for the instance of the container to be
resumed.`,
Description: `The resume command resumes all processes in the instance of the container.
Use runc list to identify instances of containers and their current status.`,
Action: func(context *cli.Context) error {
if err := checkArgs(context, 1, exactArgs); err != nil {
return err
}
container, err := getContainer(context)
if err != nil {
return err
}
if err := container.Resume(); err != nil {
return err<|fim▁hole|> },
}<|fim▁end|> | }
return nil |
<|file_name|>background_ee_qcd_170-250.py<|end_file_name|><|fim▁begin|>import ArtusConfigBase as base
def config():
conf = base.BaseConfig('mc', '2012', analysis='ee')
conf["InputFiles"] = base.setInputFiles(<|fim▁hole|> ekppath="",
nafpath="/pnfs/desy.de/cms/tier2/store/user/dhaitz/2014_08_08_data_QCD/kappa_QCD_170-250_*.root"
)
conf['EnableLumiReweighting'] = True
conf['EnableTriggerReweighting'] = False
conf['NEvents'] = 31697066
conf['XSection'] = 30990
conf = base.expand(conf, ['all', 'zcuts', 'incut'])
return conf<|fim▁end|> | |
<|file_name|>ExponentialDemo.java<|end_file_name|><|fim▁begin|>package ru.sigma.test.learning.data;
/**<|fim▁hole|> * To change this template use File | Settings | File Templates.
*/
public class ExponentialDemo {
public static void main(String[] args) {
double x = 11.635;
double y = 2.76;
System.out.printf("The value of " + "e is %.4f%n",
Math.E);
System.out.printf("exp(%.3f) " + "is %.3f%n",
x, Math.exp(x));
System.out.printf("log(%.3f) is " + "%.3f%n",
x, Math.log(x));
System.out.printf("pow(%.3f, %.3f) " + "is %.3f%n",
x, y, Math.pow(x, y));
System.out.printf("sqrt(%.3f) is " + "%.3f%n",
x, Math.sqrt(x));
}
}<|fim▁end|> | * Created with IntelliJ IDEA.
* User: emaltsev
* Date: 22.11.13
* Time: 10:37 |
<|file_name|>editorParamsViewWidget.js<|end_file_name|><|fim▁begin|>/*global define*/
/*jslint white:true,browser:true*/
define([
'bluebird',
// CDN
'kb_common/html',
// LOCAL
'common/ui',
'common/runtime',
'common/events',
'common/props',
// Wrapper for inputs
'./inputWrapperWidget',
'widgets/appWidgets/fieldWidget',
// Display widgets
'widgets/appWidgets/paramDisplayResolver'
], function (
Promise,
html,
UI,
Runtime,
Events,
Props,
//Wrappers
RowWidget,
FieldWidget,<|fim▁hole|> var t = html.tag,
form = t('form'), span = t('span'), div = t('div');
function factory(config) {
var runtime = Runtime.make(),
parentBus = config.bus,
cellId = config.cellId,
workspaceInfo = config.workspaceInfo,
container,
ui,
bus,
places,
model = Props.make(),
inputBusses = [],
settings = {
showAdvanced: null
},
paramResolver = ParamResolver.make();
// DATA
/*
* The input control widget is selected based on these parameters:
* - data type - (text, int, float, workspaceObject (ref, name)
* - input app - input, select
*/
// RENDERING
function makeFieldWidget(parameterSpec, value) {
var bus = runtime.bus().makeChannelBus(null, 'Params view input bus comm widget'),
inputWidget = paramResolver.getWidgetFactory(parameterSpec);
inputBusses.push(bus);
// An input widget may ask for the current model value at any time.
bus.on('sync', function () {
parentBus.emit('parameter-sync', {
parameter: parameterSpec.id()
});
});
parentBus.listen({
key: {
type: 'update',
parameter: parameterSpec.id()
},
handle: function (message) {
bus.emit('update', {
value: message.value
});
}
});
// Just pass the update along to the input widget.
// TODO: commented out, is it even used?
// parentBus.listen({
// test: function (message) {
// var pass = (message.type === 'update' && message.parameter === parameterSpec.id());
// return pass;
// },
// handle: function (message) {
// bus.send(message);
// }
// });
return FieldWidget.make({
inputControlFactory: inputWidget,
showHint: true,
useRowHighight: true,
initialValue: value,
parameterSpec: parameterSpec,
bus: bus,
workspaceId: workspaceInfo.id
});
}
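// Hedged note on the pattern above: every field widget gets its own channel
// bus; 'sync' requests bubble up to the parent as 'parameter-sync' events,
// and parent 'update' messages keyed by this parameter id are re-emitted
// down the field's bus, keeping each input decoupled from the cell bus.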
function renderAdvanced() {
var advancedInputs = container.querySelectorAll('[data-advanced-parameter]');
if (advancedInputs.length === 0) {
return;
}
var removeClass = (settings.showAdvanced ? 'advanced-parameter-hidden' : 'advanced-parameter-showing'),
addClass = (settings.showAdvanced ? 'advanced-parameter-showing' : 'advanced-parameter-hidden');
for (var i = 0; i < advancedInputs.length; i += 1) {
var input = advancedInputs[i];
input.classList.remove(removeClass);
input.classList.add(addClass);
}
// How many advanaced?
// Also update the button
var button = container.querySelector('[data-button="toggle-advanced"]');
button.innerHTML = (settings.showAdvanced ? 'Hide Advanced' : 'Show Advanced (' + advancedInputs.length + ' hidden)');
// Also update the
}
function renderLayout() {
var events = Events.make(),
content = form({dataElement: 'input-widget-form'}, [
ui.buildPanel({
type: 'default',
classes: 'kb-panel-light',
body: [
ui.makeButton('Show Advanced', 'toggle-advanced', {events: events})
]
}),
ui.buildPanel({
title: 'Inputs',
body: div({dataElement: 'input-fields'}),
classes: ['kb-panel-container']
}),
ui.buildPanel({
title: span(['Parameters', span({dataElement: 'advanced-hidden'})]),
body: div({dataElement: 'parameter-fields'}),
classes: ['kb-panel-container']
}),
ui.buildPanel({
title: 'Outputs',
body: div({dataElement: 'output-fields'}),
classes: ['kb-panel-container']
})
]);
return {
content: content,
events: events
};
}
// MESSAGE HANDLERS
function doAttach(node) {
container = node;
ui = UI.make({
node: container,
bus: bus
});
var layout = renderLayout();
container.innerHTML = layout.content;
layout.events.attachEvents(container);
places = {
inputFields: ui.getElement('input-fields'),
outputFields: ui.getElement('output-fields'),
parameterFields: ui.getElement('parameter-fields'),
advancedParameterFields: ui.getElement('advanced-parameter-fields')
};
}
// EVENTS
function attachEvents() {
bus.on('reset-to-defaults', function () {
inputBusses.forEach(function (inputBus) {
inputBus.send({
type: 'reset-to-defaults'
});
});
});
bus.on('toggle-advanced', function () {
settings.showAdvanced = !settings.showAdvanced;
renderAdvanced();
});
}
// LIFECYCLE API
function renderParameters(params) {
var widgets = [];
// First get the app specs, which is stashed in the model,
// with the parameters returned.
// Separate out the params into the primary groups.
var params = model.getItem('parameters'),
inputParams = params.filter(function (spec) {
return (spec.spec.ui_class === 'input');
}),
outputParams = params.filter(function (spec) {
return (spec.spec.ui_class === 'output');
}),
parameterParams = params.filter(function (spec) {
return (spec.spec.ui_class === 'parameter');
});
return Promise.resolve()
.then(function () {
if (inputParams.length === 0) {
places.inputFields.innerHTML = span({style: {fontStyle: 'italic'}}, 'No input objects for this app');
} else {
return Promise.all(inputParams.map(function (spec) {
var fieldWidget = makeFieldWidget(spec, model.getItem(['params', spec.name()])),
rowWidget = RowWidget.make({widget: fieldWidget, spec: spec}),
rowNode = document.createElement('div');
places.inputFields.appendChild(rowNode);
widgets.push(rowWidget);
rowWidget.attach(rowNode);
}));
}
})
.then(function () {
if (outputParams.length === 0) {
places.outputFields.innerHTML = span({style: {fontStyle: 'italic'}}, 'No output objects for this app');
} else {
return Promise.all(outputParams.map(function (spec) {
var fieldWidget = makeFieldWidget(spec, model.getItem(['params', spec.name()])),
rowWidget = RowWidget.make({widget: fieldWidget, spec: spec}),
rowNode = document.createElement('div');
places.outputFields.appendChild(rowNode);
widgets.push(rowWidget);
rowWidget.attach(rowNode);
}));
}
})
.then(function () {
if (parameterParams.length === 0) {
ui.setContent('parameter-fields', span({style: {fontStyle: 'italic'}}, 'No parameters for this app'));
} else {
return Promise.all(parameterParams.map(function (spec) {
var fieldWidget = makeFieldWidget(spec, model.getItem(['params', spec.name()])),
rowWidget = RowWidget.make({widget: fieldWidget, spec: spec}),
rowNode = document.createElement('div');
places.parameterFields.appendChild(rowNode);
widgets.push(rowWidget);
rowWidget.attach(rowNode);
}));
}
})
.then(function () {
return Promise.all(widgets.map(function (widget) {
return widget.start();
}));
})
.then(function () {
return Promise.all(widgets.map(function (widget) {
return widget.run(params);
}));
})
.then(function () {
renderAdvanced();
});
}
function start() {
// send parent the ready message
return Promise.try(function () {
parentBus.emit('ready');
// parent will send us our initial parameters
parentBus.on('run', function (message) {
doAttach(message.node);
model.setItem('parameters', message.parameters);
// we then create our widgets
renderParameters()
.then(function () {
// do something after success
attachEvents();
})
.catch(function (err) {
// do somethig with the error.
console.error('ERROR in start', err);
});
});
});
}
function stop() {
return Promise.try(function () {
// unregister listerrs...
});
}
// CONSTRUCTION
bus = runtime.bus().makeChannelBus(null, 'params view own bus');
return {
start: start,
stop: stop,
bus: function () {
return bus;
}
};
}
return {
make: function (config) {
return factory(config);
}
};
});<|fim▁end|> | ParamResolver
) {
'use strict';
|
<|file_name|>instrument.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from devito.passes.iet.engine import iet_pass
from devito.passes.iet.orchestration import BusyWait
from devito.types import Timer
__all__ = ['instrument']
def instrument(graph, **kwargs):
track_subsections(graph, **kwargs)
# Construct a fresh Timer object
profiler = kwargs['profiler']
timer = Timer(profiler.name, list(profiler.all_sections))
instrument_sections(graph, timer=timer, **kwargs)
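# Hedged sketch of the flow above: instrument() first promotes trackable MPI
# and busy-wait nodes into their own named Sections via track_subsections,
# then instrument_sections wraps every Section with start/stop calls against
# the single shared Timer built here.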
@iet_pass
def track_subsections(iet, **kwargs):
"""
Add custom Sections to the `profiler`. Custom Sections include:
* MPI Calls (e.g., HaloUpdateCall and HaloUpdateWait)
* Busy-waiting on While(lock) (e.g., from host-device orchestration)
"""
profiler = kwargs['profiler']
sregistry = kwargs['sregistry']
name_mapper = {
HaloUpdateCall: 'haloupdate',
HaloWaitCall: 'halowait',
RemainderCall: 'remainder',
HaloUpdateList: 'haloupdate',
HaloWaitList: 'halowait',
BusyWait: 'busywait'
}
mapper = {}
for NodeType in [MPIList, MPICall, BusyWait]:
for k, v in MapNodes(Section, NodeType).visit(iet).items():
for i in v:
if i in mapper or not any(issubclass(i.__class__, n)
for n in profiler.trackable_subsections):
continue
name = sregistry.make_name(prefix=name_mapper[i.__class__])
mapper[i] = Section(name, body=i, is_subsection=True)
profiler.track_subsection(k.name, name)
iet = Transformer(mapper).visit(iet)
return iet, {}
@iet_pass
def instrument_sections(iet, **kwargs):
"""
Instrument the Sections of the input IET based on `profiler.sections`.
"""
profiler = kwargs['profiler']
timer = kwargs['timer']
piet = profiler.instrument(iet, timer)
if piet is iet:
return piet, {}
headers = [TimedList._start_timer_header(), TimedList._stop_timer_header()]
return piet, {'args': timer, 'headers': headers}<|fim▁end|> | from devito.ir.iet import MapNodes, Section, TimedList, Transformer
from devito.mpi.routines import (HaloUpdateCall, HaloWaitCall, MPICall, MPIList,
HaloUpdateList, HaloWaitList, RemainderCall) |
<|file_name|>os_flavor_facts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2015 IBM
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_flavor_facts
short_description: Retrieve facts about one or more flavors
author: "David Shrewsbury (@Shrews)"
version_added: "2.1"
description:
- Retrieve facts about available OpenStack instance flavors. By default,
facts about ALL flavors are retrieved. Filters can be applied to get
facts for only matching flavors. For example, you can filter on the
amount of RAM available to the flavor, or the number of virtual CPUs
available to the flavor, or both. When specifying multiple filters,
*ALL* filters must match on a flavor before that flavor is returned as
a fact.
notes:
- This module creates a new top-level C(openstack_flavors) fact, which
contains a list of unsorted flavors.
requirements:
- "python >= 2.6"
- "openstacksdk"
options:
name:
description:
- A flavor name. Cannot be used with I(ram) or I(vcpus) or I(ephemeral).
ram:
description:
- "A string used for filtering flavors based on the amount of RAM
(in MB) desired. This string accepts the following special values:
'MIN' (return flavors with the minimum amount of RAM), and 'MAX'
(return flavors with the maximum amount of RAM)."
- "A specific amount of RAM may also be specified. Any flavors with this
exact amount of RAM will be returned."
- "A range of acceptable RAM may be given using a special syntax. Simply
prefix the amount of RAM with one of these acceptable range values:
'<', '>', '<=', '>='. These values represent less than, greater than,
less than or equal to, and greater than or equal to, respectively."
type: str
vcpus:
description:
- A string used for filtering flavors based on the number of virtual
CPUs desired. Format is the same as the I(ram) parameter.
type: str
limit:
description:
- Limits the number of flavors returned. All matching flavors are
returned by default.
ephemeral:
description:
- A string used for filtering flavors based on the amount of ephemeral
storage. Format is the same as the I(ram) parameter
type: str
version_added: "2.3"
availability_zone:
description:
- Ignored. Present for backwards compatibility
extends_documentation_fragment: openstack
'''
EXAMPLES = '''
# Gather facts about all available flavors
- os_flavor_facts:
cloud: mycloud
# Gather facts for the flavor named "xlarge-flavor"
- os_flavor_facts:
cloud: mycloud
name: "xlarge-flavor"
# Get all flavors that have exactly 512 MB of RAM.
- os_flavor_facts:
cloud: mycloud
ram: "512"
# Get all flavors that have 1024 MB or more of RAM.
- os_flavor_facts:
cloud: mycloud
ram: ">=1024"
# Get a single flavor that has the minimum amount of RAM. Using the 'limit'
# option will guarantee only a single flavor is returned.
- os_flavor_facts:
cloud: mycloud
ram: "MIN"
limit: 1
# Get all flavors with 1024 MB of RAM or more, AND exactly 2 virtual CPUs.
- os_flavor_facts:
cloud: mycloud
ram: ">=1024"
vcpus: "2"
# Get all flavors with 1024 MB of RAM or more, exactly 2 virtual CPUs, and
# less than 30gb of ephemeral storage.
- os_flavor_facts:
cloud: mycloud
ram: ">=1024"
vcpus: "2"
ephemeral: "<30"
'''
RETURN = '''
openstack_flavors:
description: Dictionary describing the flavors.
returned: On success.
type: complex
contains:
id:
description: Flavor ID.
returned: success
type: string
sample: "515256b8-7027-4d73-aa54-4e30a4a4a339"
name:
description: Flavor name.
returned: success
type: string
sample: "tiny"
disk:
description: Size of local disk, in GB.
returned: success
type: int
sample: 10
ephemeral:
description: Ephemeral space size, in GB.
returned: success
type: int
sample: 10
ram:
description: Amount of memory, in MB.
returned: success
type: int
sample: 1024
swap:
description: Swap space size, in MB.
returned: success
type: int
sample: 100
vcpus:
description: Number of virtual CPUs.
returned: success
type: int
sample: 2
is_public:
description: Make flavor accessible to the public.
returned: success
type: bool
sample: true
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.openstack import openstack_full_argument_spec, openstack_module_kwargs, openstack_cloud_from_module
def main():
argument_spec = openstack_full_argument_spec(
name=dict(required=False, default=None),
ram=dict(required=False, default=None),
vcpus=dict(required=False, default=None),
limit=dict(required=False, default=None, type='int'),
ephemeral=dict(required=False, default=None),<|fim▁hole|> module_kwargs = openstack_module_kwargs(
mutually_exclusive=[
['name', 'ram'],
['name', 'vcpus'],
['name', 'ephemeral']
]
)
module = AnsibleModule(argument_spec, **module_kwargs)
name = module.params['name']
vcpus = module.params['vcpus']
ram = module.params['ram']
ephemeral = module.params['ephemeral']
limit = module.params['limit']
filters = {}
if vcpus:
filters['vcpus'] = vcpus
if ram:
filters['ram'] = ram
if ephemeral:
filters['ephemeral'] = ephemeral
sdk, cloud = openstack_cloud_from_module(module)
try:
if name:
flavors = cloud.search_flavors(filters={'name': name})
else:
flavors = cloud.list_flavors()
if filters:
flavors = cloud.range_search(flavors, filters)
if limit is not None:
flavors = flavors[:limit]
module.exit_json(changed=False,
ansible_facts=dict(openstack_flavors=flavors))
except sdk.exceptions.OpenStackCloudException as e:
module.fail_json(msg=str(e))
if __name__ == '__main__':
main()<|fim▁end|> | ) |
<|file_name|>reporter.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::SourceLocation;
use ipc_channel::ipc::IpcSender;
use log;
use msg::constellation_msg::PipelineId;
use script_traits::ConstellationControlMsg;
use servo_url::ServoUrl;
use std::sync::{Mutex, Arc};
use style::error_reporting::{ParseErrorReporter, ContextualParseError};
#[derive(HeapSizeOf, Clone)]
pub struct CSSErrorReporter {
pub pipelineid: PipelineId,
// Arc+Mutex combo is necessary to make this struct Sync,
// which is necessary to fulfill the bounds required by the
// uses of the ParseErrorReporter trait.
#[ignore_heap_size_of = "Arc is defined in libstd"]
pub script_chan: Arc<Mutex<IpcSender<ConstellationControlMsg>>>,
}<|fim▁hole|> location: SourceLocation,
error: ContextualParseError) {
if log_enabled!(log::LogLevel::Info) {
info!("Url:\t{}\n{}:{} {}",
url.as_str(),
location.line,
location.column,
error.to_string())
}
//TODO: report a real filename
let _ = self.script_chan.lock().unwrap().send(
ConstellationControlMsg::ReportCSSError(self.pipelineid,
"".to_owned(),
location.line,
location.column,
error.to_string()));
}
}<|fim▁end|> |
impl ParseErrorReporter for CSSErrorReporter {
fn report_error(&self,
url: &ServoUrl, |
<|file_name|>filter-stack.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2019 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import { Call, StatusObject, WriteObject } from './call-stream';
import { Filter, FilterFactory } from './filter';
import { Metadata } from './metadata';
export class FilterStack implements Filter {
constructor(private readonly filters: Filter[]) {}
sendMetadata(metadata: Promise<Metadata>) {
let result: Promise<Metadata> = metadata;
for (let i = 0; i < this.filters.length; i++) {
result = this.filters[i].sendMetadata(result);
}
return result;
}
receiveMetadata(metadata: Metadata) {<|fim▁hole|> result = this.filters[i].receiveMetadata(result);
}
return result;
}
sendMessage(message: Promise<WriteObject>): Promise<WriteObject> {
let result: Promise<WriteObject> = message;
for (let i = 0; i < this.filters.length; i++) {
result = this.filters[i].sendMessage(result);
}
return result;
}
receiveMessage(message: Promise<Buffer>): Promise<Buffer> {
let result: Promise<Buffer> = message;
for (let i = this.filters.length - 1; i >= 0; i--) {
result = this.filters[i].receiveMessage(result);
}
return result;
}
receiveTrailers(status: StatusObject): StatusObject {
let result: StatusObject = status;
for (let i = this.filters.length - 1; i >= 0; i--) {
result = this.filters[i].receiveTrailers(result);
}
return result;
}
refresh(): void {
for (const filter of this.filters) {
filter.refresh();
}
}
push(filters: Filter[]) {
this.filters.unshift(...filters);
}
getFilters(): Filter[] {
return this.filters;
}
}
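// Note (illustrative): with filters [A, B], the send* methods apply A then B,
// while the receive* methods apply B then A -- the stack behaves like an
// onion, unwinding in reverse on the way back in.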
export class FilterStackFactory implements FilterFactory<FilterStack> {
constructor(private readonly factories: Array<FilterFactory<Filter>>) {}
push(filterFactories: FilterFactory<Filter>[]) {
this.factories.unshift(...filterFactories);
}
createFilter(callStream: Call): FilterStack {
return new FilterStack(
this.factories.map((factory) => factory.createFilter(callStream))
);
}
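// Hedged usage sketch (factory names below are assumptions, not real imports):
//
//   const stackFactory = new FilterStackFactory([compressionFactory, deadlineFactory]);
//   const stack = stackFactory.createFilter(callStream);
//   const outgoing = stack.sendMetadata(Promise.resolve(metadata));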
}<|fim▁end|> | let result: Metadata = metadata;
for (let i = this.filters.length - 1; i >= 0; i--) { |
<|file_name|>ConsumerIdsChangeListener.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2010-2013 Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.broker.client;
import io.netty.channel.Channel;
import java.util.List;
/**
* @author shijia.wxr<[email protected]>
* @since 2013-6-24
*/
public interface ConsumerIdsChangeListener {
<|fim▁hole|><|fim▁end|> | public void consumerIdsChanged(final String group, final List<Channel> channels);
} |
<|file_name|>mainSocket.js<|end_file_name|><|fim▁begin|>$(function() {
var FADE_TIME = 150; // ms
var TYPING_TIMER_LENGTH = 400; // ms
var COLORS = [
'#e21400', '#91580f', '#f8a700', '#f78b00',
'#58dc00', '#287b00', '#a8f07a', '#4ae8c4',
'#3b88eb', '#3824aa', '#a700ff', '#d300e7'
];
// Initialize variables
var $window = $(window);
var $usernameInput = $('.usernameInput'); // Input for username
var $messages = $('.messages'); // Messages area
var $inputMessage = $('.inputMessage'); // Input message input box
var $loginPage = $('.login.page'); // The login page
var $chatPage = $('.chat.page'); // The chatroom page
// Prompt for setting a username
var username;
var connected = false;
var typing = false;
var lastTypingTime;
var $currentInput = $usernameInput.focus();
var socket = io();
function addParticipantsMessage (data) {
var message = '';
if (data.numUsers === 1) {
message += "there's 1 participant";
} else {
message += "there are " + data.numUsers + " participants";
}
log(message);
}
// Sets the client's username
function setUsername () {
username = cleanInput($usernameInput.val().trim());
// If the username is valid
if (username) {
$loginPage.fadeOut();
$chatPage.show();
$loginPage.off('click');
$currentInput = $inputMessage.focus();
// Tell the server your username
socket.emit('add user', username);
}
}
// Sends a chat message
function sendMessage () {
var message = $inputMessage.val();
// Prevent markup from being injected into the message
message = cleanInput(message);
// if there is a non-empty message and a socket connection
if (message && connected) {
$inputMessage.val('');
addChatMessage({
username: username,
message: message
});
// tell server to execute 'new message' and send along one parameter
socket.emit('new message', message);
}
}
// Log a message
function log (message, options) {
var $el = $('<li>').addClass('log').text(message);
addMessageElement($el, options);
}
// Adds the visual chat message to the message list
function addChatMessage (data, options) {
// Don't fade the message in if there is an 'X was typing'
var $typingMessages = getTypingMessages(data);
options = options || {};
if ($typingMessages.length !== 0) {
options.fade = false;
$typingMessages.remove();
}
var $usernameDiv = $('<span class="username"/>')
.text(data.username)
.css('color', getUsernameColor(data.username));
var $messageBodyDiv = $('<span class="messageBody">')
.text(data.message);
var typingClass = data.typing ? 'typing' : '';
var $messageDiv = $('<li class="message"/>')
.data('username', data.username)
.addClass(typingClass)
.append($usernameDiv, $messageBodyDiv);
addMessageElement($messageDiv, options);
}
// Adds the visual chat typing message
function addChatTyping (data) {
data.typing = true;
data.message = 'is typing';
addChatMessage(data);
}
// Removes the visual chat typing message
function removeChatTyping (data) {
getTypingMessages(data).fadeOut(function () {
$(this).remove();
});
}
// Adds a message element to the messages and scrolls to the bottom
// el - The element to add as a message
// options.fade - If the element should fade-in (default = true)<|fim▁hole|> function addMessageElement (el, options) {
var $el = $(el);
// Setup default options
if (!options) {
options = {};
}
if (typeof options.fade === 'undefined') {
options.fade = true;
}
if (typeof options.prepend === 'undefined') {
options.prepend = false;
}
// Apply options
if (options.fade) {
$el.hide().fadeIn(FADE_TIME);
}
if (options.prepend) {
$messages.prepend($el);
} else {
$messages.append($el);
}
$messages[0].scrollTop = $messages[0].scrollHeight;
}
// Prevents input from having injected markup
function cleanInput (input) {
return $('<div/>').text(input).text();
}
// Updates the typing event
function updateTyping () {
if (connected) {
if (!typing) {
typing = true;
socket.emit('typing');
}
lastTypingTime = (new Date()).getTime();
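      // Debounce: when the timer fires, emit 'stop typing' only if no new
      // keystroke arrived within TYPING_TIMER_LENGTH ms of the last one.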
setTimeout(function () {
var typingTimer = (new Date()).getTime();
var timeDiff = typingTimer - lastTypingTime;
if (timeDiff >= TYPING_TIMER_LENGTH && typing) {
socket.emit('stop typing');
typing = false;
}
}, TYPING_TIMER_LENGTH);
}
}
// Gets the 'X is typing' messages of a user
function getTypingMessages (data) {
return $('.typing.message').filter(function (i) {
return $(this).data('username') === data.username;
});
}
// Gets the color of a username through our hash function
function getUsernameColor (username) {
// Compute hash code
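    // (hash << 5) - hash equals hash * 31, the classic polynomial string hash.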
var hash = 7;
for (var i = 0; i < username.length; i++) {
hash = username.charCodeAt(i) + (hash << 5) - hash;
}
// Calculate color
var index = Math.abs(hash % COLORS.length);
return COLORS[index];
}
// Keyboard events
$window.keydown(function (event) {
// Auto-focus the current input when a key is typed
if (!(event.ctrlKey || event.metaKey || event.altKey)) {
$currentInput.focus();
}
// When the client hits ENTER on their keyboard
if (event.which === 13) {
if (username) {
sendMessage();
socket.emit('stop typing');
typing = false;
} else {
setUsername();
}
}
});
$inputMessage.on('input', function() {
updateTyping();
});
// Click events
// Focus input when clicking anywhere on login page
$loginPage.click(function () {
$currentInput.focus();
});
// Focus input when clicking on the message input's border
$inputMessage.click(function () {
$inputMessage.focus();
});
// Socket events
// Whenever the server emits 'login', log the login message
socket.on('login', function (data) {
connected = true;
// Display the welcome message
var message = "Welcome to Socket.IO Chat – ";
log(message, {
prepend: true
});
addParticipantsMessage(data);
});
// Whenever the server emits 'new message', update the chat body
socket.on('new message', function (data) {
addChatMessage(data);
});
// Whenever the server emits 'user joined', log it in the chat body
socket.on('user joined', function (data) {
log(data.username + ' joined');
addParticipantsMessage(data);
});
// Whenever the server emits 'user left', log it in the chat body
socket.on('user left', function (data) {
log(data.username + ' left');
addParticipantsMessage(data);
removeChatTyping(data);
});
// Whenever the server emits 'typing', show the typing message
socket.on('typing', function (data) {
addChatTyping(data);
});
// Whenever the server emits 'stop typing', kill the typing message
socket.on('stop typing', function (data) {
removeChatTyping(data);
});
socket.on('disconnect', function () {
log('you have been disconnected');
});
socket.on('reconnect', function () {
log('you have been reconnected');
if (username) {
socket.emit('add user', username);
}
});
socket.on('reconnect_error', function () {
log('attempt to reconnect has failed');
});
});<|fim▁end|> | // options.prepend - If the element should prepend
// all other messages (default = false) |
<|file_name|>xml_parser.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import xml.sax
import glob
from xml.etree import ElementTree
import os
debug = 0
currentPath = os.path.dirname(os.path.abspath(__file__))
def printl(string):
if debug:
print "[xml_parser:Debug] "+ str(string)
def main ():
p = getProviders()
print p[2].getIncomingServers()
def getProviderFromFilenames():
provs = []
printl("Current Path "+currentPath)
provs = os.listdir(currentPath+"/xml")
printl (provs)
return provs
class ProviderXMLHandler:
fullFilePath = None
domain = None
dom = None
    services = []
    def __init__(self, xmlfile):
        # use a per-instance list so providers don't share discovered services
        self.services = []
        self.fullFilePath = os.path.join(currentPath, os.path.join('xml', xmlfile))
printl ("Getting "+self.fullFilePath)
self.dom = ElementTree.parse(self.fullFilePath)
self.domain = xmlfile
if self.dom != None:
printl("File geladen: "+str(self.dom))
self.getDisplayName()
self.getIncomingServers()
#self.getDomains()
def getDomain(self):
return self.domain
def getIncomingServers(self):
server = []
incomingServers = self.dom.findall('emailProvider/incomingServer')
printl ("incoming servers "+ str(len(incomingServers)))
for s in incomingServers:
type = s.attrib['type']
if type not in self.services:
self.services.append(type)
printl("Hostname: "+str(s.find('hostname').text))
printl("Hostname: "+str(s.find('port').text))
service = []
service.append(type)
service.append(s.find('hostname').text)
service.append(int(s.find('port').text))
if service not in server:
server.append(service)
printl("getIncomingServers: "+str(server))
return server
def canProviderIMAP(self):
if "imap" in self.services:
return True
return False
def canProviderPOP3(self):
if "pop3" in self.services:
return True<|fim▁hole|> return False
def getDisplayName(self):
displayName = self.dom.findall('emailProvider/displayName')
if len(displayName) > 0:
displayName = displayName[0].text
printl ("Display name: " + displayName)
return displayName.encode('utf8')
else:
printl ("Display name: none")
return "None";
def getDomains(self):
domains = self.dom.findall('emailProvider/domain')
printl("\nGetting Domains")
for d in domains:
printl(d.text)
return domains
def getProviders():
providers = []
names = getProviderFromFilenames()
printl("files found: "+str(len(names)))
for p in names:
printl ("Provider: "+str(p))
providers.append(ProviderXMLHandler(p))
return providers
#main()<|fim▁end|> | |
<|file_name|>float.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(missing_docs)]
pub use self::ExponentFormat::*;
pub use self::SignificantDigits::*;
pub use self::SignFormat::*;
use char;
use char::CharExt;
use fmt;
use iter::Iterator;
use num::{cast, Float, ToPrimitive};
use num::FpCategory as Fp;
use ops::FnOnce;
use result::Result::Ok;
use slice::{self, SliceExt};
use str::{self, StrExt};
/// A flag that specifies whether to use exponential (scientific) notation.
pub enum ExponentFormat {
/// Do not use exponential notation.
ExpNone,
/// Use exponential notation with the exponent having a base of 10 and the
/// exponent sign being `e` or `E`. For example, 1000 would be printed
/// 1e3.
ExpDec
}
/// The number of digits used for emitting the fractional part of a number, if
/// any.
pub enum SignificantDigits {
/// At most the given number of digits will be printed, truncating any
/// trailing zeroes.
DigMax(usize),
/// Precisely the given number of digits will be printed.
DigExact(usize)
}
/// How to emit the sign of a number.
pub enum SignFormat {
/// `-` will be printed for negative values, but no sign will be emitted
/// for positive numbers.
SignNeg
}
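// 'e' has digit value 14, so any radix of 15 or more would let it collide
// with the decimal exponent marker; DIGIT_E_RADIX is that threshold.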
const DIGIT_E_RADIX: u32 = ('e' as u32) - ('a' as u32) + 11;
/// Converts a number to its string representation as a byte vector.
/// This is meant to be a common base implementation for all numeric string
/// conversion functions like `to_string()` or `to_str_radix()`.
///
/// # Arguments
///
/// - `num` - The number to convert. Accepts any number that
/// implements the numeric traits.
/// - `radix` - Base to use. Accepts only the values 2-36. If the exponential notation
/// is used, then this base is only used for the significand. The exponent
///               itself is always printed using a base of 10.
/// - `negative_zero` - Whether to treat the special value `-0` as
/// `-0` or as `+0`.
/// - `sign` - How to emit the sign. See `SignFormat`.
/// - `digits` - The amount of digits to use for emitting the fractional
/// part, if any. See `SignificantDigits`.
/// - `exp_format` - Whether or not to use the exponential (scientific) notation.
/// See `ExponentFormat`.
/// - `exp_capital` - Whether or not to use a capital letter for the exponent sign, if
/// exponential notation is desired.
/// - `f` - A closure to invoke with the bytes representing the
/// float.
///
/// # Panics
///
/// - Panics if `radix` < 2 or `radix` > 36.
/// - Panics if `radix` > 14 and `exp_format` is `ExpDec` due to conflict
/// between digit and exponent sign `'e'`.
pub fn float_to_str_bytes_common<T: Float, U, F>(
num: T,
radix: u32,
negative_zero: bool,
sign: SignFormat,
digits: SignificantDigits,
exp_format: ExponentFormat,
exp_upper: bool,
f: F
) -> U where
F: FnOnce(&str) -> U,
{
assert!(2 <= radix && radix <= 36);<|fim▁hole|> ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
=> panic!("float_to_str_bytes_common: radix {} incompatible with \
use of 'e' as decimal exponent", radix),
_ => ()
}
let _0: T = Float::zero();
let _1: T = Float::one();
match num.classify() {
Fp::Nan => return f("NaN"),
Fp::Infinite if num > _0 => {
return f("inf");
}
Fp::Infinite if num < _0 => {
return f("-inf");
}
_ => {}
}
let neg = num < _0 || (negative_zero && _1 / num == Float::neg_infinity());
// For an f64 the exponent is in the range of [-1022, 1023] for base 2, so
// we may have up to that many digits. Give ourselves some extra wiggle room
// otherwise as well.
let mut buf = [0; 1536];
let mut end = 0;
let radix_gen: T = cast(radix as isize).unwrap();
let (num, exp) = match exp_format {
ExpNone => (num, 0),
ExpDec if num == _0 => (num, 0),
ExpDec => {
let (exp, exp_base) = match exp_format {
ExpDec => (num.abs().log10().floor(), cast::<f64, T>(10.0f64).unwrap()),
ExpNone => panic!("unreachable"),
};
(num / exp_base.powf(exp), cast::<T, i32>(exp).unwrap())
}
};
// First emit the non-fractional part, looping at least once to make
// sure at least a `0` gets emitted.
let mut deccum = num.trunc();
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
        // representable negative number doesn't necessarily have a
        // representable additive inverse of the same type
        // (see two's complement). But we assume that for the
// numbers [-35 .. 0] we always have [0 .. 35].
let current_digit = (deccum % radix_gen).abs();
// Decrease the deccumulator one digit at a time
deccum = deccum / radix_gen;
deccum = deccum.trunc();
let c = char::from_digit(current_digit.to_isize().unwrap() as u32, radix);
buf[end] = c.unwrap() as u8;
end += 1;
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// If limited digits, calculate one digit more for rounding.
let (limit_digits, digit_count, exact) = match digits {
DigMax(count) => (true, count + 1, false),
DigExact(count) => (true, count + 1, true)
};
// Decide what sign to put in front
match sign {
SignNeg if neg => {
buf[end] = b'-';
end += 1;
}
_ => ()
}
buf[..end].reverse();
// Remember start of the fractional digits.
// Points one beyond end of buf if none get generated,
// or at the '.' otherwise.
let start_fractional_digits = end;
// Now emit the fractional part, if any
deccum = num.fract();
if deccum != _0 || (limit_digits && exact && digit_count > 0) {
buf[end] = b'.';
end += 1;
let mut dig = 0;
// calculate new digits while
// - there is no limit and there are digits left
// - or there is a limit, it's not reached yet and
// - it's exact
// - or it's a maximum, and there are still digits left
while (!limit_digits && deccum != _0)
|| (limit_digits && dig < digit_count && (
exact
|| (!exact && deccum != _0)
)
) {
// Shift first fractional digit into the integer part
deccum = deccum * radix_gen;
// Calculate the absolute value of each digit.
// See note in first loop.
let current_digit = deccum.trunc().abs();
let c = char::from_digit(current_digit.to_isize().unwrap() as u32,
radix);
buf[end] = c.unwrap() as u8;
end += 1;
// Decrease the deccumulator one fractional digit at a time
deccum = deccum.fract();
dig += 1;
}
// If digits are limited, and that limit has been reached,
// cut off the one extra digit, and depending on its value
// round the remaining ones.
if limit_digits && dig == digit_count {
let ascii2value = |chr: u8| {
(chr as char).to_digit(radix).unwrap()
};
let value2ascii = |val: u32| {
char::from_digit(val, radix).unwrap() as u8
};
let extra_digit = ascii2value(buf[end - 1]);
end -= 1;
if extra_digit >= radix / 2 { // -> need to round
let mut i: isize = end as isize - 1;
loop {
// If reached left end of number, have to
// insert additional digit:
if i < 0
|| buf[i as usize] == b'-'
|| buf[i as usize] == b'+' {
for j in ((i + 1) as usize..end).rev() {
buf[j + 1] = buf[j];
}
buf[(i + 1) as usize] = value2ascii(1);
end += 1;
break;
}
// Skip the '.'
if buf[i as usize] == b'.' { i -= 1; continue; }
// Either increment the digit,
// or set to 0 if max and carry the 1.
let current_digit = ascii2value(buf[i as usize]);
if current_digit < (radix - 1) {
buf[i as usize] = value2ascii(current_digit+1);
break;
} else {
buf[i as usize] = value2ascii(0);
i -= 1;
}
}
}
}
}
// if number of digits is not exact, remove all trailing '0's up to
// and including the '.'
if !exact {
let buf_max_i = end - 1;
// index to truncate from
let mut i = buf_max_i;
// discover trailing zeros of fractional part
while i > start_fractional_digits && buf[i] == b'0' {
i -= 1;
}
// Only attempt to truncate digits if buf has fractional digits
if i >= start_fractional_digits {
// If buf ends with '.', cut that too.
if buf[i] == b'.' { i -= 1 }
// only resize buf if we actually remove digits
if i < buf_max_i {
end = i + 1;
}
}
} // If exact and trailing '.', just cut that
else {
let max_i = end - 1;
if buf[max_i] == b'.' {
end = max_i;
}
}
match exp_format {
ExpNone => {},
_ => {
buf[end] = match exp_format {
ExpDec if exp_upper => 'E',
ExpDec if !exp_upper => 'e',
_ => panic!("unreachable"),
} as u8;
end += 1;
struct Filler<'a> {
buf: &'a mut [u8],
end: &'a mut usize,
}
impl<'a> fmt::Write for Filler<'a> {
fn write_str(&mut self, s: &str) -> fmt::Result {
slice::bytes::copy_memory(s.as_bytes(),
&mut self.buf[(*self.end)..]);
*self.end += s.len();
Ok(())
}
}
let mut filler = Filler { buf: &mut buf, end: &mut end };
match sign {
SignNeg => {
let _ = fmt::write(&mut filler, format_args!("{:-}", exp));
}
}
}
}
f(unsafe { str::from_utf8_unchecked(&buf[..end]) })
}<|fim▁end|> | match exp_format { |
<|file_name|>VPFHeaderFormatException.java<|end_file_name|><|fim▁begin|><|fim▁hole|>/*
* GeoTools - The Open Source Java GIS Toolkit
* http://geotools.org
*
* (C) 2003-2008, Open Source Geospatial Foundation (OSGeo)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.geotools.data.vpf.exc;
/**
 * Class VPFHeaderFormatException signals that a VPF table header does not
 * match the expected format.
 *
* <p>Created: Tue Jan 21 15:12:10 2003
*
* @author <a href="mailto:[email protected]">Artur Hefczyc</a>
* @source $URL$
* @version 1.0.0
*/
public class VPFHeaderFormatException extends VPFDataException {
/** serialVersionUID */
private static final long serialVersionUID = 4680952037855445222L;
/** Creates a new VPFHeaderFormatException object. */
public VPFHeaderFormatException() {
super();
}
/**
* Creates a new VPFHeaderFormatException object.
*
     * @param message the detail message describing the header format error
*/
public VPFHeaderFormatException(String message) {
super(message);
}
}
// VPFHeaderFormatException<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># coding:utf-8
# Create your views here.
from django.shortcuts import render
from django.http import HttpResponse
from arrow_time import today_date_for_influxd_sql
from arrow_time import ten_day_ago_for_influxd_sql
from influxdb_function import influxDB_interface
from aircraft_config import AC_WQAR_CONFIG
import json
def home(request):
date_start = today_date_for_influxd_sql()
date_end = today_date_for_influxd_sql()
all_aircraft_list = json.dumps(AC_WQAR_CONFIG().all_aircraft())
where_str = " WHERE time > " + "'" + date_start + "'" + " - 8h" + " AND time < " + "'" + date_end + "'" + " + 16h"
infdb_if = influxDB_interface()
sector_index = infdb_if.inf_query("DB_sector_index", "*", "index", where_str)
    if sector_index != {}:
df = sector_index['index']
result_json = df.to_json(orient="records")
return render(request, 'home.html', {'result_json': result_json,
'all_ac':all_aircraft_list})
else:
return render(request, 'home.html', {'result_json': {},
'all_ac':all_aircraft_list})
<|fim▁hole|> return render(request, 'guide.html')<|fim▁end|> | def guide(request): |
<|file_name|>test-socks5-muc.py<|end_file_name|><|fim▁begin|>"""Check if SOCKS5 relays are disabled in muc"""
import os
if os.name != 'posix':
# skipped on non-Unix for now, because it uses a Unix socket
raise SystemExit(77)
import dbus
from servicetest import call_async, EventPattern, EventProtocolClientFactory
from gabbletest import acknowledge_iq, make_muc_presence, exec_test
import constants as cs
import ns
from mucutil import join_muc
from bytestream import BytestreamS5BRelay, create_from_si_offer, announce_socks5_proxy
from twisted.internet import reactor
def test(q, bus, conn, stream):
iq_event, disco_event = q.expect_many(
EventPattern('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard'),
EventPattern('stream-iq', to='localhost', query_ns=ns.DISCO_ITEMS))
acknowledge_iq(stream, iq_event.stanza)
announce_socks5_proxy(q, stream, disco_event.stanza)
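    # the server advertises a SOCKS5 proxy here; the point of the test is that
    # this proxy must not be offered later inside the MUC bytestream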
join_muc(q, bus, conn, stream, '[email protected]')
# bob offers a stream tube
stream_tube_id = 1
presence = make_muc_presence('owner', 'moderator', '[email protected]', 'bob')
tubes = presence.addElement((ns.TUBES, 'tubes'))
tube = tubes.addElement((None, 'tube'))
tube['type'] = 'stream'
tube['service'] = 'echo'
tube['id'] = str(stream_tube_id)
parameters = tube.addElement((None, 'parameters'))<|fim▁hole|> return props[cs.CHANNEL_TYPE] == cs.CHANNEL_TYPE_STREAM_TUBE
e = q.expect('dbus-signal', signal='NewChannels',
predicate=new_chan_predicate)
channels = e.args[0]
assert len(channels) == 1
path, props = channels[0]
assert props[cs.CHANNEL_TYPE] == cs.CHANNEL_TYPE_STREAM_TUBE
tube_chan = bus.get_object(conn.bus_name, path)
tube_iface = dbus.Interface(tube_chan, cs.CHANNEL_TYPE_STREAM_TUBE)
call_async(q, tube_iface, 'Accept', 0, 0, '',
byte_arrays=True)
accept_return_event, _ = q.expect_many(
EventPattern('dbus-return', method='Accept'),
EventPattern('dbus-signal', signal='TubeChannelStateChanged',
args=[cs.TUBE_CHANNEL_STATE_OPEN]))
unix_socket_adr = accept_return_event.value[0]
factory = EventProtocolClientFactory(q)
reactor.connectUNIX(unix_socket_adr, factory)
# expect SI request
e = q.expect('stream-iq', to='[email protected]/bob', query_ns=ns.SI,
query_name='si')
bytestream, profile = create_from_si_offer(stream, q, BytestreamS5BRelay, e.stanza,
'[email protected]/bob')
result, si = bytestream.create_si_reply(e.stanza, 'test@localhost/Resource')
si.addElement((ns.TUBES, 'tube'))
stream.send(result)
# wait SOCKS5 init iq
id, mode, si, hosts = bytestream._expect_socks5_init()
for jid, host, port in hosts:
# the proxy is not announced because we are in a muc
assert jid != 'proxy.localhost'
if __name__ == '__main__':
exec_test(test)<|fim▁end|> | stream.send(presence)
def new_chan_predicate(e):
path, props = e.args[0][0] |
<|file_name|>hooks.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package git
import (
"errors"
"io/ioutil"
"os"
"path"
"strings"
"github.com/Unknwon/com"
)
// hookNames is a list of supported Git hook names.
var hookNames = []string{
"pre-applypatch",<|fim▁hole|> "applypatch-msg",
"prepare-commit-msg",
"commit-msg",
"pre-commit",
"pre-rebase",
"post-commit",
"post-receive",
"post-update",
}
var (
ErrNotValidHook = errors.New("not a valid Git hook")
)
// IsValidHookName returns true if given name is a valid Git hook.
func IsValidHookName(name string) bool {
for _, hn := range hookNames {
if hn == name {
return true
}
}
return false
}
// Hook represents a Git hook.
type Hook struct {
name string
IsActive bool // Indicates whether repository has this hook.
Content string // Content of hook if it's active.
Sample string // Sample content from Git.
path string // Hook file path.
}
// GetHook returns a Git hook by given name and repository.
func GetHook(repoPath, name string) (*Hook, error) {
if !IsValidHookName(name) {
return nil, ErrNotValidHook
}
h := &Hook{
name: name,
path: path.Join(repoPath, "hooks", name),
}
if isFile(h.path) {
data, err := ioutil.ReadFile(h.path)
if err != nil {
return nil, err
}
h.IsActive = true
h.Content = string(data)
} else if isFile(h.path + ".sample") {
data, err := ioutil.ReadFile(h.path + ".sample")
if err != nil {
return nil, err
}
h.Sample = string(data)
}
return h, nil
}
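// Example, assuming a bare repository at a hypothetical path:
//
//	hook, err := GetHook("/srv/repos/foo.git", "post-receive")
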
func (h *Hook) Name() string {
return h.name
}
// Update writes the hook content to the hook file, removing the file
// entirely when the content is blank.
func (h *Hook) Update() error {
if len(strings.TrimSpace(h.Content)) == 0 {
if com.IsExist(h.path) {
return os.Remove(h.path)
}
return nil
}
return ioutil.WriteFile(h.path, []byte(strings.Replace(h.Content, "\r", "", -1)), os.ModePerm)
}
// ListHooks returns a list of Git hooks of given repository.
func ListHooks(repoPath string) (_ []*Hook, err error) {
if !isDir(path.Join(repoPath, "hooks")) {
return nil, errors.New("hooks path does not exist")
}
hooks := make([]*Hook, len(hookNames))
for i, name := range hookNames {
hooks[i], err = GetHook(repoPath, name)
if err != nil {
return nil, err
}
}
return hooks, nil
}
func (repo *Repository) GetHook(name string) (*Hook, error) {
return GetHook(repo.Path, name)
}
func (repo *Repository) Hooks() ([]*Hook, error) {
return ListHooks(repo.Path)
}<|fim▁end|> | |
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|>/**
* Java Beans.<|fim▁hole|> * @author Archimedes Trajano
*/
package net.trajano.ms.vertx.beans;<|fim▁end|> | * |
<|file_name|>base_error.py<|end_file_name|><|fim▁begin|># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class BaseError(Exception):
"""Base error for all test runner errors."""
def __init__(self, message, is_infra_error=False):<|fim▁hole|> self._is_infra_error = is_infra_error
self.message = message
def __eq__(self, other):
return (self.message == other.message
and self.is_infra_error == other.is_infra_error)
def __ne__(self, other):
return not self == other
@property
def is_infra_error(self):
"""Property to indicate if error was caused by an infrastructure issue."""
return self._is_infra_error<|fim▁end|> | super(BaseError, self).__init__(message) |
<|file_name|>macros.rs<|end_file_name|><|fim▁begin|>/// Add line of assembly to output, with indentation and newline, using
/// format! syntax.
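///
/// A hypothetical use, assuming a `state` value with `output: String` and
/// `level: usize` fields:
///
/// `push_asm!(state, "mov {}, {}", dst, src);`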
macro_rules! push_asm {
($state:expr, $fmt:expr) => {
(writeln!(&mut $state.output, concat!("{}", $fmt),
" ".repeat($state.level * 4))).unwrap()
};
($state:expr, $fmt:expr, $($arg:tt)*) => {<|fim▁hole|> " ".repeat($state.level * 4),
$($arg)*)).unwrap()
};
}<|fim▁end|> | (writeln!(&mut $state.output, concat!("{}", $fmt), |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(rustc_private, plugin_registrar)]
#![warn(missing_docs)]
//! rebind_plugins
//! ==============
//!
//! A compiler plugin which complements the `rebind` crate by providing the
//! `#[derive(Action)]`
//! annotation.
//!
//! Example
//! -------
//!
//! ```
//! #![feature(plugin)]
//! #![plugin(rebind_plugins)]
//!
//! extern crate rebind;
//! use rebind::RebindBuilder;
//!
//! fn main() {
//! #[derive(Action)]
//! enum MyAction {ActionA, ActionB}
//!
//! let _ = RebindBuilder::<MyAction>::new().build_translator();
//! // ...
//! }
//! ```
extern crate rebind;
extern crate rustc;
extern crate syntax;
mod derive_action;
use rustc::plugin::Registry;
use syntax::parse::token::intern;
use syntax::ext::base::SyntaxExtension;
<|fim▁hole|>pub fn plugin_registrar(registry: &mut Registry) {
registry.register_syntax_extension(intern("derive_Action"),
SyntaxExtension::MultiDecorator(Box::new(expand_derive_action_annotation)));
}<|fim▁end|> | use derive_action::expand_derive_action_annotation;
#[plugin_registrar]
#[doc(hidden)] |
<|file_name|>ipc_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''<|fim▁hole|>'''
# Import python libs
from __future__ import absolute_import
import os
import logging
import tornado.gen
import tornado.ioloop
import tornado.testing
import salt.utils
import salt.config
import salt.exceptions
import salt.transport.ipc
import salt.transport.server
import salt.transport.client
from salt.ext.six.moves import range
# Import Salt Testing libs
import integration
from salttesting.mock import MagicMock
from salttesting.helpers import ensure_in_syspath
log = logging.getLogger(__name__)
ensure_in_syspath('../')
class BaseIPCReqCase(tornado.testing.AsyncTestCase):
'''
Test the req server/client pair
'''
def setUp(self):
super(BaseIPCReqCase, self).setUp()
self._start_handlers = dict(self.io_loop._handlers)
self.socket_path = os.path.join(integration.TMP, 'ipc_test.ipc')
self.server_channel = salt.transport.ipc.IPCMessageServer(
self.socket_path,
io_loop=self.io_loop,
payload_handler=self._handle_payload,
)
self.server_channel.start()
self.payloads = []
def tearDown(self):
super(BaseIPCReqCase, self).tearDown()
failures = []
self.server_channel.close()
os.unlink(self.socket_path)
for k, v in self.io_loop._handlers.iteritems():
if self._start_handlers.get(k) != v:
failures.append((k, v))
if len(failures) > 0:
raise Exception('FDs still attached to the IOLoop: {0}'.format(failures))
@tornado.gen.coroutine
def _handle_payload(self, payload, reply_func):
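        # record and echo each payload; a dict payload with 'stop' set ends
        # the test by stopping the IOLoop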
self.payloads.append(payload)
yield reply_func(payload)
if isinstance(payload, dict) and payload.get('stop'):
self.stop()
class IPCMessageClient(BaseIPCReqCase):
'''
Test all of the clear msg stuff
'''
def _get_channel(self):
channel = salt.transport.ipc.IPCMessageClient(
socket_path=self.socket_path,
io_loop=self.io_loop,
)
channel.connect(callback=self.stop)
self.wait()
return channel
def setUp(self):
super(IPCMessageClient, self).setUp()
self.channel = self._get_channel()
def tearDown(self):
        super(IPCMessageClient, self).tearDown()
self.channel.close()
def test_basic_send(self):
msg = {'foo': 'bar', 'stop': True}
self.channel.send(msg)
self.wait()
self.assertEqual(self.payloads[0], msg)
def test_many_send(self):
msgs = []
self.server_channel.stream_handler = MagicMock()
for i in range(0, 1000):
msgs.append('test_many_send_{0}'.format(i))
for i in msgs:
self.channel.send(i)
self.channel.send({'stop': True})
self.wait()
self.assertEqual(self.payloads[:-1], msgs)
def test_very_big_message(self):
long_str = ''.join([str(num) for num in range(10**5)])
msg = {'long_str': long_str, 'stop': True}
self.channel.send(msg)
self.wait()
self.assertEqual(msg, self.payloads[0])
def test_multistream_sends(self):
local_channel = self._get_channel()
for c in (self.channel, local_channel):
c.send('foo')
self.channel.send({'stop': True})
self.wait()
self.assertEqual(self.payloads[:-1], ['foo', 'foo'])
def test_multistream_errors(self):
local_channel = self._get_channel()
for c in (self.channel, local_channel):
c.send(None)
for c in (self.channel, local_channel):
c.send('foo')
self.channel.send({'stop': True})
self.wait()
self.assertEqual(self.payloads[:-1], [None, None, 'foo', 'foo'])
if __name__ == '__main__':
from integration import run_tests
run_tests(IPCMessageClient, needs_daemon=False)<|fim▁end|> | :codeauthor: :email:`Mike Place <[email protected]>` |
<|file_name|>05_trunBasedRpg.py<|end_file_name|><|fim▁begin|>#turn based rpg
import random
import time
class role:
name=""
lv=1
exp=0
nextLv=1000
hp=100
mp=30
stra=5
inte=5
spd=5
defe=5
rest=5
void=5
dropItems=[None]
dropPrecent=[100]
command=['attack','void','def','fireball']
def __init__(self,name,lv):
self.name=name
self.lv=lv
self.initRoleByLv(lv)
def initRoleByLv(self,lv):
self.exp=lv*(1000+lv*200)
self.nextLv=(lv+1)*(1000+(lv+1)*200)
self.hp=int(self.hp+lv*30*random.random())
self.mp=int(self.mp+lv*10*random.random())
self.stra=int(self.stra+lv*2*random.random())
self.inte=int(self.inte+lv*2*random.random())
self.spd=int(self.spd+lv*2*random.random())
self.defe=int(self.defe+lv*2*random.random())
self.rest=int(self.rest+lv*2*random.random())
self.void=int(self.void+lv*2*random.random())
def getInfo(self):
return self.name+"[lv:"+str(self.lv)+",exp:"+str(self.exp)+\
",nextLv:"+str(self.nextLv)+\
",hp:"+str(self.hp)+",mp:"+str(self.mp)+\
",stra:"+str(self.stra)+",inte:"+str(self.inte)+\
",spd:"+str(self.spd)+",defe:"+str(self.defe)+\
",rest:"+str(self.rest)+\
",void:"+str(self.void)+",command:["+",".join(self.command)+"]]"
def addExp(self,exp):
self.exp+=exp
if self.exp>=self.nextLv:
self.lvUp();
print self.name+' get '+str(exp)+' exp!'
def lvUp(self):
self.lv+=1
self.nextLv=(self.lv+1)*(1000+(self.lv+1)*200)
self.hp=int(self.hp+30*random.random())
self.mp=int(self.mp+10*random.random())
self.stra=int(self.stra+2*random.random())
self.inte=int(self.inte+2*random.random())
self.spd=int(self.spd+2*random.random())
self.defe=int(self.defe+2*random.random())
self.rest=int(self.rest+2*random.random())
self.void=int(self.void+2*random.random())
if self.exp>=self.nextLv:
self.lvUp();
print self.name+' LEVELUP!'+self.getInfo()
class stage:
stagename="stage"
stageLv=1
compelete=False
startPos=0
endPos=100
emenyLIst=[role("man",1),role("slime",3),role("swordman",4),\
role("dragon baby",5),role("dragon",7),role("vampire",8)]
emenyPrecent=[30,30,20,10,5,5]
boss=role("boss",10)
def __init__(self,stagename,stagelv):
self.stagename=stagename
self.stagelv=stagelv
self.startPos=0
def getInfo(self):
s=''
for num in self.emenyPrecent :s+=str(num)+','
s2=''
for num2 in self.emenyLIst :s2+=num2.name+','
return self.stagename+"[stageLv:"+str(self.stageLv)+",compelete:"+str(self.compelete)+\
",startPos:"+str(self.startPos)+\
",endPos:"+str(self.endPos)+\
",emenyLIst:["+s2+\
"],emenyPrecent:["+s+"]]"
#my=role('my',7)
#print my.getInfo()
#my.addExp(18000)
#print my.getInfo()
#stage=stage("forest",1)
#print stage.getInfo()
#commands:
def attack(roleself,roleattacked):
damage=0
if roleself.stra-roleattacked.defe>0:
damage=int((roleself.stra-roleattacked.defe)*random.random()*20)
else:
damage=int(random.random()*20)
roleattacked.hp-=damage
print roleself.name+'\'s attack:deal '+str(damage)+' damage to '+roleattacked.name
#methods:
def expolore(stage):
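    # walk the stage step by step; r rolls an encounter against the cumulative
    # bands of emenyPrecent, and reaching endPos clears the stage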
while True:
r=int(random.random()*100);
precentnew=0;
for (precent,emeny) in zip(stage.emenyPrecent,stage.emenyLIst):
stage.startPos+=int(4*random.random())+1;
if(stage.startPos>=stage.endPos):
print "stage clear!"
return "stage clear!"
precentold=precentnew
precentnew+=precent
if r>=precentold and r<precentnew :
while True:
print time.strftime("%Y-%m-%d-%H-%M-%S",\
time.localtime(time.time())),\
precentold,\
precentnew,emeny.name,emeny.hp,emeny.mp,player.name,player.hp,player.mp
#print emeny.getInfo()
#print player.getInfo()
cmd=raw_input()
if cmd=="exit" :
break
if cmd=="show":
print stage.startPos,stage.endPos,player.getInfo(),emeny.getInfo()
break
if emeny.spd>player.spd:
attack(emeny,player)
if cmd=="a" or cmd=="attack":
attack(player,emeny)
if emeny.spd<=player.spd:
attack(emeny,player)<|fim▁hole|> elif player.hp<=0:
print "game over"
return 'game over'
#main methods:
global player
player=role("player",8)
while True:
print 'Please type enter to start,type"exit" to exit'
cmd=raw_input()
if cmd=="exit" :
break
else:
expolore(stage("forest",1))<|fim▁end|> | if emeny.hp<=0:
player.addExp(int((emeny.lv+emeny.inte+emeny.stra)*500*random.random()))
break |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>export const CREATE_MESSAGE_SUCCESS = 'CREATE_MESSAGE_SUCCESS';
export const CREATE_MESSAGE_ERROR = 'CREATE_MESSAGE_ERROR';
export const UPDATE_MESSAGE_REQUEST = 'UPDATE_MESSAGE_REQUEST';
export const UPDATE_MESSAGE_SUCCESS = 'UPDATE_MESSAGE_SUCCESS';
export const UPDATE_MESSAGE_ERROR = 'UPDATE_MESSAGE_ERROR';
export const DELETE_MESSAGE_REQUEST = 'DELETE_MESSAGE_REQUEST';
export const DELETE_MESSAGE_SUCCESS = 'DELETE_MESSAGE_SUCCESS';
export const DELETE_MESSAGE_ERROR = 'DELETE_MESSAGE_ERROR';<|fim▁end|> | export const FETCH_MESSAGES_REQUEST = 'FETCH_MESSAGES_REQUEST';
export const FETCH_MESSAGES_SUCCESS = 'FETCH_MESSAGES_SUCCESS';
export const FETCH_MESSAGES_ERROR = 'FETCH_MESSAGES_ERROR';
export const CREATE_MESSAGE_REQUEST = 'CREATE_MESSAGE_REQUEST'; |
<|file_name|>HasCases.relay.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { graphql } from 'react-relay';
import { makeFilter } from '@ncigdc/utils/filters';
import { compose, withPropsOnChange } from 'recompose';
import { BaseQuery } from '@ncigdc/modern_components/Query';
export default (Component: ReactClass<*>) =>
compose(<|fim▁hole|> variables: {
filters: makeFilter([
{
field: 'cases.project.project_id',
value: [projectId],
},
...(mutated
? [{ field: 'cases.available_variation_data', value: ['ssm'] }]
: []),
]),
},
};
}),
)((props: Object) => {
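  // the query requests zero rows (first: 0); only the aggregate hit total is read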
return (
<BaseQuery
parentProps={props}
name="HasCases"
variables={props.variables}
Component={Component}
query={graphql`
query HasCases_relayQuery($filters: FiltersArgument) {
viewer {
explore {
cases {
hits(first: 0, filters: $filters) {
total
}
}
}
}
}
`}
/>
);
});<|fim▁end|> | withPropsOnChange(['projectId', 'mutated'], ({ projectId, mutated }) => {
return { |
<|file_name|>xcode_emulation.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import gyp.common
import os.path
import re
import shlex
import subprocess
import sys
from gyp.common import GypError
class XcodeSettings(object):
"""A class that understands the gyp 'xcode_settings' object."""
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
def __init__(self, spec):
self.spec = spec
self.isIOS = False
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
# for that config -- the per-target settings as well. Settings that are
# the same for all configs are implicitly per-target settings.
self.xcode_settings = {}
configs = spec['configurations']
for configname, config in configs.iteritems():
self.xcode_settings[configname] = config.get('xcode_settings', {})
if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
None):
self.isIOS = True
# If you need this, speak up at http://crbug.com/122592
conditional_keys = [key for key in self.xcode_settings[configname]
if key.endswith(']')]
if conditional_keys:
print 'Warning: Conditional keys not implemented, ignoring:', \
' '.join(conditional_keys)
for key in conditional_keys:
del self.xcode_settings[configname][key]
# This is only non-None temporarily during the execution of some methods.
self.configname = None
# Used by _AdjustLibrary to match .a and .dylib entries in libraries.
self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
def _Settings(self):
assert self.configname
return self.xcode_settings[self.configname]
def _Test(self, test_key, cond_key, default):
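    # True when the xcode setting (or |default| if it is unset) equals cond_key.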
return self._Settings().get(test_key, default) == cond_key
def _Appendf(self, lst, test_key, format_str, default=None):
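    # Appends format_str interpolated with the setting value (or the default).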
if test_key in self._Settings():
lst.append(format_str % str(self._Settings()[test_key]))
elif default:
lst.append(format_str % str(default))
def _WarnUnimplemented(self, test_key):
if test_key in self._Settings():
print 'Warning: Ignoring not yet implemented key "%s".' % test_key
def _IsBundle(self):
return int(self.spec.get('mac_bundle', 0)) != 0
def GetFrameworkVersion(self):
"""Returns the framework version of the current target. Only valid for
bundles."""
assert self._IsBundle()
return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
def GetWrapperExtension(self):
"""Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('loadable_module', 'shared_library'):
default_wrapper_extension = {
'loadable_module': 'bundle',
'shared_library': 'framework',
}[self.spec['type']]
wrapper_extension = self.GetPerTargetSetting(
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
def GetProductName(self):
"""Returns PRODUCT_NAME."""
return self.spec.get('product_name', self.spec['target_name'])
def GetFullProductName(self):
"""Returns FULL_PRODUCT_NAME."""
if self._IsBundle():
return self.GetWrapperName()
else:
return self._GetStandaloneBinaryPath()
def GetWrapperName(self):
"""Returns the directory name of the bundle represented by this target.
Only valid for bundles."""
assert self._IsBundle()
return self.GetProductName() + self.GetWrapperExtension()
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
if self.isIOS:
return self.GetWrapperName()
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return os.path.join(
self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
else:
# loadable_modules have a 'Contents' folder like executables.
return os.path.join(self.GetWrapperName(), 'Contents')
def GetBundleResourceFolder(self):
"""Returns the qualified path to the bundle's resource folder. E.g.
Chromium.app/Contents/Resources. Only valid for bundles."""
assert self._IsBundle()
if self.isIOS:
return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
def GetBundlePlistPath(self):
"""Returns the qualified path to the bundle's plist file. E.g.
Chromium.app/Contents/Info.plist. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('executable', 'loadable_module'):
return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
else:
return os.path.join(self.GetBundleContentsFolderPath(),
'Resources', 'Info.plist')
def GetProductType(self):
"""Returns the PRODUCT_TYPE of this target."""
if self._IsBundle():
return {
'executable': 'com.apple.product-type.application',
'loadable_module': 'com.apple.product-type.bundle',
'shared_library': 'com.apple.product-type.framework',
}[self.spec['type']]
else:
return {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.apple.product-type.library.dynamic',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
}[self.spec['type']]
def GetMachOType(self):
"""Returns the MACH_O_TYPE of this target."""
# Weird, but matches Xcode.
if not self._IsBundle() and self.spec['type'] == 'executable':
return ''
return {
'executable': 'mh_execute',
'static_library': 'staticlib',
'shared_library': 'mh_dylib',
'loadable_module': 'mh_bundle',
}[self.spec['type']]
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('shared_library') or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
return os.path.join(path, self.GetExecutableName())
def _GetStandaloneExecutableSuffix(self):
if 'product_extension' in self.spec:
return '.' + self.spec['product_extension']
return {
'executable': '',
'static_library': '.a',
'shared_library': '.dylib',
'loadable_module': '.so',
}[self.spec['type']]
def _GetStandaloneExecutablePrefix(self):
return self.spec.get('product_prefix', {
'executable': '',
'static_library': 'lib',
'shared_library': 'lib',
# Non-bundled loadable_modules are called foo.so for some reason
# (that is, .so and no prefix) with the xcode build -- match that.
'loadable_module': '',
}[self.spec['type']])
def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec['type'] in (
'executable', 'shared_library', 'static_library', 'loadable_module'), (
'Unexpected type %s' % self.spec['type'])
target = self.spec['target_name']
if self.spec['type'] == 'static_library':
if target[:3] == 'lib':
target = target[3:]
elif self.spec['type'] in ('loadable_module', 'shared_library'):
if target[:3] == 'lib':
target = target[3:]
target_prefix = self._GetStandaloneExecutablePrefix()
target = self.spec.get('product_name', target)
target_ext = self._GetStandaloneExecutableSuffix()
return target_prefix + target + target_ext
def GetExecutableName(self):
"""Returns the executable name of the bundle represented by this target.
E.g. Chromium."""
if self._IsBundle():
return self.spec.get('product_name', self.spec['target_name'])
else:
return self._GetStandaloneBinaryPath()
def GetExecutablePath(self):
"""Returns the directory name of the bundle represented by this target. E.g.
Chromium.app/Contents/MacOS/Chromium."""
if self._IsBundle():
return self._GetBundleBinaryPath()
else:
return self._GetStandaloneBinaryPath()
def GetActiveArchs(self, configname):
"""Returns the architectures this target should be built for."""
# TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
# CURRENT_ARCH / NATIVE_ARCH env vars?
return self.xcode_settings[configname].get('ARCHS', ['i386'])
def _GetSdkVersionInfoItem(self, sdk, infoitem):
job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, infoitem],
stdout=subprocess.PIPE)
out = job.communicate()[0]
if job.returncode != 0:
sys.stderr.write(out + '\n')
raise GypError('Error %d running xcodebuild' % job.returncode)
return out.rstrip('\n')
def _SdkPath(self):
sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx')
if sdk_root.startswith('/'):
return sdk_root
if sdk_root not in XcodeSettings._sdk_path_cache:
XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
sdk_root, 'Path')
return XcodeSettings._sdk_path_cache[sdk_root]
def _AppendPlatformVersionMinFlags(self, lst):
self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
# TODO: Implement this better?
sdk_path_basename = os.path.basename(self._SdkPath())
if sdk_path_basename.lower().startswith('iphonesimulator'):
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-mios-simulator-version-min=%s')
else:
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-miphoneos-version-min=%s')
def GetCflags(self, configname, arch=None):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
compilations."""
# This functions (and the similar ones below) do not offer complete
# emulation of all xcode_settings keys. They're implemented on demand.
self.configname = configname
cflags = []
sdk_root = self._SdkPath()
if 'SDKROOT' in self._Settings():
cflags.append('-isysroot %s' % sdk_root)
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
cflags.append('-Wconstant-conversion')
if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
cflags.append('-funsigned-char')
if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
cflags.append('-fasm-blocks')
if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
cflags.append('-mdynamic-no-pic')
else:
pass
# TODO: In this case, it depends on the target. xcode passes
# mdynamic-no-pic by default for executable and possibly static lib
# according to mento
if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
cflags.append('-mpascal-strings')
self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
if dbg_format == 'dwarf':
cflags.append('-gdwarf-2')
elif dbg_format == 'stabs':
raise NotImplementedError('stabs debug format is not supported yet.')
elif dbg_format == 'dwarf-with-dsym':
cflags.append('-gdwarf-2')
else:
raise NotImplementedError('Unknown debug format %s' % dbg_format)
if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
cflags.append('-fstrict-aliasing')
elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
cflags.append('-fno-strict-aliasing')
if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
cflags.append('-fvisibility=hidden')
if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
cflags.append('-Werror')
if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
cflags.append('-Wnewline-eof')
self._AppendPlatformVersionMinFlags(cflags)
# TODO:
if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
self._WarnUnimplemented('COPY_PHASE_STRIP')
self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
# TODO: This is exported correctly, but assigning to it is not supported.
self._WarnUnimplemented('MACH_O_TYPE')
self._WarnUnimplemented('PRODUCT_TYPE')
if arch is not None:
archs = [arch]
else:
archs = self._Settings().get('ARCHS', ['i386'])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
cflags.append('-arch ' + archs[0])
if archs[0] in ('i386', 'x86_64'):
if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse3')
if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
default='NO'):
cflags.append('-mssse3') # Note 3rd 's'.
if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.1')
if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.2')
cflags += self._Settings().get('WARNING_CFLAGS', [])
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
self.configname = None
return cflags
def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
def GetCflagsCC(self, configname):
"""Returns flags that need to be added to .cc, and .mm compilations."""
self.configname = configname
cflags_cc = []
clang_cxx_language_standard = self._Settings().get(
'CLANG_CXX_LANGUAGE_STANDARD')
    # Note: Don't rewrite c++0x to c++11, so that c++0x can still be used with
    # older clangs that don't understand c++11 yet (like Xcode 4.2's).
if clang_cxx_language_standard:
cflags_cc.append('-std=%s' % clang_cxx_language_standard)
self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
cflags_cc.append('-fno-rtti')
if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
cflags_cc.append('-fno-exceptions')
if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
cflags_cc.append('-fvisibility-inlines-hidden')
if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
cflags_cc.append('-fno-threadsafe-statics')
# Note: This flag is a no-op for clang, it only has an effect for gcc.
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
cflags_cc.append('-Wno-invalid-offsetof')
other_ccflags = []
for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
# TODO: More general variable expansion. Missing in many other places too.
if flag in ('$inherited', '$(inherited)', '${inherited}'):
flag = '$OTHER_CFLAGS'
if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
else:
other_ccflags.append(flag)
cflags_cc += other_ccflags
self.configname = None
return cflags_cc
def _AddObjectiveCGarbageCollectionFlags(self, flags):
gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
if gc_policy == 'supported':
flags.append('-fobjc-gc')
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
def _AddObjectiveCARCFlags(self, flags):
if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
flags.append('-fobjc-arc')
def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations."""
self.configname = configname
cflags_objc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
self._AddObjectiveCARCFlags(cflags_objc)
self.configname = None
return cflags_objc
def GetCflagsObjCC(self, configname):
"""Returns flags that need to be added to .mm compilations."""
self.configname = configname
cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
self._AddObjectiveCARCFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None
return cflags_objcc
def GetInstallNameBase(self):
"""Return DYLIB_INSTALL_NAME_BASE for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
install_base = self.GetPerTargetSetting(
'DYLIB_INSTALL_NAME_BASE',
default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
return install_base
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
def GetInstallName(self):
"""Return LD_DYLIB_INSTALL_NAME for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
default_install_name = \
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
install_name = self.GetPerTargetSetting(
'LD_DYLIB_INSTALL_NAME', default=default_install_name)
# Hardcode support for the variables used in chromium for now, to
# unblock people using the make build.
if '$' in install_name:
assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
'$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
'yet in target \'%s\' (got \'%s\')' %
(self.spec['target_name'], install_name))
install_name = install_name.replace(
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
self._StandardizePath(self.GetInstallNameBase()))
if self._IsBundle():
# These are only valid for bundles, hence the |if|.
install_name = install_name.replace(
'$(WRAPPER_NAME)', self.GetWrapperName())
install_name = install_name.replace(
'$(PRODUCT_NAME)', self.GetProductName())
else:
assert '$(WRAPPER_NAME)' not in install_name
assert '$(PRODUCT_NAME)' not in install_name
install_name = install_name.replace(
'$(EXECUTABLE_PATH)', self.GetExecutablePath())
return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
    LINKER_FILE = r'(\S+)'
    WORD = r'\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
"""Returns flags that need to be passed to the linker.
Args:
configname: The name of the configuration to get ld flags for.
      product_dir: The directory where products such as static and dynamic
          libraries are placed. This is added to the library search path.
      gyp_to_build_path: A function that converts paths relative to the
          current gyp file to paths relative to the build directory.
      arch: If set, overrides the ARCHS build setting for this call.
    """
self.configname = configname
ldflags = []
# The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
# can contain entries that depend on this. Explicitly absolutify these.
for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
ldflags.append('-Wl,-dead_strip')
if self._Test('PREBINDING', 'YES', default='NO'):
ldflags.append('-Wl,-prebind')
self._Appendf(
ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
self._Appendf(
ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
self._AppendPlatformVersionMinFlags(ldflags)
if 'SDKROOT' in self._Settings():
ldflags.append('-isysroot ' + self._SdkPath())
for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
ldflags.append('-L' + gyp_to_build_path(library_path))
if 'ORDER_FILE' in self._Settings():
ldflags.append('-Wl,-order_file ' +
'-Wl,' + gyp_to_build_path(
self._Settings()['ORDER_FILE']))
if arch is not None:
archs = [arch]
else:
archs = self._Settings().get('ARCHS', ['i386'])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
ldflags.append('-arch ' + archs[0])
# Xcode adds the product directory by default.
ldflags.append('-L' + product_dir)
install_name = self.GetInstallName()
if install_name:
ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
ldflags.append('-Wl,-rpath,' + rpath)
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath()))
self.configname = None
return ldflags
def GetLibtoolflags(self, configname):
"""Returns flags that need to be passed to the static linker.
Args:
configname: The name of the configuration to get ld flags for.
"""
self.configname = configname
libtoolflags = []
for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
libtoolflags.append(libtoolflag)
# TODO(thakis): ARCHS?
self.configname = None
return libtoolflags
def GetPerTargetSettings(self):
"""Gets a list of all the per-target settings. This will only fetch keys
whose values are the same across all configurations."""
first_pass = True
result = {}
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
result = dict(self.xcode_settings[configname])
first_pass = False
else:
for key, value in self.xcode_settings[configname].iteritems():
if key not in result:
continue
elif result[key] != value:
del result[key]
return result
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
has the same value in all configurations and throws otherwise."""
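    # For example, with xcode_settings == {'Debug': {'SDKROOT': 'macosx'},
    # 'Release': {'SDKROOT': 'macosx'}}, GetPerTargetSetting('SDKROOT')
    # returns 'macosx'. If the configurations disagreed, the assert below
    # would fire instead.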
first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
result = self.xcode_settings[configname].get(setting, None)
first_pass = False
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
"(target %s)" % (setting, spec['target_name']))
if result is None:
return default
return result
def _GetStripPostbuilds(self, configname, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
neccessary to strip this target's binary. These should be run as postbuilds
before the actual postbuilds run."""
self.configname = configname
result = []
if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
if self._IsBundle():
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
strip_flags = {
'all': '',
'non-global': '-x',
'debugging': '-S',
}[strip_style]
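      # For example, STRIP_STYLE 'debugging' produces "strip -S <binary>"
      # below, while 'all' runs a bare "strip <binary>".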
explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
if explicit_strip_flags:
strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
if not quiet:
result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
result.append('strip %s %s' % (strip_flags, output_binary))
self.configname = None
return result
def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
neccessary to massage this target's debug information. These should be run
as postbuilds before the actual postbuilds run."""
self.configname = configname
# For static libraries, no dSYMs are created.
result = []
if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
self._Test(
'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
self.spec['type'] != 'static_library'):
if not quiet:
result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
self.configname = None
return result
def GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
"""Returns a list of shell commands that contain the shell commands
to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
return (
self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
self._GetStripPostbuilds(configname, output_binary, quiet))
def _AdjustLibrary(self, library):
if library.endswith('.framework'):
l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
else:
m = self.library_re.match(library)
if m:
l = '-l' + m.group(1)
else:
l = library
return l.replace('$(SDKROOT)', self._SdkPath())
def AdjustLibraries(self, libraries):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
    libraries = [self._AdjustLibrary(library) for library in libraries]
return libraries
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
This feature consists of several pieces:
* If GCC_PREFIX_HEADER is present, all compilations in that project get an
additional |-include path_to_prefix_header| cflag.
* If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
instead compiled, and all other compilations in the project get an
additional |-include path_to_compiled_header| instead.
+ Compiled prefix headers have the extension gch. There is one gch file for
every language used in the project (c, cc, m, mm), since gch files for
different languages aren't compatible.
+ gch files themselves are built with the target's normal cflags, but they
obviously don't get the |-include| flag. Instead, they need a -x flag that
describes their language.
+ All o files in the target need to depend on the gch file, to make sure
it's built before any o file is built.
This class helps with some of these tasks, but it needs help from the build
system for writing dependencies to the gch files, for writing build commands
for the gch files, and for figuring out the location of the gch files.
"""
def __init__(self, xcode_settings,
gyp_path_to_build_path, gyp_path_to_build_output):
"""If xcode_settings is None, all methods on this class are no-ops.
Args:
gyp_path_to_build_path: A function that takes a gyp-relative path,
and returns a path relative to the build directory.
gyp_path_to_build_output: A function that takes a gyp-relative path and
a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
to where the output of precompiling that path for that language
should be placed (without the trailing '.gch').
"""
# This doesn't support per-configuration prefix headers. Good enough
# for now.
self.header = None
self.compile_headers = False
if xcode_settings:
self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
self.compile_headers = xcode_settings.GetPerTargetSetting(
'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
self.compiled_headers = {}
if self.header:
if self.compile_headers:
for lang in ['c', 'cc', 'm', 'mm']:
self.compiled_headers[lang] = gyp_path_to_build_output(
self.header, lang)
self.header = gyp_path_to_build_path(self.header)
def _CompiledHeader(self, lang, arch):
assert self.compile_headers
h = self.compiled_headers[lang]
if arch:
h += '.' + arch
return h
def GetInclude(self, lang, arch=None):
"""Gets the cflags to include the prefix header for language |lang|."""
if self.compile_headers and lang in self.compiled_headers:
return '-include %s' % self._CompiledHeader(lang, arch)
elif self.header:
return '-include %s' % self.header
else:
return ''
def _Gch(self, lang, arch):
"""Returns the actual file name of the prefix header for language |lang|."""
assert self.compile_headers
return self._CompiledHeader(lang, arch) + '.gch'
def GetObjDependencies(self, sources, objs, arch=None):
"""Given a list of source files and the corresponding object files, returns
a list of (source, object, gch) tuples, where |gch| is the build-directory
    relative path to the gch file each object file depends on. |sources[i]|
has to be the source file belonging to |objs[i]|."""
if not self.header or not self.compile_headers:
return []
result = []
for source, obj in zip(sources, objs):
ext = os.path.splitext(source)[1]
lang = {
'.c': 'c',
'.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
'.m': 'm',
'.mm': 'mm',
}.get(ext, None)
if lang:
result.append((source, obj, self._Gch(lang, arch)))
return result
def GetPchBuildCommands(self, arch=None):
"""Returns [(path_to_gch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory.
"""
if not self.header or not self.compile_headers:
return []
return [
(self._Gch('c', arch), '-x c-header', 'c', self.header),
(self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
(self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
(self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
]
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
"""Merges the global xcode_settings dictionary into each configuration of the
target represented by spec. For keys that are both in the global and the local
  xcode_settings dict, the local key gets precedence.
"""
# The xcode generator special-cases global xcode_settings and does something
# that amounts to merging in the global xcode_settings into each local
# xcode_settings dict.
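  # For example, a global {'ARCHS': 'x86_64', 'SDKROOT': 'macosx'} merged into
  # a config with {'SDKROOT': 'iphoneos'} yields
  # {'ARCHS': 'x86_64', 'SDKROOT': 'iphoneos'}.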
global_xcode_settings = global_dict.get('xcode_settings', {})
for config in spec['configurations'].values():
if 'xcode_settings' in config:
new_settings = global_xcode_settings.copy()
new_settings.update(config['xcode_settings'])
config['xcode_settings'] = new_settings
def IsMacBundle(flavor, spec):
"""Returns if |spec| should be treated as a bundle.
Bundles are directories with a certain subdirectory structure, instead of
just a single file. Bundle rules do not produce a binary but also package
resources into that directory."""
is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
if is_mac_bundle:
assert spec['type'] != 'none', (
'mac_bundle targets cannot have type none (target "%s")' %
spec['target_name'])
return is_mac_bundle
def GetMacBundleResources(product_dir, xcode_settings, resources):
"""Yields (output, resource) pairs for every resource in |resources|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
resources: A list of bundle resources, relative to the build directory.
"""
dest = os.path.join(product_dir,
xcode_settings.GetBundleResourceFolder())
for res in resources:
output = dest
# The make generator doesn't support it, so forbid it everywhere
    # to keep the generators more interchangeable.
assert ' ' not in res, (
"Spaces in resource filenames not supported (%s)" % res)
# Split into (path,file).
res_parts = os.path.split(res)
# Now split the path into (prefix,maybe.lproj).
lproj_parts = os.path.split(res_parts[0])
# If the resource lives in a .lproj bundle, add that to the destination.
if lproj_parts[1].endswith('.lproj'):
output = os.path.join(output, lproj_parts[1])
output = os.path.join(output, res_parts[1])
# Compiled XIB files are referred to by .nib.
if output.endswith('.xib'):
output = output[0:-3] + 'nib'
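    # e.g. 'images/en.lproj/Main.xib' ends up as
    # '<resource folder>/en.lproj/Main.nib'.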
yield output, res
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
"""Returns (info_plist, dest_plist, defines, extra_env), where:<|fim▁hole|> build directory,
  * |defines| is a list of preprocessor defines (empty if the plist
    shouldn't be preprocessed),
* |extra_env| is a dict of env variables that should be exported when
invoking |mac_tool copy-info-plist|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
    gyp_path_to_build_path: A function that converts paths relative to the
        current gyp file to paths relative to the build directory.
"""
info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
if not info_plist:
return None, None, [], {}
# The make generator doesn't support it, so forbid it everywhere
  # to keep the generators more interchangeable.
assert ' ' not in info_plist, (
"Spaces in Info.plist filenames not supported (%s)" % info_plist)
info_plist = gyp_path_to_build_path(info_plist)
# If explicitly set to preprocess the plist, invoke the C preprocessor and
# specify any defines as -D flags.
if xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESS', default='NO') == 'YES':
# Create an intermediate file based on the path.
defines = shlex.split(xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
else:
defines = []
dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
extra_env = xcode_settings.GetPerTargetSettings()
return info_plist, dest_plist, defines, extra_env
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings=None):
"""Return the environment variables that Xcode would set. See
http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
for a full list.
Args:
xcode_settings: An XcodeSettings object. If this is None, this function
returns an empty dict.
built_products_dir: Absolute path to the built products dir.
srcroot: Absolute path to the source root.
configuration: The build configuration name.
additional_settings: An optional dict with more values to add to the
result.
"""
if not xcode_settings: return {}
# This function is considered a friend of XcodeSettings, so let it reach into
# its implementation details.
spec = xcode_settings.spec
  # These are filled in on an as-needed basis.
env = {
'BUILT_PRODUCTS_DIR' : built_products_dir,
'CONFIGURATION' : configuration,
'PRODUCT_NAME' : xcode_settings.GetProductName(),
# See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
'SRCROOT' : srcroot,
'SOURCE_ROOT': '${SRCROOT}',
# This is not true for static libraries, but currently the env is only
# written for bundles:
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
}
if xcode_settings.GetPerTargetSetting('SDKROOT'):
env['SDKROOT'] = xcode_settings._SdkPath()
else:
env['SDKROOT'] = ''
if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
mach_o_type = xcode_settings.GetMachOType()
if mach_o_type:
env['MACH_O_TYPE'] = mach_o_type
env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
if xcode_settings._IsBundle():
env['CONTENTS_FOLDER_PATH'] = \
xcode_settings.GetBundleContentsFolderPath()
env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
xcode_settings.GetBundleResourceFolder()
env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
install_name = xcode_settings.GetInstallName()
if install_name:
env['LD_DYLIB_INSTALL_NAME'] = install_name
install_name_base = xcode_settings.GetInstallNameBase()
if install_name_base:
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
if not additional_settings:
additional_settings = {}
else:
# Flatten lists to strings.
for k in additional_settings:
if not isinstance(additional_settings[k], str):
additional_settings[k] = ' '.join(additional_settings[k])
additional_settings.update(env)
for k in additional_settings:
additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
return additional_settings
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
or $FOO, and returns a string with all variable references in the form ${FOO}.
"""
# $FOO -> ${FOO}
str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
# $(FOO) -> ${FOO}
matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
for match in matches:
to_replace, variable = match
    assert '$(' not in variable, '$($(FOO)) variables not supported: ' + to_replace
str = str.replace(to_replace, '${' + variable + '}')
return str
def ExpandEnvVars(string, expansions):
"""Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
expansions list. If the variable expands to something that references
another variable, this variable is expanded as well if it's in env --
until no variables present in env are left."""
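  # For example, with expansions == [('A', 'a'), ('B', '${A}/b')] (as produced
  # by GetSortedXcodeEnv), '$(B)' becomes '${A}/b' on the first (reversed)
  # iteration and 'a/b' once 'A' is substituted.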
for k, v in reversed(expansions):
string = string.replace('${' + k + '}', v)
string = string.replace('$(' + k + ')', v)
string = string.replace('$' + k, v)
return string
def _TopologicallySortedEnvVarKeys(env):
"""Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1].
Throws an Exception in case of dependency cycles.
"""
# Since environment variables can refer to other variables, the evaluation
# order is important. Below is the logic to compute the dependency graph
# and sort it.
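  # For example, env == {'A': '${B} ${C}', 'B': '${C}', 'C': 'x'} admits the
  # order ['C', 'B', 'A'], so every variable is defined before it is used.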
regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
def GetEdges(node):
    # Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
matches = set([v for v in regex.findall(env[node]) if v in env])
for dependee in matches:
assert '${' not in dependee, 'Nested variables not supported: ' + dependee
return matches
try:
# Topologically sort, and then reverse, because we used an edge definition
# that's inverted from the expected result of this function (see comment
# above).
order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
order.reverse()
return order
except gyp.common.CycleError, e:
raise GypError(
'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
configuration, additional_settings=None):
env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings)
return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
def GetSpecPostbuildCommands(spec, quiet=False):
"""Returns the list of postbuilds explicitly defined on |spec|, in a form
executable by a shell."""
postbuilds = []
for postbuild in spec.get('postbuilds', []):
if not quiet:
postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
spec['target_name'], postbuild['postbuild_name']))
postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
return postbuilds<|fim▁end|> | * |info_plist| is the source plist path, relative to the
build directory,
* |dest_plist| is the destination plist path, relative to the |
<|file_name|>c4fb5a76b195_add_switchport_mapping.py<|end_file_name|><|fim▁begin|># Copyright 2016 Nokia.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT<|fim▁hole|># under the License.
"""add gateway port mapping
Revision ID: c4fb5a76b195
Revises: 13cf8b5dfd05
Create Date: 2016-04-12 11:35:51.542465
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c4fb5a76b195'
down_revision = '13cf8b5dfd05'
def upgrade():
op.create_table('nuage_switchport_mapping',
sa.Column('id', sa.String(36), nullable=False),
sa.Column('switch_info', sa.String(255), nullable=False),
sa.Column('switch_id', sa.String(36), nullable=False),
sa.Column('redundant', sa.Boolean(), nullable=False),
sa.Column('port_id', sa.String(255), nullable=False),
sa.Column('port_uuid', sa.String(36), nullable=False),
sa.Column('pci_slot', sa.String(36), nullable=False),
sa.Column('host_id', sa.String(255), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('pci_slot', 'host_id'))
op.create_table('nuage_switchport_binding',
sa.Column('id', sa.String(36), nullable=False),
sa.Column('neutron_port_id',
sa.String(36),
nullable=False),
sa.Column('nuage_vport_id', sa.String(36), nullable=False),
sa.Column('switchport_uuid',
sa.String(36),
nullable=False),
sa.Column('segmentation_id', sa.Integer, nullable=False),
sa.ForeignKeyConstraint(
['neutron_port_id'],
['ports.id'],
ondelete='CASCADE'))<|fim▁end|> | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations |
<|file_name|>glyph_item_iter.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use glib::translate::*;
use pango_sys;
use GlyphItem;
glib_wrapper! {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct GlyphItemIter(Boxed<pango_sys::PangoGlyphItemIter>);
match fn {
copy => |ptr| pango_sys::pango_glyph_item_iter_copy(mut_override(ptr)),
free => |ptr| pango_sys::pango_glyph_item_iter_free(ptr),
get_type => || pango_sys::pango_glyph_item_iter_get_type(),
}
}
impl GlyphItemIter {
pub fn init_end(&mut self, glyph_item: &mut GlyphItem, text: &str) -> bool {
unsafe {
from_glib(pango_sys::pango_glyph_item_iter_init_end(
self.to_glib_none_mut().0,
glyph_item.to_glib_none_mut().0,
text.to_glib_none().0,
))
}
}
pub fn init_start(&mut self, glyph_item: &mut GlyphItem, text: &str) -> bool {
unsafe {
from_glib(pango_sys::pango_glyph_item_iter_init_start(
self.to_glib_none_mut().0,
glyph_item.to_glib_none_mut().0,
text.to_glib_none().0,
))
}
}
pub fn next_cluster(&mut self) -> bool {
unsafe {
from_glib(pango_sys::pango_glyph_item_iter_next_cluster(<|fim▁hole|>
pub fn prev_cluster(&mut self) -> bool {
unsafe {
from_glib(pango_sys::pango_glyph_item_iter_prev_cluster(
self.to_glib_none_mut().0,
))
}
}
}<|fim▁end|> | self.to_glib_none_mut().0,
))
}
} |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>enum FetcherError {
// TODO: Add more Errors<|fim▁hole|><|fim▁end|> | IOError(String),
} |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.core.files.storage import FileSystemStorage
import os
from datetime import datetime
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.timezone import utc
from autofixture.compat import get_GenericForeignKey
from autofixture.compat import get_GenericRelation
try:
from django.db.models import GenericIPAddressField as IPAddressField
except ImportError:
    from django.db.models import IPAddressField
filepath = os.path.dirname(os.path.abspath(__file__))
def y2k():
return datetime(2000, 1, 1).replace(tzinfo=utc)
class SimpleModel(models.Model):
name = models.CharField(max_length=50)
class OtherSimpleModel(models.Model):
name = models.CharField(max_length=50)
class UniqueNullFieldModel(models.Model):
name = models.CharField(max_length=15, null=True, blank=True, unique=True)
class UniqueTogetherNullFieldModel(models.Model):
field_one = models.CharField(max_length=15, null=True, blank=True)
field_two = models.CharField(max_length=15, null=True, blank=True)
class Meta:
unique_together = ['field_one', 'field_two']
class MultipleUniqueTogetherNullFieldModel(models.Model):
field_one = models.CharField(max_length=15, null=True, blank=True)
field_two = models.CharField(max_length=15, null=True, blank=True)
field_three = models.CharField(max_length=15, null=True, blank=True)
field_four = models.CharField(max_length=15, null=True, blank=True)
field_five = models.CharField(max_length=15, null=True, blank=True)
class Meta:
verbose_name = 'Multi unique_together null field'
unique_together = (
['field_one', 'field_two'],
['field_three', 'field_four', 'field_five'],
)
class DeepLinkModel1(models.Model):
related = models.ForeignKey('SimpleModel')
related2 = models.ForeignKey('SimpleModel',
related_name='deeplinkmodel1_rel2',
null=True,
blank=True)
class DeepLinkModel2(models.Model):
related = models.ForeignKey('DeepLinkModel1')
class NullableFKModel(models.Model):
m2m = models.ManyToManyField('SimpleModel', null=True, blank=True)
class BasicModel(models.Model):
chars = models.CharField(max_length=50)
shortchars = models.CharField(max_length=2)
blankchars = models.CharField(max_length=100, blank=True)
nullchars = models.CharField(max_length=100, blank=True, null=True)
slugfield = models.SlugField()
textfield = models.TextField()
blankfloatfield = models.FloatField(null=True, blank=True)
floatfield = models.FloatField()
defaultint = models.IntegerField(default=1)
intfield = models.IntegerField()
pintfield = models.PositiveIntegerField()
sintfield = models.SmallIntegerField()
psintfield = models.PositiveSmallIntegerField()
STRING_CHOICES = (
('a', 'A'),
('b', 'B'),
('c', 'C'),
)
choicefield = models.CharField(choices=STRING_CHOICES, max_length=1)
datefield = models.DateField()
datetimefield = models.DateTimeField()
defaultdatetime = models.DateTimeField(default=y2k)
timefield = models.TimeField()
decimalfield = models.DecimalField(max_digits=10, decimal_places=4)
emailfield = models.EmailField()
ipaddressfield = IPAddressField()
urlfield = models.URLField()
rfilepathfield = models.FilePathField(path=filepath, recursive=True)
filepathfield = models.FilePathField(path=filepath)
mfilepathfield = models.FilePathField(path=filepath, match=r'^.+\.py$')
imgfield = models.ImageField(upload_to='_autofixtures')
class UniqueTestModel(models.Model):
CHOICES = [(i, i) for i in range(10)]
choice1 = models.PositiveIntegerField(choices=CHOICES, unique=True)
class UniqueTogetherTestModel(models.Model):<|fim▁hole|> CHOICES = [(i, i) for i in range(10)]
choice1 = models.PositiveIntegerField(choices=CHOICES)
choice2 = models.PositiveIntegerField(choices=CHOICES)
class Meta:
unique_together = ('choice1', 'choice2')
class RelatedModel(models.Model):
related = models.ForeignKey(BasicModel, related_name='rel1')
limitedfk = models.ForeignKey(SimpleModel,
limit_choices_to={'name__exact': 'foo'},
related_name='rel2',
null=True,
blank=True)
class O2OModel(models.Model):
o2o = models.OneToOneField(SimpleModel)
class O2OPrimaryKeyModel(models.Model):
o2o = models.OneToOneField(SimpleModel, primary_key=True)
class InheritModel(SimpleModel):
extrafloatfield = models.FloatField()
class InheritUniqueTogetherModel(SimpleModel):
extrafloatfield = models.FloatField()
class Meta:
unique_together = ('extrafloatfield', 'simplemodel_ptr')
class SelfReferencingModel(models.Model):
parent_self = models.ForeignKey('self', blank=True, null=True)
class SelfReferencingModelNoNull(models.Model):
parent_self = models.ForeignKey('self')
class M2MModel(models.Model):
m2m = models.ManyToManyField(SimpleModel, related_name='m2m_rel1')
secondm2m = models.ManyToManyField(
OtherSimpleModel, related_name='m2m_rel2', null=True, blank=True)
class ThroughModel(models.Model):
simple = models.ForeignKey('SimpleModel')
other = models.ForeignKey('M2MModelThrough')
class M2MModelThrough(models.Model):
m2m = models.ManyToManyField(
SimpleModel, related_name='m2mthrough_rel1', through=ThroughModel)
class GFKModel(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = get_GenericForeignKey()('content_type', 'object_id')
class GRModel(models.Model):
gr = get_GenericRelation()('GFKModel')
class DummyStorage(FileSystemStorage):
pass
dummy_storage = DummyStorage()
class ImageModel(models.Model):
imgfield = models.ImageField(upload_to='_autofixtures',
storage=dummy_storage)
class RelationWithCustomAutofixtureModel(models.Model):
user = models.ForeignKey('auth.User', related_name='user1+')
users = models.ManyToManyField('auth.User', related_name='user2+')<|fim▁end|> | |
<|file_name|>news-story.ts<|end_file_name|><|fim▁begin|>export /**
* NewsStory
*/
class NewsStory {
nsid:number;
<|fim▁hole|> title:string;
description:string;
story:string;
image:string;
thumb:string;
constructor(nsid:number,category:string,title:string,description:string,story:string,image:string) {
this.nsid = nsid;
this.category = category;
this.title = title;
this.description = description;
this.story = story;
this.image = image;
}
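  // A usage sketch (illustrative values):
  //   const story = new NewsStory(1, 'tech', 'Title', 'A short description',
  //                               'Full story text', 'assets/img.png');
  // Note that `thumb` is declared but never set by the constructor, so
  // callers assign it separately when needed.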
}<|fim▁end|> | category:string;
|
<|file_name|>keyproperty_models.py<|end_file_name|><|fim▁begin|># Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for representing a contact with multiple phone numbers.
This module provides models with a relationship with ndb.KeyProperty to
allow a single contact to have multiple phone numbers.
For more information, see README.md.
"""
# In the original article, it uses ReferenceProperty on the<|fim▁hole|>
from google.appengine.ext import ndb
# [START keyproperty_models]
class Contact(ndb.Model):
"""A Contact model with KeyProperty."""
# Basic info.
name = ndb.StringProperty()
birth_day = ndb.DateProperty()
# Address info.
address = ndb.StringProperty()
# Company info.
company_title = ndb.StringProperty()
company_name = ndb.StringProperty()
company_description = ndb.TextProperty()
company_address = ndb.StringProperty()
# The original phone_number property has been replaced by
# the following property.
@property
def phone_numbers(self):
return PhoneNumber.query(PhoneNumber.contact == self.key)
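# A usage sketch (illustrative; the values are made up):
#
#   scott = Contact(name='Scott')
#   scott.put()
#   PhoneNumber(contact=scott.key, phone_type='home',
#               number='(650) 555-2200').put()
#   numbers = scott.phone_numbers.fetch()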
class PhoneNumber(ndb.Model):
"""A model representing a phone number."""
contact = ndb.KeyProperty(Contact)
phone_type = ndb.StringProperty(
choices=('home', 'work', 'fax', 'mobile', 'other'))
number = ndb.StringProperty()
# [END keyproperty_models]<|fim▁end|> | # PhoneNumber model. With ndb, there is no ReferenceProperty any more,
# so here we use KeyProperty first. However this pattern has a
# consistency issue, shown in the test_fails function in
# test/test_keyproperty_models.py. |
<|file_name|>jquery.fancybox.pack.min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:2d79d4ce9f72e0b9db16aee949410ecd30bfcfb5205af39053f05ac39083e151<|fim▁hole|><|fim▁end|> | size 22425 |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# makeenv documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 27 21:24:26 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
# readthedocs does not build from inside the makeenv environment so we have to
# hack it around a bit here
if "READTHEDOCS" in os.environ:
import sys
import tempfile
# put us on the sys.path
MAKEENV_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, MAKEENV_ROOT)
# to silence a warning from the makefile
os.environ.setdefault("PIP_DOWNLOAD_CACHE", tempfile.mkdtemp())
# build the module doc
import subprocess
subprocess.check_output(("make", "-C", MAKEENV_ROOT, "sphinx-module-rst"),
stderr=subprocess.STDOUT)<|fim▁hole|># add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinxcontrib.programoutput']
# Add any paths that contain templates here, relative to this directory.
#templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'makeenv'
copyright = u'2012, Arthur Noel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "0.1-dev"
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['.build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'makeenvdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'makeenv.tex', u'makeenv Documentation',
u'Arthur Noel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'makeenv', u'makeenv Documentation',
[u'Arthur Noel'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'makeenv', u'makeenv Documentation',
u'Arthur Noel', 'makeenv', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
highlight_language = "bash"<|fim▁end|> |
# If extensions (or modules to document with autodoc) are in another directory, |
<|file_name|>test_producer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import collections
import logging
import time
from mock import MagicMock, patch
from . import unittest
from kafka import KafkaClient, SimpleProducer
from kafka.common import (
AsyncProducerQueueFull, FailedPayloadsError, NotLeaderForPartitionError,
ProduceResponse, RetryOptions, TopicAndPartition
)
from kafka.producer.base import Producer, _send_upstream
from kafka.protocol import CODEC_NONE
import threading
try:
from queue import Empty, Queue
except ImportError:
from Queue import Empty, Queue
try:
xrange
except NameError:
xrange = range
class TestKafkaProducer(unittest.TestCase):
def test_producer_message_types(self):
producer = Producer(MagicMock())
topic = b"test-topic"
partition = 0
bad_data_types = (u'你怎么样?', 12, ['a', 'list'], ('a', 'tuple'), {'a': 'dict'})
for m in bad_data_types:
with self.assertRaises(TypeError):
logging.debug("attempting to send message of type %s", type(m))
producer.send_messages(topic, partition, m)
good_data_types = (b'a string!',)
for m in good_data_types:
# This should not raise an exception
producer.send_messages(topic, partition, m)
def test_topic_message_types(self):
client = MagicMock()
def partitions(topic):
return [0, 1]
client.get_partition_ids_for_topic = partitions
producer = SimpleProducer(client, random_start=False)
topic = b"test-topic"
producer.send_messages(topic, b'hi')
assert client.send_produce_request.called
@patch('kafka.producer.base._send_upstream')
def test_producer_async_queue_overfilled(self, mock):
queue_size = 2
producer = Producer(MagicMock(), async=True,
async_queue_maxsize=queue_size)
topic = b'test-topic'
partition = 0
message = b'test-message'
with self.assertRaises(AsyncProducerQueueFull):
message_list = [message] * (queue_size + 1)
producer.send_messages(topic, partition, *message_list)
self.assertEqual(producer.queue.qsize(), queue_size)
for _ in xrange(producer.queue.qsize()):
producer.queue.get()
def test_producer_sync_fail_on_error(self):
error = FailedPayloadsError('failure')
with patch.object(KafkaClient, 'load_metadata_for_topics'):
with patch.object(KafkaClient, 'get_partition_ids_for_topic', return_value=[0, 1]):
with patch.object(KafkaClient, '_send_broker_aware_request', return_value = [error]):
client = KafkaClient(MagicMock())
producer = SimpleProducer(client, async=False, sync_fail_on_error=False)
# This should not raise
(response,) = producer.send_messages('foobar', b'test message')
self.assertEqual(response, error)
producer = SimpleProducer(client, async=False, sync_fail_on_error=True)
with self.assertRaises(FailedPayloadsError):
producer.send_messages('foobar', b'test message')
class TestKafkaProducerSendUpstream(unittest.TestCase):
def setUp(self):
self.client = MagicMock()
self.queue = Queue()
def _run_process(self, retries_limit=3, sleep_timeout=1):
# run _send_upstream process with the queue
stop_event = threading.Event()
retry_options = RetryOptions(limit=retries_limit,
backoff_ms=50,
retry_on_timeouts=False)
self.thread = threading.Thread(
target=_send_upstream,
args=(self.queue, self.client, CODEC_NONE,
0.3, # batch time (seconds)
3, # batch length
Producer.ACK_AFTER_LOCAL_WRITE,
Producer.DEFAULT_ACK_TIMEOUT,
retry_options,
stop_event))
self.thread.daemon = True
self.thread.start()
time.sleep(sleep_timeout)
stop_event.set()
def test_wo_retries(self):
# lets create a queue and add 10 messages for 1 partition
for i in range(10):
self.queue.put((TopicAndPartition("test", 0), "msg %i", "key %i"))
self._run_process()
# the queue should be void at the end of the test
self.assertEqual(self.queue.empty(), True)
        # there should be 4 non-void calls:
# 3 batches of 3 msgs each + 1 batch of 1 message
self.assertEqual(self.client.send_produce_request.call_count, 4)
def test_first_send_failed(self):
# lets create a queue and add 10 messages for 10 different partitions
# to show how retries should work ideally
for i in range(10):
self.queue.put((TopicAndPartition("test", i), "msg %i", "key %i"))
# Mock offsets counter for closure
offsets = collections.defaultdict(lambda: collections.defaultdict(lambda: 0))
self.client.is_first_time = True
def send_side_effect(reqs, *args, **kwargs):
if self.client.is_first_time:
self.client.is_first_time = False
return [FailedPayloadsError(req) for req in reqs]
responses = []
for req in reqs:
offset = offsets[req.topic][req.partition]
offsets[req.topic][req.partition] += len(req.messages)
responses.append(
ProduceResponse(req.topic, req.partition, 0, offset)
)
return responses
self.client.send_produce_request.side_effect = send_side_effect
self._run_process(2)
# the queue should be void at the end of the test
self.assertEqual(self.queue.empty(), True)
# there should be 5 non-void calls: 1st failed batch of 3 msgs
# plus 3 batches of 3 msgs each + 1 batch of 1 message
self.assertEqual(self.client.send_produce_request.call_count, 5)
def test_with_limited_retries(self):
# lets create a queue and add 10 messages for 10 different partitions
# to show how retries should work ideally
for i in range(10):
self.queue.put((TopicAndPartition("test", i), "msg %i" % i, "key %i" % i))
def send_side_effect(reqs, *args, **kwargs):
return [FailedPayloadsError(req) for req in reqs]
self.client.send_produce_request.side_effect = send_side_effect
self._run_process(3, 3)
# the queue should be void at the end of the test
self.assertEqual(self.queue.empty(), True)
# there should be 16 non-void calls:
# 3 initial batches of 3 msgs each + 1 initial batch of 1 msg +
# 3 retries of the batches above = (1 + 3 retries) * 4 batches = 16
self.assertEqual(self.client.send_produce_request.call_count, 16)
def test_async_producer_not_leader(self):
for i in range(10):
self.queue.put((TopicAndPartition("test", i), "msg %i", "key %i"))<|fim▁hole|> def send_side_effect(reqs, *args, **kwargs):
if self.client.is_first_time:
self.client.is_first_time = False
return [ProduceResponse(req.topic, req.partition,
NotLeaderForPartitionError.errno, -1)
for req in reqs]
responses = []
for req in reqs:
offset = offsets[req.topic][req.partition]
offsets[req.topic][req.partition] += len(req.messages)
responses.append(
ProduceResponse(req.topic, req.partition, 0, offset)
)
return responses
self.client.send_produce_request.side_effect = send_side_effect
self._run_process(2)
# the queue should be void at the end of the test
self.assertEqual(self.queue.empty(), True)
# there should be 5 non-void calls: 1st failed batch of 3 msgs
# + 3 batches of 3 msgs each + 1 batch of 1 msg = 1 + 3 + 1 = 5
self.assertEqual(self.client.send_produce_request.call_count, 5)
def tearDown(self):
for _ in xrange(self.queue.qsize()):
self.queue.get()<|fim▁end|> |
# Mock offsets counter for closure
offsets = collections.defaultdict(lambda: collections.defaultdict(lambda: 0))
self.client.is_first_time = True |
<|file_name|>setup_py2exe.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (C) 2006-2010, University of Maryland
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/ or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Author: James Krycka
"""
This script uses py2exe to create inversion\dist\direfl.exe for Windows.
The resulting executable bundles the DiRefl application, the python runtime
environment, and other required python packages into a single file. Additional
resource files that are needed when DiRefl is run are placed in the dist
directory tree. On completion, the contents of the dist directory tree can be
used by the Inno Setup Compiler (via a separate script) to build a Windows
installer/uninstaller for deployment of the DiRefl application. For testing
purposes, direfl.exe can be run from the dist directory.
"""
import os
import sys
'''
print "*** Python path is:"
for i, p in enumerate(sys.path):
print "%5d %s" %(i, p)
'''
from distutils.core import setup
# Augment the setup interface with the py2exe command and make sure the py2exe
# option is passed to setup.
import py2exe<|fim▁hole|>
import matplotlib
# Retrieve the application version string.
from version import version
# A manifest is required to be included in a py2exe image (or accessible as a
# file in the image directory) when wxPython is included so that the Windows XP
# theme is used when rendering wx widgets. The manifest must be matched to the
# version of Python that is being used.
#
# Create a manifest for use with Python 2.5 on Windows XP or Vista. It is
# adapted from the Python manifest file (C:\Python25\pythonw.exe.manifest).
manifest_for_python25 = """
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity
version="1.0.0.0"
processorArchitecture="x86"
name="%(prog)s"
type="win32"
/>
<description>%(prog)s</description>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="X86"
publicKeyToken="6595b64144ccf1df"
language="*"
/>
</dependentAssembly>
</dependency>
</assembly>
"""
# Create a manifest for use with Python 2.6 or 2.7 on Windows XP or Vista.
manifest_for_python26 = """
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity
version="5.0.0.0"
processorArchitecture="x86"
name="%(prog)s"
type="win32">
</assemblyIdentity>
<description>%(prog)s</description>
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel
level="asInvoker"
uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.VC90.CRT"
version="9.0.21022.8"
processorArchitecture="x86"
publicKeyToken="1fc8b3b9a1e18e3b">
</assemblyIdentity>
</dependentAssembly>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="x86"
publicKeyToken="6595b64144ccf1df"
language="*">
</assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>
"""
# Select the appropriate manifest to use.
if sys.version_info >= (3, 0) or sys.version_info < (2, 5):
print "*** This script only works with Python 2.5, 2.6, or 2.7."
sys.exit()
elif sys.version_info >= (2, 6):
manifest = manifest_for_python26
elif sys.version_info >= (2, 5):
manifest = manifest_for_python25
# Create a list of all files to include along side the executable being built
# in the dist directory tree. Each element of the data_files list is a tuple
# consisting of a path (relative to dist\) and a list of files in that path.
data_files = []
# Add data files from the matplotlib\mpl-data folder and its subfolders.
# For matploblib prior to version 0.99 see the examples at the end of the file.
data_files = matplotlib.get_py2exe_datafiles()
# Add resource files that need to reside in the same directory as the image.
data_files.append( ('.', [os.path.join('.', 'direfl.ico')]) )
data_files.append( ('.', [os.path.join('.', 'direfl_splash.png')]) )
data_files.append( ('.', [os.path.join('.', 'LICENSE.txt')]) )
data_files.append( ('.', [os.path.join('.', 'README.txt')]) )
data_files.append( ('examples', [os.path.join('examples', 'demo_model_1.dat')]) )
data_files.append( ('examples', [os.path.join('examples', 'demo_model_2.dat')]) )
data_files.append( ('examples', [os.path.join('examples', 'demo_model_3.dat')]) )
data_files.append( ('examples', [os.path.join('examples', 'qrd1.refl')]) )
data_files.append( ('examples', [os.path.join('examples', 'qrd2.refl')]) )
data_files.append( ('examples', [os.path.join('examples', 'surround_air_4.refl')]) )
data_files.append( ('examples', [os.path.join('examples', 'surround_d2o_4.refl')]) )
# Add the Microsoft Visual C++ 2008 redistributable kit if we are building with
# Python 2.6 or 2.7. This kit will be installed on the target system as part
# of the installation process for the frozen image. Note that the Python 2.5
# interpreter requires msvcr71.dll which is included in the Python25 package,
# however, Python 2.6 and 2.7 require the msvcr90.dll but they do not bundle it
# with the Python26 or Python27 package. Thus, for Python 2.6 and later, the
# appropriate dll must be present on the target system at runtime.
if sys.version_info >= (2, 6):
pypath = os.path.dirname(sys.executable)
data_files.append( ('.', [os.path.join(pypath, 'vcredist_x86.exe')]) )
# Specify required packages to bundle in the executable image.
packages = ['matplotlib', 'numpy', 'scipy', 'pytz']
# Specify files to include in the executable image.
includes = []
# Specify files to exclude from the executable image.
# - We can safely exclude Tk/Tcl and Qt modules because our app uses wxPython.
# - We do not use ssl services so they are omitted.
# - We can safely exclude the TkAgg matplotlib backend because our app uses
# "matplotlib.use('WXAgg')" to override the default matplotlib configuration.
# - On the web it is widely recommended to exclude certain lib*.dll modules
#   but this no longer seems necessary (though adding them does not hurt).
# - Python25 requires mscvr71.dll, however, Win XP includes this file.
# - Since we do not support Win 9x systems, w9xpopen.dll is not needed.
# - For some reason cygwin1.dll gets included by default, but it is not needed.
excludes = ['Tkinter', 'PyQt4', '_ssl', '_tkagg']
dll_excludes = ['libgdk_pixbuf-2.0-0.dll',
'libgobject-2.0-0.dll',
'libgdk-win32-2.0-0.dll',
'tcl84.dll',
'tk84.dll',
'QtGui4.dll',
'QtCore4.dll',
'msvcr71.dll',
'msvcp90.dll',
'w9xpopen.exe',
'cygwin1.dll']
class Target():
"""This class stores metadata about the distribution in a dictionary."""
def __init__(self, **kw):
self.__dict__.update(kw)
self.version = version
client = Target(
name = 'DiRefl',
description = 'Direct Inversion Reflectometry (DiRefl) application',
script = 'bin/direfl.py', # module to run on application start
dest_base = 'direfl', # file name part of the exe file to create
icon_resources = [(1, 'direfl.ico')], # also need to specify in data_files
bitmap_resources = [],
other_resources = [(24, 1, manifest % dict(prog='DiRefl'))] )
# Now we do the work to create a standalone distribution using py2exe.
#
# When the application is run in console mode, a console window will be created
# to receive any logging or error messages and the application will then create
# a separate GUI application window.
#
# When the application is run in windows mode, it will create a GUI application
# window and no console window will be provided. Output to stderr will be
# written to <app-image-name>.log.
setup(
#console=[client],
windows=[client],
options={'py2exe': {
'packages': packages,
'includes': includes,
'excludes': excludes,
'dll_excludes': dll_excludes,
'compressed': 1, # standard compression
'optimize': 0, # no byte-code optimization
'dist_dir': "dist",# where to put py2exe results
'xref': False, # display cross reference (as html doc)
'bundle_files': 1 # bundle python25.dll in library.zip
}
},
#zipfile=None, # None means bundle library.zip in exe
data_files=data_files # list of files to copy to dist directory
)
#==============================================================================
# This section is for reference only when using older versions of matplotlib.
# The location of mpl-data files has changed across releases of matplotlib.
# Furthermore, matplotlib.get_py2exe_datafiles() had problems prior to version
# 0.99 (see link below for details), so alternative ways had to be used.
# The various techniques shown below for obtaining matplotlib auxiliary files
# (and previously used by this project) were adapted from the examples and
# discussion on http://www.py2exe.org/index.cgi/MatPlotLib.
#
# The following technique worked for matplotlib 0.91.2.
# Note that glob '*.*' will not find files that have no file extension.
'''
import glob
data_files = []
matplotlibdatadir = matplotlib.get_data_path()
mpl_lst = ('mpl-data', glob.glob(os.path.join(matplotlibdatadir, '*.*')))
data_files.append(mpl_lst)
mpl_lst = ('mpl-data', [os.path.join(matplotlibdatadir, 'matplotlibrc')])
data_files.append(mpl_lst) # pickup file missed by glob
mpl_lst = (r'mpl-data\fonts',
glob.glob(os.path.join(matplotlibdatadir, r'fonts\*.*')))
data_files.append(mpl_lst)
mpl_lst = (r'mpl-data\images',
glob.glob(os.path.join(matplotlibdatadir, r'images\*.*')))
data_files.append(mpl_lst)
'''
# The following technique worked for matplotlib 0.98.5.
# Note that glob '*.*' will not find files that have no file extension.
'''
import glob
data_files = []
matplotlibdatadir = matplotlib.get_data_path()
mpl_lst = ('mpl-data', glob.glob(os.path.join(matplotlibdatadir, '*.*')))
data_files.append(mpl_lst)
mpl_lst = ('mpl-data', [os.path.join(matplotlibdatadir, 'matplotlibrc')])
data_files.append(mpl_lst) # pickup file missed by glob
mpl_lst = (r'mpl-data\fonts\afm',
glob.glob(os.path.join(matplotlibdatadir, r'fonts\afm\*.*')))
data_files.append(mpl_lst)
mpl_lst = (r'mpl-data\fonts\pdfcorefonts',
glob.glob(os.path.join(matplotlibdatadir, r'fonts\pdfcorefonts\*.*')))
data_files.append(mpl_lst)
mpl_lst = (r'mpl-data\fonts\ttf',
glob.glob(os.path.join(matplotlibdatadir, r'fonts\ttf\*.*')))
data_files.append(mpl_lst)
mpl_lst = (r'mpl-data\images',
glob.glob(os.path.join(matplotlibdatadir, r'images\*.*')))
data_files.append(mpl_lst)
'''
# The following technique worked for matplotlib 0.98 and 0.99.
'''
from distutils.filelist import findall
data_files = []
matplotlibdatadir = matplotlib.get_data_path()
matplotlibdata = findall(matplotlibdatadir)
for f in matplotlibdata:
dirname = os.path.join('mpl-data', f[len(matplotlibdatadir)+1:])
data_files.append((os.path.split(dirname)[0], [f]))
'''<|fim▁end|> |
if len(sys.argv) == 1:
sys.argv.append('py2exe') |
<|file_name|>symlink-posix.cpp<|end_file_name|><|fim▁begin|>/* $Id: symlink-posix.cpp $ */
/** @file
* IPRT - Symbolic Links, POSIX.
*/
/*
* Copyright (C) 2010-2015 Oracle Corporation
*
* This file is part of VirtualBox Open Source Edition (OSE), as
* available from http://www.virtualbox.org. This file is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*
* The contents of this file may alternatively be used under the terms
* of the Common Development and Distribution License Version 1.0
* (CDDL) only, as it comes in the "COPYING.CDDL" file of the
* VirtualBox OSE distribution, in which case the provisions of the
* CDDL are applicable instead of those of the GPL.
*
* You may elect to license modified versions of this file under the
* terms and conditions of either the GPL or the CDDL or both.
*/
/*******************************************************************************
* Header Files *
*******************************************************************************/
#define LOG_GROUP RTLOGGROUP_SYMLINK
#include <errno.h>
#include <sys/stat.h>
#include <unistd.h>
#include <iprt/symlink.h>
#include <iprt/assert.h>
#include <iprt/err.h>
#include <iprt/log.h>
#include <iprt/mem.h>
#include <iprt/string.h>
#include "internal/path.h"
RTDECL(bool) RTSymlinkExists(const char *pszSymlink)
{
bool fRc = false;
char const *pszNativeSymlink;
int rc = rtPathToNative(&pszNativeSymlink, pszSymlink, NULL);
if (RT_SUCCESS(rc))
{
struct stat s;
fRc = !lstat(pszNativeSymlink, &s)
&& S_ISLNK(s.st_mode);
rtPathFreeNative(pszNativeSymlink, pszSymlink);
}
LogFlow(("RTSymlinkExists(%p={%s}): returns %RTbool\n", pszSymlink, pszSymlink, fRc));
return fRc;
}
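/* Illustrative usage sketch (the path below is a placeholder): both calls
 * use the signatures declared in this file.
 *
 *     if (RTSymlinkExists("/tmp/mylink"))
 *         rc = RTSymlinkDelete("/tmp/mylink", 0);
 */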
RTDECL(bool) RTSymlinkIsDangling(const char *pszSymlink)
{
bool fRc = false;
char const *pszNativeSymlink;
int rc = rtPathToNative(&pszNativeSymlink, pszSymlink, NULL);
if (RT_SUCCESS(rc))
{
struct stat s;
fRc = !lstat(pszNativeSymlink, &s)
&& S_ISLNK(s.st_mode);
if (fRc)
{
errno = 0;
fRc = stat(pszNativeSymlink, &s) != 0
&& ( errno == ENOENT
|| errno == ENOTDIR
|| errno == ELOOP);
}
rtPathFreeNative(pszNativeSymlink, pszSymlink);
}
LogFlow(("RTSymlinkIsDangling(%p={%s}): returns %RTbool\n", pszSymlink, pszSymlink, fRc));
return fRc;
}
RTDECL(int) RTSymlinkCreate(const char *pszSymlink, const char *pszTarget, RTSYMLINKTYPE enmType, uint32_t fCreate)
{
/*
* Validate the input.
*/
AssertReturn(enmType > RTSYMLINKTYPE_INVALID && enmType < RTSYMLINKTYPE_END, VERR_INVALID_PARAMETER);
AssertPtrReturn(pszSymlink, VERR_INVALID_POINTER);
AssertPtrReturn(pszTarget, VERR_INVALID_POINTER);
/*
* Convert the paths.
*/
char const *pszNativeSymlink;
int rc = rtPathToNative(&pszNativeSymlink, pszSymlink, NULL);
if (RT_SUCCESS(rc))
{
const char *pszNativeTarget;
rc = rtPathToNative(&pszNativeTarget, pszTarget, NULL);
if (RT_SUCCESS(rc))
{
/*
* Create the link.
*/
if (symlink(pszNativeTarget, pszNativeSymlink) == 0)
rc = VINF_SUCCESS;
else
rc = RTErrConvertFromErrno(errno);
rtPathFreeNative(pszNativeTarget, pszTarget);
}
rtPathFreeNative(pszNativeSymlink, pszSymlink);
}
LogFlow(("RTSymlinkCreate(%p={%s}, %p={%s}, %d, %#x): returns %Rrc\n", pszSymlink, pszSymlink, pszTarget, pszTarget, enmType, fCreate, rc));
return rc;
}
RTDECL(int) RTSymlinkDelete(const char *pszSymlink, uint32_t fDelete)
{
char const *pszNativeSymlink;
int rc = rtPathToNative(&pszNativeSymlink, pszSymlink, NULL);<|fim▁hole|> {
if (S_ISLNK(s.st_mode))
{
if (unlink(pszNativeSymlink) == 0)
rc = VINF_SUCCESS;
else
rc = RTErrConvertFromErrno(errno);
}
else
rc = VERR_NOT_SYMLINK;
}
else
rc = RTErrConvertFromErrno(errno);
rtPathFreeNative(pszNativeSymlink, pszSymlink);
}
LogFlow(("RTSymlinkDelete(%p={%s}, #%x): returns %Rrc\n", pszSymlink, pszSymlink, fDelete, rc));
return rc;
}
RTDECL(int) RTSymlinkRead(const char *pszSymlink, char *pszTarget, size_t cbTarget, uint32_t fRead)
{
char *pszMyTarget;
int rc = RTSymlinkReadA(pszSymlink, &pszMyTarget);
if (RT_SUCCESS(rc))
{
rc = RTStrCopy(pszTarget, cbTarget, pszMyTarget);
RTStrFree(pszMyTarget);
}
LogFlow(("RTSymlinkRead(%p={%s}): returns %Rrc\n", pszSymlink, pszSymlink, rc));
return rc;
}
RTDECL(int) RTSymlinkReadA(const char *pszSymlink, char **ppszTarget)
{
AssertPtr(ppszTarget);
char const *pszNativeSymlink;
int rc = rtPathToNative(&pszNativeSymlink, pszSymlink, NULL);
if (RT_SUCCESS(rc))
{
/* Guess the initial buffer size. */
ssize_t cbBuf;
struct stat s;
if (!lstat(pszNativeSymlink, &s))
cbBuf = RT_MIN(RT_ALIGN_Z(s.st_size, 64), 64);
else
cbBuf = 1024;
/* Read loop that grows the buffer. */
char *pszBuf = NULL;
for (;;)
{
RTMemTmpFree(pszBuf);
pszBuf = (char *)RTMemTmpAlloc(cbBuf);
if (pszBuf)
{
ssize_t cbReturned = readlink(pszNativeSymlink, pszBuf, cbBuf);
if (cbReturned >= cbBuf)
{
/* Increase the buffer size and try again */
cbBuf *= 2;
continue;
}
if (cbReturned > 0)
{
pszBuf[cbReturned] = '\0';
rc = rtPathFromNativeDup(ppszTarget, pszBuf, pszSymlink);
}
else if (errno == EINVAL)
rc = VERR_NOT_SYMLINK;
else
rc = RTErrConvertFromErrno(errno);
}
else
rc = VERR_NO_TMP_MEMORY;
break;
} /* for loop */
RTMemTmpFree(pszBuf);
rtPathFreeNative(pszNativeSymlink, pszSymlink);
}
if (RT_SUCCESS(rc))
LogFlow(("RTSymlinkReadA(%p={%s},%p): returns %Rrc *ppszTarget=%p:{%s}\n", pszSymlink, pszSymlink, ppszTarget, rc, *ppszTarget, *ppszTarget));
else
LogFlow(("RTSymlinkReadA(%p={%s},%p): returns %Rrc\n", pszSymlink, pszSymlink, ppszTarget, rc));
return rc;
}<|fim▁end|> | if (RT_SUCCESS(rc))
{
struct stat s;
if (!lstat(pszNativeSymlink, &s)) |
<|file_name|>edit.js<|end_file_name|><|fim▁begin|>/************************************************************************
* This file is part of EspoCRM.
*
* EspoCRM - Open Source CRM application.
* Copyright (C) 2014 Yuri Kuznetsov, Taras Machyshyn, Oleksiy Avramenko
* Website: http://www.espocrm.com
*
* EspoCRM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* EspoCRM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with EspoCRM. If not, see http://www.gnu.org/licenses/.
************************************************************************/
Espo.define('Views.EmailAccount.Record.Edit', ['Views.Record.Edit', 'Views.EmailAccount.Record.Detail'], function (Dep, Detail) {
<|fim▁hole|> Dep.prototype.afterRender.call(this);
Detail.prototype.initSslFieldListening.call(this);
},
});
});<|fim▁end|> | return Dep.extend({
afterRender: function () { |
<|file_name|>test_xpi_import.py<|end_file_name|><|fim▁begin|># Copyright 2009-2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Functional tests for XPI file format"""
__metaclass__ = type
import re
import unittest
from zope.component import getUtility
from lp.app.interfaces.launchpad import ILaunchpadCelebrities
from lp.registry.interfaces.person import IPersonSet
from lp.registry.interfaces.product import IProductSet
from lp.testing.layers import LaunchpadZopelessLayer
from lp.translations.enums import RosettaImportStatus
from lp.translations.interfaces.potemplate import IPOTemplateSet
from lp.translations.utilities.mozilla_xpi_importer import MozillaXpiImporter
from lp.translations.utilities.tests.helpers import (
import_pofile_or_potemplate,
)
from lp.translations.utilities.tests.xpi_helpers import (
access_key_source_comment,
command_key_source_comment,
get_en_US_xpi_file_to_import,
)
def unwrap(text):
"""Remove line breaks and any other wrapping artifacts from text."""
    return re.sub(r'\s+', ' ', text.strip())
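# Illustrative behaviour of the helper above:
#   unwrap('  foo\n    bar ') returns 'foo bar'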
class XpiTestCase(unittest.TestCase):
"""XPI file import into Launchpad."""
layer = LaunchpadZopelessLayer
def setUp(self):
# Get the importer.
self.importer = getUtility(IPersonSet).getByName('mark')
# Get the Firefox template.
firefox_product = getUtility(IProductSet).getByName('firefox')
firefox_productseries = firefox_product.getSeries('trunk')
firefox_potemplate_subset = getUtility(IPOTemplateSet).getSubset(
productseries=firefox_productseries)
self.firefox_template = firefox_potemplate_subset.new(
name='firefox',
translation_domain='firefox',
path='en-US.xpi',
owner=self.importer)
self.spanish_firefox = self.firefox_template.newPOFile('es')
self.spanish_firefox.path = 'translations/es.xpi'
def setUpTranslationImportQueueForTemplate(self, subdir):
"""Return an ITranslationImportQueueEntry for testing purposes.
:param subdir: subdirectory in firefox-data to get XPI data from.
"""
# Get the file to import.
en_US_xpi = get_en_US_xpi_file_to_import(subdir)
return import_pofile_or_potemplate(
file_contents=en_US_xpi.read(),
person=self.importer,
potemplate=self.firefox_template)
def setUpTranslationImportQueueForTranslation(self, subdir):
"""Return an ITranslationImportQueueEntry for testing purposes.
:param subdir: subdirectory in firefox-data to get XPI data from.
"""
        # Get the file to import. Given the way the XPI file format works, we
        # can just use the same template file as a translation file.
es_xpi = get_en_US_xpi_file_to_import(subdir)
return import_pofile_or_potemplate(
file_contents=es_xpi.read(),
person=self.importer,
pofile=self.spanish_firefox,
by_maintainer=True)
def _assertXpiMessageInvariant(self, message):
"""Check whether invariant part of all messages are correct."""
# msgid and singular_text are always different except for the keyboard
# shortcuts which are the 'accesskey' and 'commandkey' ones.
self.failIf(
(message.msgid_singular.msgid == message.singular_text and
message.msgid_singular.msgid not in (
u'foozilla.menu.accesskey', u'foozilla.menu.commandkey')),
'msgid and singular_text should be different but both are %s' % (
message.msgid_singular.msgid))
# Plural forms should be None as this format is not able to handle
# them.
self.assertEquals(message.msgid_plural, None)
self.assertEquals(message.plural_text, None)
        # There is no way to know whether a comment comes from a translator
        # or a developer, so commenttext is always empty and all comments are
        # stored as source comments.
self.assertEquals(message.commenttext, u'')
# This format doesn't support any functionality like .po flags.
self.assertEquals(message.flagscomment, u'')
def test_TemplateImport(self):
"""Test XPI template file import."""
# Prepare the import queue to handle a new .xpi import.
entry = self.setUpTranslationImportQueueForTemplate('en-US')
# The status is now IMPORTED:
self.assertEquals(entry.status, RosettaImportStatus.IMPORTED)
# Let's validate the content of the messages.
potmsgsets = list(self.firefox_template.getPOTMsgSets())
messages_msgid_list = []
for message in potmsgsets:
messages_msgid_list.append(message.msgid_singular.msgid)
# Check the common values for all messages.
self._assertXpiMessageInvariant(message)
if message.msgid_singular.msgid == u'foozilla.name':
# It's a normal message that lacks any comment.
self.assertEquals(message.singular_text, u'FooZilla!')
self.assertEquals(
message.filereferences,
u'jar:chrome/en-US.jar!/test1.dtd(foozilla.name)')
self.assertEquals(message.sourcecomment, None)
elif message.msgid_singular.msgid == u'foozilla.play.fire':
# This one is also a normal message that has a comment.
self.assertEquals(
message.singular_text, u'Do you want to play with fire?')
self.assertEquals(
message.filereferences,
u'jar:chrome/en-US.jar!/test1.dtd(foozilla.play.fire)')
self.assertEquals(
message.sourcecomment,
u" Translators, don't play with fire! \n")
elif message.msgid_singular.msgid == u'foozilla.utf8':
# Now, we can see that special UTF-8 chars are extracted
# correctly.
self.assertEquals(
message.singular_text, u'\u0414\u0430\u043d=Day')
self.assertEquals(
message.filereferences,
u'jar:chrome/en-US.jar!/test1.properties:5' +
u'(foozilla.utf8)')
self.assertEquals(message.sourcecomment, None)
elif message.msgid_singular.msgid == u'foozilla.menu.accesskey':
# access key is a special notation that is supposed to be
# translated with a key shortcut.
self.assertEquals(
message.singular_text, u'M')
self.assertEquals(
message.filereferences,
u'jar:chrome/en-US.jar!/subdir/test2.dtd' +
u'(foozilla.menu.accesskey)')
# The comment shows the key used when there is no translation,
# which is noted as the en_US translation.
self.assertEquals(
unwrap(message.sourcecomment),
unwrap(access_key_source_comment))
elif message.msgid_singular.msgid == u'foozilla.menu.commandkey':
# command key is a special notation that is supposed to be
# translated with a key shortcut.
self.assertEquals(
message.singular_text, u'm')
self.assertEquals(
message.filereferences,
u'jar:chrome/en-US.jar!/subdir/test2.dtd' +
u'(foozilla.menu.commandkey)')
# The comment shows the key used when there is no translation,
# which is noted as the en_US translation.
self.assertEquals(
unwrap(message.sourcecomment),
unwrap(command_key_source_comment))
# Check that we got all messages.
self.assertEquals(
[u'foozilla.happytitle', u'foozilla.menu.accesskey',
u'foozilla.menu.commandkey', u'foozilla.menu.title',
u'foozilla.name', u'foozilla.nocomment', u'foozilla.play.fire',
u'foozilla.play.ice', u'foozilla.title', u'foozilla.utf8',
u'foozilla_something'],
sorted(messages_msgid_list))
def test_TwiceTemplateImport(self):
"""Test a template import done twice."""
# Prepare the import queue to handle a new .xpi import.
entry = self.setUpTranslationImportQueueForTemplate('en-US')
# The status is now IMPORTED:
self.assertEquals(entry.status, RosettaImportStatus.IMPORTED)
# Retrieve the number of messages we got in this initial import.
first_import_potmsgsets = self.firefox_template.getPOTMsgSets(
).count()
# Force the entry to be imported again:
entry.setStatus(RosettaImportStatus.APPROVED,
getUtility(ILaunchpadCelebrities).rosetta_experts)
# Now, we tell the PO template to import from the file data it has.
(subject, body) = self.firefox_template.importFromQueue(entry)
# Retrieve the number of messages we got in this second import.
second_import_potmsgsets = self.firefox_template.getPOTMsgSets(
).count()
# Both must match.
self.assertEquals(first_import_potmsgsets, second_import_potmsgsets)
def test_TranslationImport(self):
"""Test XPI translation file import."""
# Prepare the import queue to handle a new .xpi import.
template_entry = self.setUpTranslationImportQueueForTemplate('en-US')
translation_entry = self.setUpTranslationImportQueueForTranslation(
'en-US')
# The status is now IMPORTED:
self.assertEquals(
translation_entry.status, RosettaImportStatus.IMPORTED)
self.assertEquals(template_entry.status, RosettaImportStatus.IMPORTED)
# Let's validate the content of the messages.
potmsgsets = list(self.firefox_template.getPOTMsgSets())
messages = [message.msgid_singular.msgid for message in potmsgsets]
messages.sort()
self.assertEquals(
[u'foozilla.happytitle',
u'foozilla.menu.accesskey',
u'foozilla.menu.commandkey',
u'foozilla.menu.title',
u'foozilla.name',
u'foozilla.nocomment',
u'foozilla.play.fire',
u'foozilla.play.ice',
u'foozilla.title',
u'foozilla.utf8',
u'foozilla_something'],
messages)
potmsgset = self.firefox_template.getPOTMsgSetByMsgIDText(
u'foozilla.name', context='main/test1.dtd')
translation = potmsgset.getCurrentTranslation(
self.firefox_template, self.spanish_firefox.language,
self.firefox_template.translation_side)
# It's a normal message that lacks any comment.
self.assertEquals(potmsgset.singular_text, u'FooZilla!')
# With this first import, upstream and Ubuntu translations must match.
self.assertEquals(
translation.translations,
potmsgset.getOtherTranslation(
self.spanish_firefox.language,
self.firefox_template.translation_side).translations)
potmsgset = self.firefox_template.getPOTMsgSetByMsgIDText(
u'foozilla.menu.accesskey', context='main/subdir/test2.dtd')
# access key is a special notation that is supposed to be
# translated with a key shortcut.
self.assertEquals(potmsgset.singular_text, u'M')
# The comment shows the key used when there is no translation,
# which is noted as the en_US translation.
self.assertEquals(
unwrap(potmsgset.sourcecomment),
unwrap(access_key_source_comment))
# But for the translation import, we get the key directly.
self.assertEquals(
potmsgset.getOtherTranslation(
self.spanish_firefox.language,
self.firefox_template.translation_side).translations,
[u'M'])
potmsgset = self.firefox_template.getPOTMsgSetByMsgIDText(
u'foozilla.menu.commandkey', context='main/subdir/test2.dtd')
# command key is a special notation that is supposed to be
# translated with a key shortcut.
self.assertEquals(
potmsgset.singular_text, u'm')
# The comment shows the key used when there is no translation,
# which is noted as the en_US translation.
self.assertEquals(
unwrap(potmsgset.sourcecomment),
unwrap(command_key_source_comment))
# But for the translation import, we get the key directly.
self.assertEquals(
potmsgset.getOtherTranslation(
self.spanish_firefox.language,
self.firefox_template.translation_side).translations,
[u'm'])
def test_GetLastTranslator(self):
"""Tests whether we extract last translator information correctly."""
translation_entry = self.setUpTranslationImportQueueForTranslation(
'en-US')
importer = MozillaXpiImporter()
translation_file = importer.parse(translation_entry)
# Let's try with the translation file, it has valid Last Translator
# information.
name, email = translation_file.header.getLastTranslator()
self.assertEqual(name, u'Carlos Perell\xf3 Mar\xedn')
self.assertEqual(email, u'[email protected]')
def test_Contexts(self):
"""Test that message context in XPI file is set to chrome path."""
queue_entry = self.setUpTranslationImportQueueForTranslation(
'clashing_ids')
importer = MozillaXpiImporter()
template = importer.parse(queue_entry)
messages = sorted([
(message.msgid_singular, message.context, message.singular_text)
for message in template.messages])
self.assertEquals(
[
(u'foozilla.clashing.key',
u'mac/extra.dtd',
u'This message is Mac-specific, and comes from DTD.'),
(u'foozilla.clashing.key',
u'mac/extra.properties',
u'This message is Mac-specific, and comes from properties.'),
(u'foozilla.clashing.key',
u'main/main.dtd',
u'This message is in the main DTD.'),
(u'foozilla.clashing.key',
u'main/main.properties',
u'This message is in the main properties file.'),<|fim▁hole|> u'This message is Unix-specific, and comes from DTD.'),
(u'foozilla.clashing.key',
u'unix/extra.properties',
u'This message is Unix-specific, and comes from properties.'),
(u'foozilla.clashing.key',
u'win/extra.dtd',
u'This message is Windows-specific, and comes from DTD.'),
(u'foozilla.clashing.key',
u'win/extra.properties',
u'This message is Windows-specific, '
'and comes from properties.'),
(u'foozilla.regular.message',
u'main/main.dtd',
u'A non-clashing message.'),
],
messages)
def test_SystemEntityIsIgnored(self):
"""Test handling of SYSTEM entities in DTD files."""
self.setUpTranslationImportQueueForTemplate('system-entity')
msgids = [
(potmsgset.msgid_singular.msgid, potmsgset.singular_text)
for potmsgset in self.firefox_template.getPOTMsgSets()]
self.assertEqual(msgids, [
('firststring', 'First translatable string'),
('secondstring', 'Second translatable string')])<|fim▁end|> | (u'foozilla.clashing.key',
u'unix/extra.dtd', |
<|file_name|>test_auth.py<|end_file_name|><|fim▁begin|># project/tests/test_auth.py
import time
import json
import unittest
from project.server import db
from project.server.models import User, BlacklistToken
from project.tests.base import BaseTestCase
def register_user(self, email, password):
return self.client.post(
'/auth/register',
data=json.dumps(dict(
email=email,
password=password
)),
content_type='application/json',
)
def login_user(self, email, password):
return self.client.post(
'/auth/login',
data=json.dumps(dict(
email=email,
password=password
)),
content_type='application/json',
)
class TestAuthBlueprint(BaseTestCase):
def test_registration(self):
""" Test for user registration """
with self.client:
response = register_user(self, '[email protected]', '123456')
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'success')
self.assertTrue(data['message'] == 'Successfully registered.')
self.assertTrue(data['auth_token'])
self.assertTrue(response.content_type == 'application/json')
self.assertEqual(response.status_code, 201)
def test_registered_with_already_registered_user(self):
""" Test registration with already registered email"""
user = User(
email='[email protected]',
password='test'
)
db.session.add(user)
db.session.commit()
with self.client:
response = register_user(self, '[email protected]', '123456')
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(
data['message'] == 'User already exists. Please Log in.')
self.assertTrue(response.content_type == 'application/json')
self.assertEqual(response.status_code, 202)
def test_registered_user_login(self):
""" Test for login of registered-user login """
with self.client:
# user registration
resp_register = register_user(self, '[email protected]', '123456')
data_register = json.loads(resp_register.data.decode())
self.assertTrue(data_register['status'] == 'success')
self.assertTrue(
data_register['message'] == 'Successfully registered.'
)
self.assertTrue(data_register['auth_token'])
self.assertTrue(resp_register.content_type == 'application/json')
self.assertEqual(resp_register.status_code, 201)
# registered user login
response = login_user(self, '[email protected]', '123456')
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'success')
self.assertTrue(data['message'] == 'Successfully logged in.')
self.assertTrue(data['auth_token'])
self.assertTrue(response.content_type == 'application/json')
self.assertEqual(response.status_code, 200)
def test_non_registered_user_login(self):
""" Test for login of non-registered user """
with self.client:
response = login_user(self, '[email protected]', '123456')
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'User does not exist.')
self.assertTrue(response.content_type == 'application/json')
self.assertEqual(response.status_code, 404)
def test_user_status(self):
""" Test for user status """
with self.client:
resp_register = register_user(self, '[email protected]', '123456')
response = self.client.get(
'/auth/status',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_register.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'success')
self.assertTrue(data['data'] is not None)
self.assertTrue(data['data']['email'] == '[email protected]')
            self.assertTrue(data['data']['admin'] in (True, False))
self.assertEqual(response.status_code, 200)
def test_user_status_malformed_bearer_token(self):
""" Test for user status with malformed bearer token"""
with self.client:
resp_register = register_user(self, '[email protected]', '123456')
response = self.client.get(
'/auth/status',
headers=dict(
Authorization='Bearer' + json.loads(
resp_register.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'Bearer token malformed.')
self.assertEqual(response.status_code, 401)
def test_valid_logout(self):
""" Test for logout before token expires """
with self.client:
# user registration
resp_register = register_user(self, '[email protected]', '123456')
data_register = json.loads(resp_register.data.decode())
self.assertTrue(data_register['status'] == 'success')
self.assertTrue(
data_register['message'] == 'Successfully registered.')
self.assertTrue(data_register['auth_token'])
self.assertTrue(resp_register.content_type == 'application/json')
self.assertEqual(resp_register.status_code, 201)
# user login
resp_login = login_user(self, '[email protected]', '123456')
data_login = json.loads(resp_login.data.decode())
self.assertTrue(data_login['status'] == 'success')
self.assertTrue(data_login['message'] == 'Successfully logged in.')
self.assertTrue(data_login['auth_token'])
self.assertTrue(resp_login.content_type == 'application/json')
self.assertEqual(resp_login.status_code, 200)
# valid token logout
response = self.client.post(
'/auth/logout',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)<|fim▁hole|> self.assertTrue(data['message'] == 'Successfully logged out.')
self.assertEqual(response.status_code, 200)
def test_valid_blacklisted_token_logout(self):
""" Test for logout after a valid token gets blacklisted """
with self.client:
# user registration
resp_register = register_user(self, '[email protected]', '123456')
data_register = json.loads(resp_register.data.decode())
self.assertTrue(data_register['status'] == 'success')
self.assertTrue(
data_register['message'] == 'Successfully registered.')
self.assertTrue(data_register['auth_token'])
self.assertTrue(resp_register.content_type == 'application/json')
self.assertEqual(resp_register.status_code, 201)
# user login
resp_login = login_user(self, '[email protected]', '123456')
data_login = json.loads(resp_login.data.decode())
self.assertTrue(data_login['status'] == 'success')
self.assertTrue(data_login['message'] == 'Successfully logged in.')
self.assertTrue(data_login['auth_token'])
self.assertTrue(resp_login.content_type == 'application/json')
self.assertEqual(resp_login.status_code, 200)
# blacklist a valid token
blacklist_token = BlacklistToken(
token=json.loads(resp_login.data.decode())['auth_token'])
db.session.add(blacklist_token)
db.session.commit()
# blacklisted valid token logout
response = self.client.post(
'/auth/logout',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'Token blacklisted. Please log in again.')
self.assertEqual(response.status_code, 401)
def test_valid_blacklisted_token_user(self):
""" Test for user status with a blacklisted valid token """
with self.client:
resp_register = register_user(self, '[email protected]', '123456')
# blacklist a valid token
blacklist_token = BlacklistToken(
token=json.loads(resp_register.data.decode())['auth_token'])
db.session.add(blacklist_token)
db.session.commit()
response = self.client.get(
'/auth/status',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_register.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'Token blacklisted. Please log in again.')
self.assertEqual(response.status_code, 401)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'success') |
<|file_name|>generic-unique.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Triple<T> { x: T, y: T, z: T }<|fim▁hole|>fn box<T:Copy>(x: Triple<T>) -> ~Triple<T> { return ~x; }
pub fn main() {
let x: ~Triple<int> = box::<int>(Triple{x: 1, y: 2, z: 3});
assert!((x.y == 2));
}<|fim▁end|> | |
<|file_name|>scripting.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
import os, shutil, glob
from functools import wraps
def print_warning(message, *args, **kwargs):
from . import colortext
if args or kwargs: message = message.format(*args, **kwargs)
colortext.write(message + '\n', color='red')
def print_error_and_die(message, *args, **kwargs):
aborting = "Aborting..."
if not message.endswith('\n'):
aborting = ' ' + aborting
print_warning(message + aborting, *args, **kwargs)
raise SystemExit(1)
class catch_and_print_errors:
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
if exc_type == KeyboardInterrupt:
print()
return True
if getattr(exc_value, 'no_stack_trace', False):
print_warning(str(exc_value))
return True
def __call__(self, function):
@wraps(function)
def wrapper(*args, **kwargs):
with self:
return function(*args, **kwargs)
return wrapper
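# Illustrative usage of the class above (the function name is a placeholder):
#
#     @catch_and_print_errors()
#     def main():
#         ...
#
# or, equivalently, as a context manager:
#
#     with catch_and_print_errors():
#         main()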
<|fim▁hole|> readline.parse_and_bind("tab: complete")
readline.set_completer(path_completer)
def path_completer(text, state):
globs = glob.glob(os.path.expanduser(text) + '*') + [None]
add_slash = lambda x: x + '/' if os.path.isdir(x) else x
return add_slash(globs[state])
def clear_directory(directory):
if os.path.exists(directory): shutil.rmtree(directory)
os.makedirs(directory)
def relative_symlink(target, link_name):
"""Make a symlink to target using the shortest possible relative path."""
link_name = os.path.abspath(link_name)
abs_target = os.path.abspath(target)
    rel_target = os.path.relpath(abs_target, os.path.dirname(link_name))
if os.path.exists(link_name):
os.remove(link_name)
os.symlink(rel_target, link_name)
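# e.g. relative_symlink('/a/b/target', '/a/c/link') creates the symlink
# /a/c/link -> ../b/target (illustrative paths).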
# Bread'n'butter shell commands.
def mkdir(newdir):
if os.path.isdir(newdir):
pass
elif os.path.isfile(newdir):
raise OSError("a file with the same name as the desired " \
"dir, '%s', already exists." % newdir)
else:
os.makedirs(newdir)
def touch(path):
with open(path, 'w'):
pass<|fim▁end|> |
def use_path_completion():
import readline
readline.set_completer_delims(' \t\n;') |
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>// Karma configuration file, see link for more information<|fim▁hole|>module.exports = function (config) {
config.set({
basePath: '',
frameworks: ['jasmine', '@angular-devkit/build-angular'],
plugins: [
require('karma-jasmine'),
require('karma-chrome-launcher'),
require('karma-jasmine-html-reporter'),
require('@angular-devkit/build-angular/plugins/karma')
],
client: {
clearContext: false // leave Jasmine Spec Runner output visible in browser
},
reporters: ['progress', 'kjhtml'],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['Chrome'],
singleRun: false,
restartOnFileChange: true
});
};<|fim▁end|> | // https://karma-runner.github.io/1.0/config/configuration-file.html
|
<|file_name|>map.js<|end_file_name|><|fim▁begin|>function(doc) {
if(doc.tags.length > 0) {
for(var idx in doc.tags) {
emit(doc.tags[idx], null);
}<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>cities.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
<|fim▁hole|> result_count = None
cities = None<|fim▁end|> | from iris_sdk.models.maps.base_map import BaseMap
class CitiesMap(BaseMap):
|
<|file_name|>scanwidget.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This file is part of the xc2424scan package
# Copyright (C) 2005 Mathieu Bouchard <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
This is the main widget of the xc2424scan application
This widget is self contained and can be included in any other Qt4
application.
"""
__all__ = ["ScanWidget"]
from PyQt4.QtCore import QDir, QObject, QRect, Qt, SIGNAL
from PyQt4.QtGui import QWidget, QFileDialog, QListWidgetItem, QPixmap, \
QIcon, QMessageBox, QInputDialog, QLineEdit, QPainter, \
QProgressDialog, QMessageBox, QSizePolicy, QDialog, \
QLabel, QVBoxLayout, QHBoxLayout, QSpacerItem, \
QSizePolicy, QPushButton
import os
from xc2424scan import config
from xc2424scan.threadedscanlib import ThreadedXeroxC2424
from xc2424scan.scanlib import ProtectedError, SocketError, NoPreviewError
from xc2424scan.ui.widgets.scanwidgetbase import Ui_ScanWidgetBase
class ProgressFullDialog(QProgressDialog):
def __init__(self, parent = None):
QProgressDialog.__init__(self, parent)
self.setWindowTitle(_("Downloading"))
# Top level fixed size dialog
self.setWindowModality(Qt.WindowModal)
# Do not close when reaching 100%
self.setAutoClose(False)
self.setAutoReset(False)
self.__nbr_pages_ = -1
def setNbrPages(self, nbr_pages):
self.__nbr_pages_ = nbr_pages
def newpage(self, current_page, file_size):
if self.isVisible():
# Set progress value to 0 and range to file size
self.setValue(0)
self.setRange(0, file_size)
# Set label text
if self.__nbr_pages_ == 1:
self.setLabelText(_("Getting page %d") % current_page)
else:
self.setLabelText(_("Getting page %d of %d") % \
(current_page, self.__nbr_pages_))
def progress(self, received_size):
if self.isVisible():
self.setValue(self.value() + received_size)
class ProgressDialog(QDialog):
def __init__(self, parent = None):
QDialog.__init__(self, parent)
self.setWindowTitle(_("Downloading"))
# Top level fixed size dialog
self.setWindowModality(Qt.WindowModal)
self.__page_ = QLabel(self)
self.__progress_ = QLabel(self)
self.__cancel_ = QPushButton(self)
self.__downloaded_ = 0
self.__nbr_pages_ = 0
vboxlayout = QVBoxLayout(self)
# Page status
labellayout = QHBoxLayout()
labellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
labellayout.addWidget(self.__page_)
labellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
vboxlayout.addLayout(labellayout)
# Progress status
progresslayout = QHBoxLayout()
progresslayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
progresslayout.addWidget(self.__progress_)
progresslayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
vboxlayout.addLayout(progresslayout)
# Cancel button
cancellayout = QHBoxLayout()
cancellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
cancellayout.addWidget(self.__cancel_)
vboxlayout.addLayout(cancellayout)
self.__cancel_.setDefault(True)
self.__cancel_.setText("Cancel")
QObject.connect(self.__cancel_, SIGNAL("clicked()"),
self.__ui_progress_canceled_)
QObject.connect(self, SIGNAL("rejected()"),
self.__ui_progress_canceled_)
def __ui_progress_canceled_(self):
self.emit(SIGNAL("canceled()"))
def setLabelText(self, text):
self.__page_.setText(text)
def setValue(self, value):
self.__downloaded_ = value
self.progress(0)
def setNbrPages(self, nbr_pages):
self.__nbr_pages_ = nbr_pages
def newpage(self, current_page, file_size = None):
if self.isVisible():
# Set progress value to 0
self.setValue(0)
# Set label text
if self.__nbr_pages_ == 0:
# Only happens when getting a pdf file
self.__page_.setText(_("Getting file"))
elif self.__nbr_pages_ == 1:
self.__page_.setText(_("Getting page %d") % current_page)
else:
self.__page_.setText(_("Getting page %d of %d") % \
(current_page, self.__nbr_pages_))
def progress(self, received_size):
self.__downloaded_ += received_size
if self.isVisible():
size = self.__downloaded_ / 1024
if size > 1024:
size = float(size) / 1024
self.__progress_.setText("Received %.3f mb" % size)
else:
self.__progress_.setText("Received %d kb" % size)
class ProgressWrapper(QObject):
def __init__(self, parent = None):
QObject.__init__(self)
self.__progress_full_ = ProgressFullDialog(parent)
self.__progress_ = ProgressDialog(parent)
self.__current_ = None
QObject.connect(self.__progress_full_, SIGNAL("canceled()"),
self.__ui_progress_canceled_)
QObject.connect(self.__progress_, SIGNAL("canceled()"),
self.__ui_progress_canceled_)
def show(self, format, nbr_pages):
if format in ["tiff", "bmp"]:
self.__current_ = self.__progress_full_
else:
self.__current_ = self.__progress_
self.__current_.setLabelText(_("Waiting for transfer to begin"))
self.__current_.setValue(0)
self.__current_.setNbrPages(nbr_pages)
self.__current_.show()
def __ui_progress_canceled_(self):
self.emit(SIGNAL("canceled()"))
def newpage(self, current_page, file_size):
if self.__current_ is not None:
self.__current_.newpage(current_page, file_size)
def progress(self, received_size):
if self.__current_ is not None:
self.__current_.progress(received_size)
def isVisible(self):
if self.__current_ is not None:
return self.__current_.isVisible()
else:
return False
def hide(self):
if self.__current_ is not None:
self.__current_.hide()
class ScanWidget(QWidget):
"""The main scanning widget"""
def __init__(self, parent = None):
"""Create a new scanning widget
@param parent: The parent widget
@type parent: QWidget
"""
QWidget.__init__(self, parent)
self.__basewidget_ = Ui_ScanWidgetBase()
self.__basewidget_.setupUi(self)
# The threaded scanner object
self.__scanner_ = ThreadedXeroxC2424()
# List of files available on the scanner
self.__scanned_files_ = None
# Last folder visited
self.__old_folder_ = "Public"
# Progress dialog
self.__progress_ = ProgressWrapper(self)
# UI: Buttons
QObject.connect(self.__basewidget_.refresh, SIGNAL("clicked()"),
self.__ui_refresh_clicked_)
QObject.connect(self.__basewidget_.delete, SIGNAL("clicked()"),
self.__ui_delete_clicked_)
QObject.connect(self.__basewidget_.save, SIGNAL("clicked()"),
self.__ui_save_clicked_)
# UI: An option has been modified
QObject.connect(self.__basewidget_.folder,
SIGNAL("activated(const QString&)"),
self.__ui_folder_currentChanged_)
# UI: List widget
QObject.connect(self.__basewidget_.imageList,
SIGNAL("currentTextChanged(const QString&)"),
self.__ui_imageList_currentChanged_)
QObject.connect(self.__basewidget_.format,
SIGNAL("currentIndexChanged(const QString&)"),
self.__ui_format_currentChanged_)
# Signals emited from threads
QObject.connect(self.__scanner_, SIGNAL("foldersList()"),
self.__foldersListReceived_)
QObject.connect(self.__scanner_, SIGNAL("filesList()"),
self.__filesListReceived_)
QObject.connect(self.__scanner_, SIGNAL("folderSet(const QString&)"),
self.__folderSetReceived_)
QObject.connect(self.__scanner_, SIGNAL("folderProtected(const QString&)"),
self.__folderProtectedReceived_)
QObject.connect(self.__scanner_, SIGNAL("fileReceived(const QString&)"),
self.__fileReceived_)
QObject.connect(self.__scanner_, SIGNAL("previewReceived(const QString&)"),
self.__previewReceived_)
QObject.connect(self.__scanner_, SIGNAL("allPreviewReceived()"),
self.__allPreviewReceived_)
QObject.connect(self.__scanner_, SIGNAL("fileDeleted(const QString&)"),
self.__fileDeletedReceived_)
QObject.connect(self.__scanner_, SIGNAL("connectedToScanner()"),
self.__connectedToScannerReceived_)
QObject.connect(self.__scanner_, SIGNAL("scanlibError(const QString&)"),
self.__scanlibErrorReceived)
QObject.connect(self.__scanner_, SIGNAL("newPage(int, int)"),
self.__progress_.newpage)
QObject.connect(self.__scanner_, SIGNAL("progress(int)"),
self.__progress_.progress)
# Progress dialog
QObject.connect(self.__progress_, SIGNAL("canceled()"),
self.__ui_progress_canceled_)
self.__lock_()
#
# Methods connected to thread signals
#
def __scanlibErrorReceived(self, text):
"""Called when there is an error in the scan library
@param text: The text of the error
@type text: str
"""
if self.__progress_.isVisible():
self.__progress_.hide()
QMessageBox.critical(self, "Critical error", text)
if self.__scanner_.connected:
self.__unlock_()
def __connectedToScannerReceived_(self):
"""Called when we are connected to a new scanner"""
# Show the public directory
if config.DEBUG_GUI:
print "<-- Connected to scanner"
# Clear the list of files and request the available folders
self.__basewidget_.imageList.clear()
self.__scanner_.getFolders()
def __folderSetReceived_(self, folder):
"""Called when we have changed the current folder
@param folder: The folder name
@type folder: str
"""
if config.DEBUG_GUI:
print "<-- Folder has been set:", str(folder)
# Save old folder
self.__old_folder_ = str(folder)
# Refresh the contents of the folder
self.__refreshPreviews_()
def __folderProtectedReceived_(self, folder):
"""Called when we are trying to access a protected folder
@param folder: The folder name
@type folder: str
"""
if config.DEBUG_GUI:
print "<-- Protected folder:", folder
folder = str(folder)
password, result = QInputDialog.getText(self, "Accessing a protected folder",
"Please enter the password for the protected " \
"folder %s" % folder, QLineEdit.Password)
if result is True:
self.__scanner_.setFolder(folder, str(password))
else:
folder_index = self.__basewidget_.folder.findText(self.__old_folder_)
self.__basewidget_.folder.setCurrentIndex(folder_index)
self.__unlock_()
def __fileReceived_(self, filename):
"""Called when a file tranfert has been successfully completed
@param filename: The file name
@type filename: str
"""
if config.DEBUG_GUI:
print "<-- File transfer finished for:", filename
# Reset the progress dialog and unlock the widget
self.__progress_.hide()
self.__unlock_()
def __allPreviewReceived_(self):
"""Received when we have received all previews"""
if config.DEBUG_GUI:
print "<-- All previews received"
self.__unlock_()
self.__basewidget_.imageList.setCurrentItem(self.__basewidget_.imageList.item(0))
def __previewReceived_(self, filename):
"""Received when a preview has been received
@param filename: The filename of the preview
@type filename: str
"""
if config.DEBUG_GUI:
print "<-- Preview received:", filename
filename = str(filename)
preview = self.__scanner_.previews[filename]
del self.__scanner_.previews[filename]
# Create the pixmap item
pixmap = QPixmap()
if preview == None:
pixmap.load(config.NO_PREVIEW_FILENAME)
else:
pixmap.loadFromData(preview)
# Add a black border
self.__add_black_border_(pixmap)
# Add the new icon to the list
items = self.__basewidget_.imageList.findItems(filename, Qt.MatchExactly)
items[0].setIcon(QIcon(pixmap))
def __fileDeletedReceived_(self, filename):
"""Called when a file has been deleted
@param filename: The name of the deleted file
@type filename: str
"""
if config.DEBUG_GUI:
print "<-- File deleted:", filename
# Remove the deleted item from the list
items = self.__basewidget_.imageList.findItems(filename, Qt.MatchExactly)
item = self.__basewidget_.imageList.takeItem(self.__basewidget_.imageList.row(items[0]))
del item
# Unlock the widget
self.__unlock_()
def __foldersListReceived_(self):
"""Called when the folders listing has arrived"""
if config.DEBUG_GUI:
print "<-- Received folder listing"
# Add the folders to the list of folders
for folder in self.__scanner_.folders:
self.__basewidget_.folder.addItem(folder)
# Refresh the files of the current folder
self.__refreshPreviews_()
def __filesListReceived_(self):
"""Called when the files listing of the current folder has arrived"""
if config.DEBUG_GUI:
print "<-- Received files listing"
self.__scanned_files_ = self.__scanner_.files
# Add the files to the list and request their previews
if len(self.__scanned_files_) != 0:
# Sort by filename (wich is also by date)
filenames = self.__scanned_files_.keys()
filenames.sort()
# Create the Waiting for preview pixmap
pixmap = QPixmap()
pixmap.load(config.WAITING_PREVIEW_FILENAME)
self.__add_black_border_(pixmap)
# Add the files to the list
for filename in filenames:
self.__basewidget_.imageList.addItem(QListWidgetItem(QIcon(pixmap), filename))
# Request the previews
if config.DEBUG_GUI:
print "--> Requesting previews"
self.__scanner_.getPreviews(filenames)
else:
self.__unlock_()
#
# Methods connected to the UI
#
def __ui_refresh_clicked_(self):
"""Called when the user activates the refresh button
This method clears the files list and request the current files list
again
"""
# Refresh the folder contents
self.__refreshPreviews_()
def __ui_delete_clicked_(self):
"""Called when the user activates the delete button
This method delete the current selected file
"""
if config.DEBUG_GUI:
print "--> Deleting file"
filename = self.currentFilename()
if filename is not None:
result = QMessageBox.question(self, "Confirmation of file deletion",
"Do you really want to delete the file %s " \
"from the scanner?" % filename,
QMessageBox.Yes, QMessageBox.No)
if result == QMessageBox.Yes:
self.__scanner_.deleteFile(filename)
else:
print "WARNING: No file selected (save), this should not happen"
def __ui_save_clicked_(self):
"""Called when the user activates the save button
This method ask for a filename and download the selected pages
"""
if config.DEBUG_GUI:
print "--> Saving file"
filename = self.currentFilename()
# Check if a file has been selected
if filename is not None:
# Ask for filename
save_filter = self.__get_format_filter_()
default_save_filename = os.path.join(str(QDir.homePath()),
"%s.%s" % (os.path.splitext(filename)[0],
self.getFormat()))
save_filename = str(QFileDialog.getSaveFileName(self, "Saving scanned file",
default_save_filename,
save_filter))
if save_filename != "":
self.__lock_()
# Add file format if not specified
if os.path.splitext(save_filename)[1] == "":
save_filename += ".%s" % self.getFormat()
# Call the saving thread method
format = self.getFormat()
pages = self.getPages()
dpi = self.getDpi()
if dpi == None:
dpi = self.__scanned_files_[filename]["dpi"]
samplesize = self.getSamplesize()
self.__scanner_.getFile(filename, save_filename, pages,
format, dpi, samplesize)
# Show the progress dialog
self.__progress_.show(format, len(pages))
else:
print "WARNING: No file selected (save), this should not happen"
def __ui_folder_currentChanged_(self, folder):
"""Called when the current folder has been changed
If the user has selected another directory, we need to list the contents
of this directory
"""
if config.DEBUG_GUI:
print "--> Changing folder"
folder = str(folder)
if folder != self.__old_folder_:
self.__lock_()
# Request the new folder
self.__scanner_.setFolder(folder)
def __ui_imageList_currentChanged_(self, filename):
"""Called when the user select an image in the image list
@param filename: The file name of the selected file
@type filename: str
"""
filename = str(filename)
if config.DEBUG_GUI:
print "--- Selected file: \"%s\"" % filename
if filename == "":
self.__basewidget_.info_nbPages.setText("")
self.__basewidget_.info_dpi.setText("")
self.__basewidget_.info_resolution.setText("")
self.__clearOptions_()
self.__basewidget_.delete.setEnabled(False)
self.__basewidget_.save.setEnabled(False)
self.__basewidget_.format.setEnabled(False)
self.__basewidget_.page.setEnabled(False)
self.__basewidget_.resolution.setEnabled(False)
self.__basewidget_.color.setEnabled(False)
else:
file_infos = self.__scanned_files_[filename]
# Show basic informations
self.__basewidget_.info_nbPages.setText(str(file_infos["nbpages"]))
self.__basewidget_.info_dpi.setText("%dx%d dpi" % \
(file_infos["dpi"][0],
file_infos["dpi"][1]))
self.__basewidget_.info_resolution.setText("%dx%d" % \
(file_infos["resolution"][0],
file_infos["resolution"][1]))
# Create file options
self.__clearOptions_()
# Add pages
pages = []
if file_infos["nbpages"] > 1:
pages.append("all")
pages.extend([str(x) for x in range(1, file_infos["nbpages"] + 1)])
self.__basewidget_.page.addItems(pages)
# Add dpi
dpis = ["max"]
dpis.extend(["%dx%d" % (x, x) for x in [100, 200, 300, 400, 600]
if x <= file_infos["dpi"][0]])
self.__basewidget_.resolution.addItems(dpis)
# Add samplesize
if file_infos["samplesize"] == 24:
self.__basewidget_.color.addItem("Color")
if file_infos["samplesize"] >= 8:
self.__basewidget_.color.addItem("Grayscale")
self.__basewidget_.color.addItem("Black & White")
# Enable buttons
self.__basewidget_.delete.setEnabled(True)
self.__basewidget_.save.setEnabled(True)
# Enable options
self.__basewidget_.format.setEnabled(True)
self.__basewidget_.resolution.setEnabled(True)
self.__basewidget_.color.setEnabled(True)
self.__ui_format_currentChanged_(self.__basewidget_.format.currentText())
def __ui_format_currentChanged_(self, format):
"""Called when file format has changed
If the file format is pdf, we cannot select a page. If it is not pdf, we
need to enable the page selector
"""<|fim▁hole|> if format == "pdf":
self.__basewidget_.page.setCurrentIndex(0)
self.__basewidget_.page.setEnabled(False)
else:
self.__basewidget_.page.setEnabled(True)
def __ui_progress_canceled_(self):
"""Called when the user click on the progress cancel button"""
if config.DEBUG_GUI:
print "--- Canceled saving"
self.__scanner_.stop()
#
# Other methods
#
def __get_format_filter_(self):
format = self.getFormat()
if format == "tiff":
filter = _("TIFF images (*.tif *.tiff)")
elif format == "gif":
filter = _("GIF images (*.gif)")
elif format == "jpeg":
filter = _("JPEG images (*.jpg *.jpeg)")
elif format == "bmp":
filter = _("BMP images (*.bmp)")
elif format == "pdf":
filter = _("PDF files (*.pdf)")
else:
filter = ""
return filter + ";;All files (*)"
def __add_black_border_(self, pixmap):
"""Add a black border around a pixmap
@param pixmap: The pixmap
@type pixmap: QPixmap
"""
painter = QPainter()
painter.begin(pixmap)
painter.setPen(Qt.black);
painter.drawRect(QRect(0, 0, pixmap.width() - 1, pixmap.height() - 1))
painter.end()
def __refreshPreviews_(self):
if config.DEBUG_GUI:
print "--> Refreshing previews"
self.__basewidget_.imageList.clear()
self.__lock_()
self.__scanner_.getFilesList()
def __clearOptions_(self):
self.__basewidget_.page.clear()
self.__basewidget_.resolution.clear()
self.__basewidget_.color.clear()
def __lock_(self):
self.__basewidget_.refresh.setEnabled(False)
self.__basewidget_.folder.setEnabled(False)
self.__basewidget_.imageList.setEnabled(False)
self.__basewidget_.save.setEnabled(False)
self.__basewidget_.delete.setEnabled(False)
self.__basewidget_.format.setEnabled(False)
self.__basewidget_.page.setEnabled(False)
self.__basewidget_.resolution.setEnabled(False)
self.__basewidget_.color.setEnabled(False)
def __unlock_(self):
self.__basewidget_.refresh.setEnabled(True)
self.__basewidget_.folder.setEnabled(True)
self.__basewidget_.imageList.setEnabled(True)
if self.currentFilename() is not None:
self.__basewidget_.save.setEnabled(True)
self.__basewidget_.delete.setEnabled(True)
self.__basewidget_.format.setEnabled(True)
self.__basewidget_.page.setEnabled(True)
self.__basewidget_.resolution.setEnabled(True)
self.__basewidget_.color.setEnabled(True)
#
# API public
#
def currentFilename(self):
currentItem = self.__basewidget_.imageList.currentItem()
        # This check is unnecessary, because the delete button is only
        # enabled when an item is selected, but you never know
if currentItem is not None:
return str(currentItem.text())
def currentFolder(self):
return str(self.__basewidget_.folder.currentText())
def getFormat(self):
return str(self.__basewidget_.format.currentText()).lower()
def getDpi(self):
dpi = str(self.__basewidget_.resolution.currentText())
if dpi == "max":
return None
elif dpi == "100x100":
return [100, 100]
elif dpi == "200x200":
return [200, 200]
elif dpi == "300x300":
return [300, 300]
elif dpi == "400x400":
return [400, 400]
elif dpi == "600x600":
return [600, 600]
def getPages(self):
if self.getFormat() == "pdf":
return []
if str(self.__basewidget_.page.currentText()) == "all":
return [x for x in range(1, self.__scanned_files_[self.currentFilename()]["nbpages"] + 1)]
else:
return [int(str(self.__basewidget_.page.currentText()))]
def getSamplesize(self):
samplesize = str(self.__basewidget_.color.currentText())
# 24 bits color
if samplesize == "Color":
return 24
# 8 tones grayscale
elif samplesize == "Grayscale":
return 8
# black and white
else:
return 1
def connectToScanner(self, host, port):
if config.DEBUG_GUI:
print "--> Connecting to scanner"
self.__scanner_.connectToScanner(host, port)
def disconnect(self):
if config.DEBUG_GUI:
print "--> Disconnecting from scanner"
self.__scanner_.disconnect()<|fim▁end|> | format = str(format).lower() |
<|file_name|>unboxed-closures-by-ref.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
// Test by-ref capture of environment in unboxed closure types
fn call_fn<F: Fn()>(f: F) {<|fim▁hole|>}
fn call_fn_mut<F: FnMut()>(mut f: F) {
f()
}
fn call_fn_once<F: FnOnce()>(f: F) {
f()
}
fn main() {
let mut x = 0u;
let y = 2u;
call_fn(|&:| assert_eq!(x, 0));
call_fn_mut(|&mut:| x += y);
call_fn_once(|:| x += y);
assert_eq!(x, y * 2);
}<|fim▁end|> | f() |
<|file_name|>teste.py<|end_file_name|><|fim▁begin|>from appkit.api.v0_2_8 import App<|fim▁hole|>
app = App(__name__)
@app.route("/")
def home():
return '<a href="#" target="_blank">Clique</a>'
app.run()<|fim▁end|> | |
<|file_name|>ps_dispatcher.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
class PSDispatcher(object):
"""
PSDispatcher is the base class for dispatching vars
    into different pserver instances.
You need to implement the `dispatch` interface.
"""
def __init__(self, pserver_endpoints):
self._eps = pserver_endpoints
self._step = 0
@property
def eps(self):
return self._eps
def reset(self):
"""
        Reset the step counter to zero.
"""
self._step = 0
def dispatch(self, varlist):
"""
Args:
varlist(list): a list of Variables
        Returns:
            a list of pserver endpoints, one per variable in varlist
"""
raise NotImplementedError("Interface has not been implemented.")
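# Illustrative sketch (not part of the original file): a custom dispatcher
# only needs to subclass PSDispatcher and implement dispatch(). This
# hypothetical example pins every variable to the first parameter server.
class FirstEndpointDispatcher(PSDispatcher):
    def dispatch(self, varlist):
        # one endpoint per variable, always the first pserver
        return [self._eps[0] for _ in varlist]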
class HashName(PSDispatcher):
"""
    Hash variable names to several endpoints using Python's
    built-in "hash()" function.
Args:
pserver_endpoints (list): list of endpoint(ip:port).
Examples:
.. code-block:: python
pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
vars = ["var1","var2","var3","var4","var5"]
            hash_dispatcher = HashName(pserver_endpoints)
            hash_dispatcher.dispatch(vars)
"""
def __init__(self, pserver_endpoints):
super(HashName, self).__init__(pserver_endpoints)
def _hash_block(self, block_str, total):
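        # Map a block/variable name to a pserver index. Note: in Python 3 the
        # built-in hash() of strings is randomized per process unless
        # PYTHONHASHSEED is fixed, so the mapping is only stable within a run.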
return hash(block_str) % total
def dispatch(self, varlist):
"""
        Use the `HashName` method to dispatch variables across the parameter servers.
Args:
varlist (list): a list of Variables
"""
eplist = []
for var in varlist:
server_id = self._hash_block(var.name(), len(self._eps))
server_for_param = self._eps[server_id]
eplist.append(server_for_param)
return eplist
class RoundRobin(PSDispatcher):
"""
    Distribute variables to several endpoints using the
    round-robin <https://en.wikipedia.org/wiki/Round-robin_scheduling> method.
Args:
pserver_endpoints (list): list of endpoint(ip:port).
Examples:
.. code-block:: python
pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
vars = ["var1","var2","var3","var4","var5"]
rr = RoundRobin(pserver_endpoints)
rr.dispatch(vars)<|fim▁hole|> super(RoundRobin, self).__init__(pserver_endpoints)
def dispatch(self, varlist):
"""
        Use the `RoundRobin` method to dispatch variables across the parameter servers.
Args:
varlist (list): a list of Variables
"""
eplist = []
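        # Hand out endpoints in the order they were given, wrapping the step
        # counter so successive calls stay balanced until reset() is invoked.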
for var in varlist:
server_for_param = self._eps[self._step]
eplist.append(server_for_param)
self._step += 1
if self._step >= len(self._eps):
self._step = 0
return eplist<|fim▁end|> |
"""
def __init__(self, pserver_endpoints): |
<|file_name|>test_crypto.py<|end_file_name|><|fim▁begin|>__author__ = 'hkar'
import Vault.Crypto
import Vault.Key
from test_helpers import *
import os
def test_aes(tmpdir):
# make tmp text files
f = create_test_file(tmpdir)
text = str(f.read())
# define file names
file_in = str(f)
file_out = file_in + ".enc"
# generate random secret
secret = get_random_string()
# encrypt test file
Vault.Crypto.AesSymmetric.encrypt(file_in, file_out, secret)
# remove original test file
os.remove(file_in)
# decrypt test file
Vault.Crypto.AesSymmetric.decrypt(file_out, file_in, secret)
assert text == open(file_in, 'r').read()
def test_rsa(tmpdir):
# make tmp text files
f = create_test_file(tmpdir, length=64)
text = str(f.read())
# define file names
file_in = str(f)
file_out = file_in + ".enc"
# generate keys
Vault.Key.RsaKey.generate()
# encrypt test file
Vault.Crypto.RsaAsymmetric.encrypt(file_in, file_out, Vault.Key.RsaKey.public())
# remove original test file
os.remove(file_in)
# decrypt test file
Vault.Crypto.RsaAsymmetric.decrypt(file_out, file_in, Vault.Key.RsaKey.private())
<|fim▁hole|> Vault.Key.RsaKey.delete_keys()
assert text == open(file_in, 'r').read()<|fim▁end|> | |
<|file_name|>AbstractJavaBlock.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.formatter.java;
import com.intellij.formatting.*;
import com.intellij.formatting.alignment.AlignmentStrategy;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.JavaCodeStyleSettings;
import com.intellij.psi.formatter.FormatterUtil;
import com.intellij.psi.formatter.common.AbstractBlock;
import com.intellij.psi.formatter.java.wrap.JavaWrapManager;
import com.intellij.psi.formatter.java.wrap.ReservedWrapsProvider;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.codeStyle.ShiftIndentInsideHelper;
import com.intellij.psi.impl.source.tree.*;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.impl.source.tree.java.ClassElement;
import com.intellij.psi.jsp.JspElementType;
import com.intellij.psi.tree.IElementType;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.intellij.psi.formatter.java.JavaFormatterUtil.getWrapType;
import static com.intellij.psi.formatter.java.MultipleFieldDeclarationHelper.findLastFieldInGroup;
public abstract class AbstractJavaBlock extends AbstractBlock implements JavaBlock, ReservedWrapsProvider {
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.formatter.java.AbstractJavaBlock");
@NotNull protected final CommonCodeStyleSettings mySettings;
@NotNull protected final JavaCodeStyleSettings myJavaSettings;
protected final CommonCodeStyleSettings.IndentOptions myIndentSettings;
private final Indent myIndent;
protected Indent myChildIndent;
protected Alignment myChildAlignment;
protected boolean myUseChildAttributes = false;
@NotNull protected final AlignmentStrategy myAlignmentStrategy;
private boolean myIsAfterClassKeyword = false;
protected Alignment myReservedAlignment;
protected Alignment myReservedAlignment2;
private final JavaWrapManager myWrapManager;
private Map<IElementType, Wrap> myPreferredWraps;
private AbstractJavaBlock myParentBlock;
protected AbstractJavaBlock(@NotNull final ASTNode node,
final Wrap wrap,
final Alignment alignment,
final Indent indent,
@NotNull final CommonCodeStyleSettings settings,
@NotNull JavaCodeStyleSettings javaSettings)
{
this(node, wrap, indent, settings, javaSettings, JavaWrapManager.INSTANCE, AlignmentStrategy.wrap(alignment));
}
protected AbstractJavaBlock(@NotNull final ASTNode node,
final Wrap wrap,
@NotNull final AlignmentStrategy alignmentStrategy,
final Indent indent,
@NotNull final CommonCodeStyleSettings settings,
@NotNull JavaCodeStyleSettings javaSettings)
{
this(node, wrap, indent, settings, javaSettings, JavaWrapManager.INSTANCE, alignmentStrategy);
}
private AbstractJavaBlock(@NotNull ASTNode ignored,
@NotNull CommonCodeStyleSettings commonSettings,
@NotNull JavaCodeStyleSettings javaSettings) {
super(ignored, null, null);
mySettings = commonSettings;
myJavaSettings = javaSettings;
myIndentSettings = commonSettings.getIndentOptions();
myIndent = null;
myWrapManager = JavaWrapManager.INSTANCE;
myAlignmentStrategy = AlignmentStrategy.getNullStrategy();
}
protected AbstractJavaBlock(@NotNull final ASTNode node,
final Wrap wrap,
final Indent indent,
@NotNull final CommonCodeStyleSettings settings,
@NotNull JavaCodeStyleSettings javaSettings,
final JavaWrapManager wrapManager,
@NotNull final AlignmentStrategy alignmentStrategy) {
super(node, wrap, createBlockAlignment(alignmentStrategy, node));
mySettings = settings;
myJavaSettings = javaSettings;
myIndentSettings = settings.getIndentOptions();
myIndent = indent;
myWrapManager = wrapManager;
myAlignmentStrategy = alignmentStrategy;
}
@Nullable
private static Alignment createBlockAlignment(@NotNull AlignmentStrategy strategy, @NotNull ASTNode node) {
// There is a possible case that 'implements' section is incomplete (e.g. ends with comma). We may want to align lbrace
// to the first implemented interface reference then.
if (node.getElementType() == JavaElementType.IMPLEMENTS_LIST) {
return null;
}
return strategy.getAlignment(node.getElementType());
}
@NotNull
public Block createJavaBlock(@NotNull ASTNode child,
@NotNull CommonCodeStyleSettings settings,
@NotNull JavaCodeStyleSettings javaSettings,
@Nullable Indent indent,
@Nullable Wrap wrap,
Alignment alignment) {
return createJavaBlock(child, settings, javaSettings,indent, wrap, AlignmentStrategy.wrap(alignment));<|fim▁hole|> }
@NotNull
public Block createJavaBlock(@NotNull ASTNode child,
@NotNull CommonCodeStyleSettings settings,
@NotNull JavaCodeStyleSettings javaSettings,
final Indent indent,
@Nullable Wrap wrap,
@NotNull AlignmentStrategy alignmentStrategy) {
return createJavaBlock(child, settings, javaSettings, indent, wrap, alignmentStrategy, -1);
}
@NotNull
private Block createJavaBlock(@NotNull ASTNode child,
@NotNull CommonCodeStyleSettings settings,
@NotNull JavaCodeStyleSettings javaSettings,
@Nullable Indent indent,
Wrap wrap,
@NotNull AlignmentStrategy alignmentStrategy,
int startOffset) {
Indent actualIndent = indent == null ? getDefaultSubtreeIndent(child, getJavaIndentOptions(settings)) : indent;
final IElementType elementType = child.getElementType();
Alignment alignment = alignmentStrategy.getAlignment(elementType);
if (child.getPsi() instanceof PsiWhiteSpace) {
String text = child.getText();
int start = CharArrayUtil.shiftForward(text, 0, " \t\n");
int end = CharArrayUtil.shiftBackward(text, text.length() - 1, " \t\n") + 1;
LOG.assertTrue(start < end);
return new PartialWhitespaceBlock(child, new TextRange(start + child.getStartOffset(), end + child.getStartOffset()),
wrap, alignment, actualIndent, settings, javaSettings);
}
if (child.getPsi() instanceof PsiClass) {
return new CodeBlockBlock(child, wrap, alignment, actualIndent, settings, javaSettings);
}
if (isBlockType(elementType)) {
return new BlockContainingJavaBlock(child, wrap, alignment, actualIndent, settings, javaSettings);
}
if (isStatement(child, child.getTreeParent())) {
return new CodeBlockBlock(child, wrap, alignment, actualIndent, settings, javaSettings);
}
if (isBuildInjectedBlocks() &&
child instanceof PsiComment &&
child instanceof PsiLanguageInjectionHost &&
InjectedLanguageUtil.hasInjections((PsiLanguageInjectionHost)child)) {
return new CommentWithInjectionBlock(child, wrap, alignment, indent, settings, javaSettings);
}
if (child instanceof LeafElement) {
final LeafBlock block = new LeafBlock(child, wrap, alignment, actualIndent);
block.setStartOffset(startOffset);
return block;
}
else if (isLikeExtendsList(elementType)) {
return new ExtendsListBlock(child, wrap, alignmentStrategy, settings, javaSettings);
}
else if (elementType == JavaElementType.CODE_BLOCK) {
return new CodeBlockBlock(child, wrap, alignment, actualIndent, settings, javaSettings);
}
else if (elementType == JavaElementType.LABELED_STATEMENT) {
return new LabeledJavaBlock(child, wrap, alignment, actualIndent, settings, javaSettings);
}
else if (elementType == JavaDocElementType.DOC_COMMENT) {
return new DocCommentBlock(child, wrap, alignment, actualIndent, settings, javaSettings);
}
else {
final SimpleJavaBlock simpleJavaBlock = new SimpleJavaBlock(child, wrap, alignmentStrategy, actualIndent, settings, javaSettings);
simpleJavaBlock.setStartOffset(startOffset);
return simpleJavaBlock;
}
}
@NotNull
public static Block newJavaBlock(@NotNull ASTNode child,
@NotNull CommonCodeStyleSettings settings,
@NotNull JavaCodeStyleSettings javaSettings) {
final Indent indent = getDefaultSubtreeIndent(child, getJavaIndentOptions(settings));
return newJavaBlock(child, settings, javaSettings, indent, null, AlignmentStrategy.getNullStrategy());
}
@NotNull
public static Block newJavaBlock(@NotNull ASTNode child,
@NotNull CommonCodeStyleSettings settings,
@NotNull JavaCodeStyleSettings javaSettings,
@Nullable Indent indent,
@Nullable Wrap wrap,
@NotNull AlignmentStrategy strategy) {
return new AbstractJavaBlock(child, settings, javaSettings) {
@Override
protected List<Block> buildChildren() {
return null;
}
}.createJavaBlock(child, settings, javaSettings, indent, wrap, strategy);
}
@NotNull
private static CommonCodeStyleSettings.IndentOptions getJavaIndentOptions(CommonCodeStyleSettings settings) {
CommonCodeStyleSettings.IndentOptions indentOptions = settings.getIndentOptions();
assert indentOptions != null : "Java indent options are not initialized";
return indentOptions;
}
private static boolean isLikeExtendsList(final IElementType elementType) {
return elementType == JavaElementType.EXTENDS_LIST
|| elementType == JavaElementType.IMPLEMENTS_LIST
|| elementType == JavaElementType.THROWS_LIST;
}
private static boolean isBlockType(final IElementType elementType) {
return elementType == JavaElementType.SWITCH_STATEMENT
|| elementType == JavaElementType.FOR_STATEMENT
|| elementType == JavaElementType.WHILE_STATEMENT
|| elementType == JavaElementType.DO_WHILE_STATEMENT
|| elementType == JavaElementType.TRY_STATEMENT
|| elementType == JavaElementType.CATCH_SECTION
|| elementType == JavaElementType.IF_STATEMENT
|| elementType == JavaElementType.METHOD
|| elementType == JavaElementType.ARRAY_INITIALIZER_EXPRESSION
|| elementType == JavaElementType.ANNOTATION_ARRAY_INITIALIZER
|| elementType == JavaElementType.CLASS_INITIALIZER
|| elementType == JavaElementType.SYNCHRONIZED_STATEMENT
|| elementType == JavaElementType.FOREACH_STATEMENT;
}
@Nullable
private static Indent getDefaultSubtreeIndent(@NotNull ASTNode child, @NotNull CommonCodeStyleSettings.IndentOptions indentOptions) {
final ASTNode parent = child.getTreeParent();
final IElementType childNodeType = child.getElementType();
if (childNodeType == JavaElementType.ANNOTATION) {
if (parent.getPsi() instanceof PsiArrayInitializerMemberValue) {
return Indent.getNormalIndent();
}
return Indent.getNoneIndent();
}
final ASTNode prevElement = FormatterUtil.getPreviousNonWhitespaceSibling(child);
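    // Anything directly following a modifier list (e.g. the type or name of a
    // declaration) keeps the declaration's own indentation level.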
if (prevElement != null && prevElement.getElementType() == JavaElementType.MODIFIER_LIST) {
return Indent.getNoneIndent();
}
if (childNodeType == JavaDocElementType.DOC_TAG) return Indent.getNoneIndent();
if (childNodeType == JavaDocTokenType.DOC_COMMENT_LEADING_ASTERISKS) return Indent.getSpaceIndent(1);
if (child.getPsi() instanceof PsiFile) return Indent.getNoneIndent();
if (parent != null) {
final Indent defaultChildIndent = getChildIndent(parent, indentOptions);
if (defaultChildIndent != null) return defaultChildIndent;
}
if (child.getTreeParent() instanceof PsiLambdaExpression && child instanceof PsiCodeBlock) {
return Indent.getNoneIndent();
}
return null;
}
@Nullable
private static Indent getChildIndent(@NotNull ASTNode parent, @NotNull CommonCodeStyleSettings.IndentOptions indentOptions) {
final IElementType parentType = parent.getElementType();
if (parentType == JavaElementType.MODIFIER_LIST) return Indent.getNoneIndent();
if (parentType == JspElementType.JSP_CODE_BLOCK) return Indent.getNormalIndent();
if (parentType == JspElementType.JSP_CLASS_LEVEL_DECLARATION_STATEMENT) return Indent.getNormalIndent();
if (parentType == TokenType.DUMMY_HOLDER) return Indent.getNoneIndent();
if (parentType == JavaElementType.CLASS) return Indent.getNoneIndent();
if (parentType == JavaElementType.IF_STATEMENT) return Indent.getNoneIndent();
if (parentType == JavaElementType.TRY_STATEMENT) return Indent.getNoneIndent();
if (parentType == JavaElementType.CATCH_SECTION) return Indent.getNoneIndent();
if (parentType == JavaElementType.FOR_STATEMENT) return Indent.getNoneIndent();
if (parentType == JavaElementType.FOREACH_STATEMENT) return Indent.getNoneIndent();
if (parentType == JavaElementType.BLOCK_STATEMENT) return Indent.getNoneIndent();
if (parentType == JavaElementType.DO_WHILE_STATEMENT) return Indent.getNoneIndent();
if (parentType == JavaElementType.WHILE_STATEMENT) return Indent.getNoneIndent();
if (parentType == JavaElementType.SWITCH_STATEMENT) return Indent.getNoneIndent();
if (parentType == JavaElementType.METHOD) return Indent.getNoneIndent();
if (parentType == JavaDocElementType.DOC_COMMENT) return Indent.getNoneIndent();
if (parentType == JavaDocElementType.DOC_TAG) return Indent.getNoneIndent();
if (parentType == JavaDocElementType.DOC_INLINE_TAG) return Indent.getNoneIndent();
if (parentType == JavaElementType.IMPORT_LIST) return Indent.getNoneIndent();
if (parentType == JavaElementType.FIELD) return Indent.getContinuationWithoutFirstIndent(indentOptions.USE_RELATIVE_INDENTS);
if (parentType == JavaElementType.EXPRESSION_STATEMENT) return Indent.getNoneIndent();
if (SourceTreeToPsiMap.treeElementToPsi(parent) instanceof PsiFile) {
return Indent.getNoneIndent();
}
return null;
}
protected static boolean isRBrace(@NotNull final ASTNode child) {
return child.getElementType() == JavaTokenType.RBRACE;
}
@Nullable
@Override
public Spacing getSpacing(Block child1, @NotNull Block child2) {
return JavaSpacePropertyProcessor.getSpacing(getTreeNode(child2), mySettings, myJavaSettings);
}
@Override
public ASTNode getFirstTreeNode() {
return myNode;
}
@Override
public Indent getIndent() {
return myIndent;
}
protected static boolean isStatement(final ASTNode child, @Nullable final ASTNode parentNode) {
if (parentNode != null) {
final IElementType parentType = parentNode.getElementType();
if (parentType == JavaElementType.CODE_BLOCK) return false;
final int role = ((CompositeElement)parentNode).getChildRole(child);
if (parentType == JavaElementType.IF_STATEMENT) return role == ChildRole.THEN_BRANCH || role == ChildRole.ELSE_BRANCH;
if (parentType == JavaElementType.FOR_STATEMENT) return role == ChildRole.LOOP_BODY;
if (parentType == JavaElementType.WHILE_STATEMENT) return role == ChildRole.LOOP_BODY;
if (parentType == JavaElementType.DO_WHILE_STATEMENT) return role == ChildRole.LOOP_BODY;
if (parentType == JavaElementType.FOREACH_STATEMENT) return role == ChildRole.LOOP_BODY;
}
return false;
}
@Nullable
protected Wrap createChildWrap() {
return myWrapManager.createChildBlockWrap(this, getSettings(), this);
}
@Nullable
protected Alignment createChildAlignment() {
IElementType nodeType = myNode.getElementType();
if (nodeType == JavaElementType.POLYADIC_EXPRESSION) nodeType = JavaElementType.BINARY_EXPRESSION;
if (nodeType == JavaElementType.ASSIGNMENT_EXPRESSION) {
if (myNode.getTreeParent() != null
&& myNode.getTreeParent().getElementType() == JavaElementType.ASSIGNMENT_EXPRESSION
&& myAlignment != null) {
return myAlignment;
}
return createAlignment(mySettings.ALIGN_MULTILINE_ASSIGNMENT, null);
}
if (nodeType == JavaElementType.PARENTH_EXPRESSION) {
return createAlignment(mySettings.ALIGN_MULTILINE_PARENTHESIZED_EXPRESSION, null);
}
if (nodeType == JavaElementType.CONDITIONAL_EXPRESSION) {
return createAlignment(mySettings.ALIGN_MULTILINE_TERNARY_OPERATION, null);
}
if (nodeType == JavaElementType.FOR_STATEMENT) {
return createAlignment(mySettings.ALIGN_MULTILINE_FOR, null);
}
if (nodeType == JavaElementType.EXTENDS_LIST || nodeType == JavaElementType.IMPLEMENTS_LIST) {
return createAlignment(mySettings.ALIGN_MULTILINE_EXTENDS_LIST, null);
}
if (nodeType == JavaElementType.THROWS_LIST) {
return createAlignment(mySettings.ALIGN_MULTILINE_THROWS_LIST, null);
}
if (nodeType == JavaElementType.PARAMETER_LIST) {
return createAlignment(mySettings.ALIGN_MULTILINE_PARAMETERS, null);
}
if (nodeType == JavaElementType.RESOURCE_LIST) {
return createAlignment(mySettings.ALIGN_MULTILINE_RESOURCES, null);
}
if (nodeType == JavaElementType.BINARY_EXPRESSION) {
Alignment defaultAlignment = null;
if (shouldInheritAlignment()) {
defaultAlignment = myAlignment;
}
return createAlignment(mySettings.ALIGN_MULTILINE_BINARY_OPERATION, defaultAlignment);
}
if (nodeType == JavaElementType.CLASS || nodeType == JavaElementType.METHOD) {
return null;
}
return null;
}
@Nullable
protected Alignment chooseAlignment(@Nullable Alignment alignment, @Nullable Alignment alignment2, @NotNull ASTNode child) {
if (isTernaryOperatorToken(child)) {
return alignment2;
}
return alignment;
}
private boolean isTernaryOperatorToken(@NotNull final ASTNode child) {
final IElementType nodeType = myNode.getElementType();
if (nodeType == JavaElementType.CONDITIONAL_EXPRESSION) {
IElementType childType = child.getElementType();
      return childType == JavaTokenType.QUEST || childType == JavaTokenType.COLON;
}
else {
return false;
}
}
private boolean shouldInheritAlignment() {
if (myNode instanceof PsiPolyadicExpression) {
final ASTNode treeParent = myNode.getTreeParent();
if (treeParent instanceof PsiPolyadicExpression) {
return JavaFormatterUtil.areSamePriorityBinaryExpressions(myNode, treeParent);
}
}
return false;
}
@Nullable
protected ASTNode processChild(@NotNull final List<Block> result,
@NotNull ASTNode child,
Alignment defaultAlignment,
final Wrap defaultWrap,
final Indent childIndent) {
return processChild(result, child, AlignmentStrategy.wrap(defaultAlignment), defaultWrap, childIndent, -1);
}
@Nullable
protected ASTNode processChild(@NotNull final List<Block> result,
@NotNull ASTNode child,
@NotNull AlignmentStrategy alignmentStrategy,
@Nullable final Wrap defaultWrap,
final Indent childIndent) {
return processChild(result, child, alignmentStrategy, defaultWrap, childIndent, -1);
}
@Nullable
protected ASTNode processChild(@NotNull final List<Block> result,
@NotNull ASTNode child,
@NotNull AlignmentStrategy alignmentStrategy,
final Wrap defaultWrap,
final Indent childIndent,
int childOffset) {
final IElementType childType = child.getElementType();
if (childType == JavaTokenType.CLASS_KEYWORD || childType == JavaTokenType.INTERFACE_KEYWORD) {
myIsAfterClassKeyword = true;
}
if (childType == JavaElementType.METHOD_CALL_EXPRESSION) {
Alignment alignment = shouldAlignChild(child) ? alignmentStrategy.getAlignment(childType) : null;
result.add(createMethodCallExpressionBlock(child, arrangeChildWrap(child, defaultWrap), alignment, childIndent));
}
else {
IElementType nodeType = myNode.getElementType();
if (nodeType == JavaElementType.POLYADIC_EXPRESSION) nodeType = JavaElementType.BINARY_EXPRESSION;
if (childType == JavaTokenType.LBRACE && nodeType == JavaElementType.ARRAY_INITIALIZER_EXPRESSION) {
final Wrap wrap = Wrap.createWrap(getWrapType(mySettings.ARRAY_INITIALIZER_WRAP), false);
child = processParenthesisBlock(JavaTokenType.LBRACE, JavaTokenType.RBRACE,
result,
child,
WrappingStrategy.createDoNotWrapCommaStrategy(wrap),
mySettings.ALIGN_MULTILINE_ARRAY_INITIALIZER_EXPRESSION);
}
else if (childType == JavaTokenType.LBRACE && nodeType == JavaElementType.ANNOTATION_ARRAY_INITIALIZER) {
final Wrap wrap = Wrap.createWrap(getWrapType(mySettings.ARRAY_INITIALIZER_WRAP), false);
child = processParenthesisBlock(JavaTokenType.LBRACE, JavaTokenType.RBRACE,
result,
child,
WrappingStrategy.createDoNotWrapCommaStrategy(wrap),
mySettings.ALIGN_MULTILINE_ARRAY_INITIALIZER_EXPRESSION);
}
else if (childType == JavaTokenType.LPARENTH && nodeType == JavaElementType.EXPRESSION_LIST) {
final Wrap wrap = Wrap.createWrap(getWrapType(mySettings.CALL_PARAMETERS_WRAP), false);
if (mySettings.PREFER_PARAMETERS_WRAP && !isInsideMethodCall(myNode.getPsi())) {
wrap.ignoreParentWraps();
}
child = processParenthesisBlock(result, child,
WrappingStrategy.createDoNotWrapCommaStrategy(wrap),
mySettings.ALIGN_MULTILINE_PARAMETERS_IN_CALLS);
}
else if (childType == JavaTokenType.LPARENTH && nodeType == JavaElementType.PARAMETER_LIST) {
ASTNode parent = myNode.getTreeParent();
boolean isLambdaParameterList = parent != null && parent.getElementType() == JavaElementType.LAMBDA_EXPRESSION;
Wrap wrapToUse = isLambdaParameterList ? null : getMethodParametersWrap();
WrappingStrategy wrapStrategy = WrappingStrategy.createDoNotWrapCommaStrategy(wrapToUse);
child = processParenthesisBlock(result, child, wrapStrategy, mySettings.ALIGN_MULTILINE_PARAMETERS);
}
else if (childType == JavaTokenType.LPARENTH && nodeType == JavaElementType.RESOURCE_LIST) {
Wrap wrap = Wrap.createWrap(getWrapType(mySettings.RESOURCE_LIST_WRAP), false);
child = processParenthesisBlock(result, child,
WrappingStrategy.createDoNotWrapCommaStrategy(wrap),
mySettings.ALIGN_MULTILINE_RESOURCES);
}
else if (childType == JavaTokenType.LPARENTH && nodeType == JavaElementType.ANNOTATION_PARAMETER_LIST) {
Wrap wrap = Wrap.createWrap(getWrapType(myJavaSettings.ANNOTATION_PARAMETER_WRAP), false);
child = processParenthesisBlock(result, child,
WrappingStrategy.createDoNotWrapCommaStrategy(wrap),
myJavaSettings.ALIGN_MULTILINE_ANNOTATION_PARAMETERS);
}
else if (childType == JavaTokenType.LPARENTH && nodeType == JavaElementType.PARENTH_EXPRESSION) {
child = processParenthesisBlock(result, child,
WrappingStrategy.DO_NOT_WRAP,
mySettings.ALIGN_MULTILINE_PARENTHESIZED_EXPRESSION);
}
else if (childType == JavaElementType.ENUM_CONSTANT && myNode instanceof ClassElement) {
child = processEnumBlock(result, child, ((ClassElement)myNode).findEnumConstantListDelimiterPlace());
}
else if (mySettings.TERNARY_OPERATION_SIGNS_ON_NEXT_LINE && isTernaryOperationSign(child)) {
child = processTernaryOperationRange(result, child, defaultWrap, childIndent);
}
else if (childType == JavaElementType.FIELD) {
child = processField(result, child, alignmentStrategy, defaultWrap, childIndent);
}
else if (childType == JavaElementType.LOCAL_VARIABLE
|| childType == JavaElementType.DECLARATION_STATEMENT
&& (nodeType == JavaElementType.METHOD || nodeType == JavaElementType.CODE_BLOCK))
{
result.add(new SimpleJavaBlock(child, defaultWrap, alignmentStrategy, childIndent, mySettings, myJavaSettings));
}
else {
Alignment alignment = alignmentStrategy.getAlignment(childType);
AlignmentStrategy alignmentStrategyToUse = shouldAlignChild(child)
? AlignmentStrategy.wrap(alignment)
: AlignmentStrategy.getNullStrategy();
if (myAlignmentStrategy.getAlignment(nodeType, childType) != null &&
(nodeType == JavaElementType.IMPLEMENTS_LIST || nodeType == JavaElementType.CLASS)) {
alignmentStrategyToUse = myAlignmentStrategy;
}
Wrap wrap = arrangeChildWrap(child, defaultWrap);
Block block = createJavaBlock(child, mySettings, myJavaSettings, childIndent, wrap, alignmentStrategyToUse, childOffset);
if (block instanceof AbstractJavaBlock) {
final AbstractJavaBlock javaBlock = (AbstractJavaBlock)block;
if (nodeType == JavaElementType.METHOD_CALL_EXPRESSION && childType == JavaElementType.REFERENCE_EXPRESSION ||
nodeType == JavaElementType.REFERENCE_EXPRESSION && childType == JavaElementType.METHOD_CALL_EXPRESSION) {
javaBlock.setReservedWrap(getReservedWrap(nodeType), nodeType);
javaBlock.setReservedWrap(getReservedWrap(childType), childType);
}
else if (nodeType == JavaElementType.BINARY_EXPRESSION) {
javaBlock.setReservedWrap(defaultWrap, nodeType);
}
}
result.add(block);
}
}
return child;
}
private boolean isInsideMethodCall(@NotNull PsiElement element) {
PsiElement e = element.getParent();
int parentsVisited = 0;
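    // Walk up at most 5 parents: enough to detect an enclosing argument list
    // without scanning arbitrarily far up the PSI tree.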
while (e != null && !(e instanceof PsiStatement) && parentsVisited < 5) {
if (e instanceof PsiExpressionList) {
return true;
}
e = e.getParent();
parentsVisited++;
}
return false;
}
@NotNull
private Wrap getMethodParametersWrap() {
Wrap preferredWrap = getModifierListWrap();
if (preferredWrap == null) {
return Wrap.createWrap(getWrapType(mySettings.METHOD_PARAMETERS_WRAP), false);
} else {
return Wrap.createChildWrap(preferredWrap, getWrapType(mySettings.METHOD_PARAMETERS_WRAP), false);
}
}
@Nullable
private Wrap getModifierListWrap() {
AbstractJavaBlock parentBlock = getParentBlock();
if (parentBlock != null) {
return parentBlock.getReservedWrap(JavaElementType.MODIFIER_LIST);
}
return null;
}
private ASTNode processField(@NotNull final List<Block> result,
ASTNode child,
@NotNull final AlignmentStrategy alignmentStrategy,
final Wrap defaultWrap,
final Indent childIndent) {
ASTNode lastFieldInGroup = findLastFieldInGroup(child);
if (lastFieldInGroup == child) {
result.add(createJavaBlock(child, getSettings(), myJavaSettings, childIndent, arrangeChildWrap(child, defaultWrap), alignmentStrategy));
return child;
}
else {
final ArrayList<Block> localResult = new ArrayList<Block>();
while (child != null) {
if (!FormatterUtil.containsWhiteSpacesOnly(child)) {
localResult.add(createJavaBlock(
child, getSettings(), myJavaSettings,
Indent.getContinuationWithoutFirstIndent(myIndentSettings.USE_RELATIVE_INDENTS),
arrangeChildWrap(child, defaultWrap),
alignmentStrategy
)
);
}
if (child == lastFieldInGroup) break;
child = child.getTreeNext();
}
if (!localResult.isEmpty()) {
result.add(new SyntheticCodeBlock(localResult, null, getSettings(), myJavaSettings, childIndent, null));
}
return lastFieldInGroup;
}
}
@Nullable
private ASTNode processTernaryOperationRange(@NotNull final List<Block> result,
@NotNull final ASTNode child,
final Wrap defaultWrap,
final Indent childIndent) {
final ArrayList<Block> localResult = new ArrayList<Block>();
final Wrap wrap = arrangeChildWrap(child, defaultWrap);
final Alignment alignment = myReservedAlignment;
final Alignment alignment2 = myReservedAlignment2;
localResult.add(new LeafBlock(child, wrap, chooseAlignment(alignment, alignment2, child), childIndent));
ASTNode current = child.getTreeNext();
while (current != null) {
if (!FormatterUtil.containsWhiteSpacesOnly(current) && current.getTextLength() > 0) {
if (isTernaryOperationSign(current)) break;
current = processChild(localResult, current, chooseAlignment(alignment, alignment2, current), defaultWrap, childIndent);
}
if (current != null) {
current = current.getTreeNext();
}
}
result.add(new SyntheticCodeBlock(localResult, chooseAlignment(alignment, alignment2, child), getSettings(), myJavaSettings, null, wrap));
if (current == null) {
return null;
}
return current.getTreePrev();
}
private boolean isTernaryOperationSign(@NotNull final ASTNode child) {
if (myNode.getElementType() != JavaElementType.CONDITIONAL_EXPRESSION) return false;
final int role = ((CompositeElement)child.getTreeParent()).getChildRole(child);
return role == ChildRole.OPERATION_SIGN || role == ChildRole.COLON;
}
@NotNull
private Block createMethodCallExpressionBlock(@NotNull ASTNode node, Wrap blockWrap, Alignment alignment, Indent indent) {
final ArrayList<ASTNode> nodes = new ArrayList<ASTNode>();
collectNodes(nodes, node);
return new ChainMethodCallsBlockBuilder(alignment, blockWrap, indent, mySettings, myJavaSettings).build(nodes);
}
private static void collectNodes(@NotNull List<ASTNode> nodes, @NotNull ASTNode node) {
ASTNode child = node.getFirstChildNode();
while (child != null) {
if (!FormatterUtil.containsWhiteSpacesOnly(child)) {
        if (child.getElementType() == JavaElementType.METHOD_CALL_EXPRESSION ||
            child.getElementType() == JavaElementType.REFERENCE_EXPRESSION) {
collectNodes(nodes, child);
}
else {
nodes.add(child);
}
}
child = child.getTreeNext();
}
}
private boolean shouldAlignChild(@NotNull final ASTNode child) {
int role = getChildRole(child);
final IElementType nodeType = myNode.getElementType();
if (nodeType == JavaElementType.FOR_STATEMENT) {
if (role == ChildRole.FOR_INITIALIZATION || role == ChildRole.CONDITION || role == ChildRole.FOR_UPDATE) {
return true;
}
return false;
}
else if (nodeType == JavaElementType.EXTENDS_LIST || nodeType == JavaElementType.IMPLEMENTS_LIST) {
if (role == ChildRole.REFERENCE_IN_LIST || role == ChildRole.IMPLEMENTS_KEYWORD) {
return true;
}
return false;
}
else if (nodeType == JavaElementType.THROWS_LIST) {
if (role == ChildRole.REFERENCE_IN_LIST) {
return true;
}
return false;
}
else if (nodeType == JavaElementType.CLASS) {
if (role == ChildRole.CLASS_OR_INTERFACE_KEYWORD) return true;
if (myIsAfterClassKeyword) return false;
if (role == ChildRole.MODIFIER_LIST) return true;
return false;
}
else if (JavaElementType.FIELD == nodeType) {
return shouldAlignFieldInColumns(child);
}
else if (nodeType == JavaElementType.METHOD) {
if (role == ChildRole.MODIFIER_LIST) return true;
if (role == ChildRole.TYPE_PARAMETER_LIST) return true;
if (role == ChildRole.TYPE) return true;
if (role == ChildRole.NAME) return true;
if (role == ChildRole.THROWS_LIST && mySettings.ALIGN_THROWS_KEYWORD) return true;
return false;
}
else if (nodeType == JavaElementType.ASSIGNMENT_EXPRESSION) {
if (role == ChildRole.LOPERAND) return true;
if (role == ChildRole.ROPERAND && child.getElementType() == JavaElementType.ASSIGNMENT_EXPRESSION) {
return true;
}
return false;
}
else if (child.getElementType() == JavaTokenType.END_OF_LINE_COMMENT) {
ASTNode previous = child.getTreePrev();
// There is a special case - comment block that is located at the very start of the line. We don't reformat such a blocks,
// hence, no alignment should be applied to them in order to avoid subsequent blocks aligned with the same alignment to
// be located at the left editor edge as well.
CharSequence prevChars;
if (previous != null && previous.getElementType() == TokenType.WHITE_SPACE && (prevChars = previous.getChars()).length() > 0
&& prevChars.charAt(prevChars.length() - 1) == '\n') {
return false;
}
return true;
}
else if (nodeType == JavaElementType.MODIFIER_LIST) {
      // There is a possible case that the modifier list contains more than one element, e.g. 'private final'. It's also
      // possible that the list is aligned. We want to apply the alignment rule only to the first element then.
ASTNode previous = child.getTreePrev();
if (previous == null || previous.getTreeParent() != myNode) {
return true;
}
return false;
}
else {
return true;
}
}
private static int getChildRole(@NotNull ASTNode child) {
return ((CompositeElement)child.getTreeParent()).getChildRole(child);
}
/**
* Encapsulates alignment retrieval logic for variable declaration use-case assuming that given node is a child node
* of basic variable declaration node.
*
* @param child variable declaration child node which alignment is to be defined
   * @return <code>true</code> if the given child should be aligned in columns
   * @see CommonCodeStyleSettings#ALIGN_GROUP_FIELD_DECLARATIONS
   */
  private boolean shouldAlignFieldInColumns(@NotNull ASTNode child) {
// The whole idea of variable declarations alignment is that complete declaration blocks which children are to be aligned hold
// reference to the same AlignmentStrategy object, hence, reuse the same Alignment objects. So, there is no point in checking
// if it's necessary to align sub-blocks if shared strategy is not defined.
if (!mySettings.ALIGN_GROUP_FIELD_DECLARATIONS) {
return false;
}
IElementType childType = child.getElementType();
// We don't want to align subsequent identifiers in single-line declarations like 'int i1, i2, i3'. I.e. only 'i1'
// should be aligned then.
ASTNode previousNode = FormatterUtil.getPreviousNonWhitespaceSibling(child);
if (childType == JavaTokenType.IDENTIFIER && (previousNode == null || previousNode.getElementType() == JavaTokenType.COMMA)) {
return false;
}
return true;
}
@Nullable
public static Alignment createAlignment(final boolean alignOption, @Nullable final Alignment defaultAlignment) {
return alignOption ? createAlignmentOrDefault(null, defaultAlignment) : defaultAlignment;
}
@Nullable
public static Alignment createAlignment(Alignment base, final boolean alignOption, @Nullable final Alignment defaultAlignment) {
return alignOption ? createAlignmentOrDefault(base, defaultAlignment) : defaultAlignment;
}
@Nullable
protected Wrap arrangeChildWrap(final ASTNode child, Wrap defaultWrap) {
return myWrapManager.arrangeChildWrap(child, myNode, mySettings, myJavaSettings, defaultWrap, this);
}
@NotNull
private ASTNode processParenthesisBlock(@NotNull List<Block> result,
@NotNull ASTNode child,
@NotNull WrappingStrategy wrappingStrategy,
final boolean doAlign) {
myUseChildAttributes = true;
final IElementType from = JavaTokenType.LPARENTH;
final IElementType to = JavaTokenType.RPARENTH;
return processParenthesisBlock(from, to, result, child, wrappingStrategy, doAlign);
}
@NotNull
private ASTNode processParenthesisBlock(@NotNull IElementType from,
@Nullable final IElementType to,
@NotNull final List<Block> result,
@NotNull ASTNode child,
@NotNull final WrappingStrategy wrappingStrategy,
final boolean doAlign) {
final Indent externalIndent = Indent.getNoneIndent();
final Indent internalIndent = Indent.getContinuationWithoutFirstIndent(myIndentSettings.USE_RELATIVE_INDENTS);
final Indent internalIndentEnforcedToChildren = Indent.getIndent(Indent.Type.CONTINUATION, myIndentSettings.USE_RELATIVE_INDENTS, true);
AlignmentStrategy alignmentStrategy = AlignmentStrategy.wrap(createAlignment(doAlign, null), JavaTokenType.COMMA);
setChildIndent(internalIndent);
setChildAlignment(alignmentStrategy.getAlignment(null));
boolean methodParametersBlock = true;
ASTNode lBracketParent = child.getTreeParent();
if (lBracketParent != null) {
ASTNode methodCandidate = lBracketParent.getTreeParent();
methodParametersBlock = methodCandidate != null && (methodCandidate.getElementType() == JavaElementType.METHOD
|| methodCandidate.getElementType() == JavaElementType.METHOD_CALL_EXPRESSION);
}
Alignment bracketAlignment = methodParametersBlock && mySettings.ALIGN_MULTILINE_METHOD_BRACKETS ? Alignment.createAlignment() : null;
AlignmentStrategy anonymousClassStrategy = doAlign ? alignmentStrategy
: AlignmentStrategy.wrap(Alignment.createAlignment(),
false,
JavaTokenType.NEW_KEYWORD,
JavaElementType.NEW_EXPRESSION,
JavaTokenType.RBRACE);
setChildIndent(internalIndent);
setChildAlignment(alignmentStrategy.getAlignment(null));
boolean isAfterIncomplete = false;
ASTNode prev = child;
boolean afterAnonymousClass = false;
final boolean enforceIndent = shouldEnforceIndentToChildren();
while (child != null) {
isAfterIncomplete = isAfterIncomplete || child.getElementType() == TokenType.ERROR_ELEMENT ||
child.getElementType() == JavaElementType.EMPTY_EXPRESSION;
if (!FormatterUtil.containsWhiteSpacesOnly(child) && child.getTextLength() > 0) {
if (child.getElementType() == from) {
result.add(createJavaBlock(child, mySettings, myJavaSettings, externalIndent, null, bracketAlignment));
}
else if (child.getElementType() == to) {
result.add(createJavaBlock(child, mySettings, myJavaSettings,
isAfterIncomplete && !afterAnonymousClass ? internalIndent : externalIndent,
null,
isAfterIncomplete ? alignmentStrategy.getAlignment(null) : bracketAlignment)
);
return child;
}
else {
final IElementType elementType = child.getElementType();
Indent indentToUse = enforceIndent ? internalIndentEnforcedToChildren : internalIndent;
AlignmentStrategy alignmentStrategyToUse = canUseAnonymousClassAlignment(child) ? anonymousClassStrategy : alignmentStrategy;
processChild(result, child, alignmentStrategyToUse.getAlignment(elementType), wrappingStrategy.getWrap(elementType), indentToUse);
          if (to == null) { // process only one statement
return child;
}
}
isAfterIncomplete = false;
if (child.getElementType() != JavaTokenType.COMMA) {
afterAnonymousClass = isAnonymousClass(child);
}
}
prev = child;
child = child.getTreeNext();
}
return prev;
}
private static boolean canUseAnonymousClassAlignment(@NotNull ASTNode child) {
// The general idea is to handle situations like below:
// test(new Runnable() {
// public void run() {
// }
// }, new Runnable() {
// public void run() {
// }
// }
// );
// I.e. we want to align subsequent anonymous class argument to the previous one if it's not preceded by another argument
// at the same line, e.g.:
// test("this is a long argument", new Runnable() {
// public void run() {
// }
// }, new Runnable() {
// public void run() {
// }
// }
// );
if (!isAnonymousClass(child)) {
return false;
}
for (ASTNode node = child.getTreePrev(); node != null; node = node.getTreePrev()) {
if (node.getElementType() == TokenType.WHITE_SPACE) {
if (StringUtil.countNewLines(node.getChars()) > 0) {
return false;
}
}
else if (node.getElementType() == JavaTokenType.LPARENTH) {
// First method call argument.
return true;
}
else if (node.getElementType() != JavaTokenType.COMMA && !isAnonymousClass(node)) {
return false;
}
}
return true;
}
private boolean shouldEnforceIndentToChildren() {
if (myNode.getElementType() != JavaElementType.EXPRESSION_LIST) {
return false;
}
ASTNode parent = myNode.getTreeParent();
if (parent == null || parent.getElementType() != JavaElementType.METHOD_CALL_EXPRESSION) {
return false;
}
PsiExpression[] arguments = ((PsiExpressionList)myNode.getPsi()).getExpressions();
return JavaFormatterUtil.hasMultilineArguments(arguments) && JavaFormatterUtil.isMultilineExceptArguments(arguments);
}
private static boolean isAnonymousClass(@Nullable ASTNode node) {
if (node == null || node.getElementType() != JavaElementType.NEW_EXPRESSION) {
return false;
}
ASTNode lastChild = node.getLastChildNode();
return lastChild != null && lastChild.getElementType() == JavaElementType.ANONYMOUS_CLASS;
}
@Nullable
private ASTNode processEnumBlock(@NotNull List<Block> result,
@Nullable ASTNode child,
ASTNode last)
{
    final WrappingStrategy wrappingStrategy =
      WrappingStrategy.createDoNotWrapCommaStrategy(Wrap.createWrap(getWrapType(mySettings.ENUM_CONSTANTS_WRAP), true));
while (child != null) {
if (!FormatterUtil.containsWhiteSpacesOnly(child) && child.getTextLength() > 0) {
result.add(createJavaBlock(child, mySettings, myJavaSettings, Indent.getNormalIndent(),
wrappingStrategy.getWrap(child.getElementType()), AlignmentStrategy.getNullStrategy()));
if (child == last) return child;
}
child = child.getTreeNext();
}
return null;
}
private void setChildAlignment(final Alignment alignment) {
myChildAlignment = alignment;
}
private void setChildIndent(final Indent internalIndent) {
myChildIndent = internalIndent;
}
@Nullable
private static Alignment createAlignmentOrDefault(@Nullable Alignment base, @Nullable final Alignment defaultAlignment) {
if (defaultAlignment == null) {
return base == null ? Alignment.createAlignment() : Alignment.createChildAlignment(base);
}
return defaultAlignment;
}
private int getBraceStyle() {
final PsiElement psiNode = SourceTreeToPsiMap.treeElementToPsi(myNode);
if (psiNode instanceof PsiClass) {
return mySettings.CLASS_BRACE_STYLE;
}
if (psiNode instanceof PsiMethod
|| psiNode instanceof PsiCodeBlock && psiNode.getParent() != null && psiNode.getParent() instanceof PsiMethod) {
return mySettings.METHOD_BRACE_STYLE;
}
return mySettings.BRACE_STYLE;
}
protected Indent getCodeBlockInternalIndent(final int baseChildrenIndent) {
return getCodeBlockInternalIndent(baseChildrenIndent, false);
}
protected Indent getCodeBlockInternalIndent(final int baseChildrenIndent, boolean enforceParentIndent) {
if (isTopLevelClass() && mySettings.DO_NOT_INDENT_TOP_LEVEL_CLASS_MEMBERS) {
return Indent.getNoneIndent();
}
final int braceStyle = getBraceStyle();
return braceStyle == CommonCodeStyleSettings.NEXT_LINE_SHIFTED ?
createNormalIndent(baseChildrenIndent - 1, enforceParentIndent)
: createNormalIndent(baseChildrenIndent, enforceParentIndent);
}
protected static Indent createNormalIndent(final int baseChildrenIndent) {
return createNormalIndent(baseChildrenIndent, false);
}
protected static Indent createNormalIndent(final int baseChildrenIndent, boolean enforceIndentToChildren) {
if (baseChildrenIndent == 1) {
return Indent.getIndent(Indent.Type.NORMAL, false, enforceIndentToChildren);
}
else if (baseChildrenIndent <= 0) {
return Indent.getNoneIndent();
}
else {
LOG.assertTrue(false);
return Indent.getIndent(Indent.Type.NORMAL, false, enforceIndentToChildren);
}
}
private boolean isTopLevelClass() {
return myNode.getElementType() == JavaElementType.CLASS &&
SourceTreeToPsiMap.treeElementToPsi(myNode.getTreeParent()) instanceof PsiFile;
}
protected Indent getCodeBlockExternalIndent() {
final int braceStyle = getBraceStyle();
if (braceStyle == CommonCodeStyleSettings.END_OF_LINE || braceStyle == CommonCodeStyleSettings.NEXT_LINE ||
braceStyle == CommonCodeStyleSettings.NEXT_LINE_IF_WRAPPED) {
return Indent.getNoneIndent();
}
return Indent.getNormalIndent();
}
protected Indent getCodeBlockChildExternalIndent(final int newChildIndex) {
final int braceStyle = getBraceStyle();
if (!isAfterCodeBlock(newChildIndex)) {
return Indent.getNormalIndent();
}
if (braceStyle == CommonCodeStyleSettings.NEXT_LINE ||
braceStyle == CommonCodeStyleSettings.NEXT_LINE_IF_WRAPPED ||
braceStyle == CommonCodeStyleSettings.END_OF_LINE) {
return Indent.getNoneIndent();
}
return Indent.getNormalIndent();
}
private boolean isAfterCodeBlock(final int newChildIndex) {
if (newChildIndex == 0) return false;
Block blockBefore = getSubBlocks().get(newChildIndex - 1);
return blockBefore instanceof CodeBlockBlock;
}
/**
   * <b>Note:</b> this method is legacy and is expected to be removed once formatting processing
* is refactored
*
* @param elementType target element type
* @return <code>null</code> all the time
*/
@Nullable
@Override
public Wrap getReservedWrap(IElementType elementType) {
return myPreferredWraps != null ? myPreferredWraps.get(elementType) : null;
}
/**
* Defines contract for associating operation type and particular wrap instance. I.e. given wrap object <b>may</b> be returned
* from subsequent {@link #getReservedWrap(IElementType)} call if given operation type is used as an argument there.
* <p/>
   * This implementation stores the association in an internal map for later lookup via {@link #getReservedWrap(IElementType)}.
* <p/>
   * <b>Note:</b> this method is legacy and is expected to be removed once formatting processing
* is refactored
*
* @param reservedWrap reserved wrap instance
* @param operationType target operation type to associate with the given wrap instance
*/
public void setReservedWrap(final Wrap reservedWrap, final IElementType operationType) {
if (myPreferredWraps == null) {
myPreferredWraps = ContainerUtil.newHashMap();
}
myPreferredWraps.put(operationType, reservedWrap);
}
@Nullable
protected static ASTNode getTreeNode(final Block child2) {
if (child2 instanceof JavaBlock) {
return ((JavaBlock)child2).getFirstTreeNode();
}
if (child2 instanceof LeafBlock) {
return ((LeafBlock)child2).getTreeNode();
}
return null;
}
@Override
@NotNull
public ChildAttributes getChildAttributes(final int newChildIndex) {
if (myUseChildAttributes) {
return new ChildAttributes(myChildIndent, myChildAlignment);
}
if (isAfter(newChildIndex, new IElementType[]{JavaDocElementType.DOC_COMMENT})) {
return new ChildAttributes(Indent.getNoneIndent(), myChildAlignment);
}
return super.getChildAttributes(newChildIndex);
}
@Override
@Nullable
protected Indent getChildIndent() {
return getChildIndent(myNode, myIndentSettings);
}
@NotNull
public CommonCodeStyleSettings getSettings() {
return mySettings;
}
protected boolean isAfter(final int newChildIndex, @NotNull final IElementType[] elementTypes) {
if (newChildIndex == 0) return false;
final Block previousBlock = getSubBlocks().get(newChildIndex - 1);
if (!(previousBlock instanceof AbstractBlock)) return false;
final IElementType previousElementType = ((AbstractBlock)previousBlock).getNode().getElementType();
for (IElementType elementType : elementTypes) {
if (previousElementType == elementType) return true;
}
return false;
}
@Nullable
protected Alignment getUsedAlignment(final int newChildIndex) {
final List<Block> subBlocks = getSubBlocks();
for (int i = 0; i < newChildIndex; i++) {
if (i >= subBlocks.size()) return null;
final Block block = subBlocks.get(i);
final Alignment alignment = block.getAlignment();
if (alignment != null) return alignment;
}
return null;
}
@Override
public boolean isLeaf() {
return ShiftIndentInsideHelper.mayShiftIndentInside(myNode);
}
@Nullable
protected ASTNode composeCodeBlock(@NotNull final List<Block> result,
ASTNode child,
final Indent indent,
final int childrenIndent,
@Nullable final Wrap childWrap) {
final ArrayList<Block> localResult = new ArrayList<Block>();
processChild(localResult, child, AlignmentStrategy.getNullStrategy(), null, Indent.getNoneIndent());
child = child.getTreeNext();
ChildAlignmentStrategyProvider alignmentStrategyProvider = getStrategyProvider();
while (child != null) {
if (FormatterUtil.containsWhiteSpacesOnly(child)) {
child = child.getTreeNext();
continue;
}
Indent childIndent = getIndentForCodeBlock(child, childrenIndent);
AlignmentStrategy alignmentStrategyToUse = alignmentStrategyProvider.getNextChildStrategy(child);
final boolean isRBrace = isRBrace(child);
child = processChild(localResult, child, alignmentStrategyToUse, childWrap, childIndent);
if (isRBrace) {
result.add(createCodeBlockBlock(localResult, indent, childrenIndent));
return child;
}
if (child != null) {
child = child.getTreeNext();
}
}
result.add(createCodeBlockBlock(localResult, indent, childrenIndent));
return null;
}
private ChildAlignmentStrategyProvider getStrategyProvider() {
if (mySettings.ALIGN_GROUP_FIELD_DECLARATIONS && myNode.getElementType() == JavaElementType.CLASS) {
return new SubsequentFieldAligner(mySettings);
}
ASTNode parent = myNode.getTreeParent();
IElementType parentType = parent != null ? parent.getElementType() : null;
if (mySettings.ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS && parentType == JavaElementType.METHOD) {
return new SubsequentVariablesAligner();
}
return ChildAlignmentStrategyProvider.NULL_STRATEGY_PROVIDER;
}
private Indent getIndentForCodeBlock(ASTNode child, int childrenIndent) {
if (child.getElementType() == JavaElementType.CODE_BLOCK
&& (getBraceStyle() == CommonCodeStyleSettings.NEXT_LINE_SHIFTED
|| getBraceStyle() == CommonCodeStyleSettings.NEXT_LINE_SHIFTED2))
{
return Indent.getNormalIndent();
}
return isRBrace(child) ? Indent.getNoneIndent() : getCodeBlockInternalIndent(childrenIndent, false);
}
public AbstractJavaBlock getParentBlock() {
return myParentBlock;
}
public void setParentBlock(@NotNull AbstractJavaBlock parentBlock) {
myParentBlock = parentBlock;
}
@NotNull
public SyntheticCodeBlock createCodeBlockBlock(final List<Block> localResult, final Indent indent, final int childrenIndent) {
final SyntheticCodeBlock result = new SyntheticCodeBlock(localResult, null, getSettings(), myJavaSettings, indent, null);
result.setChildAttributes(new ChildAttributes(getCodeBlockInternalIndent(childrenIndent), null));
return result;
}
}<|fim▁end|> | |
<|file_name|>backend.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations<|fim▁hole|># load and run the onnx model exported from pytorch
# https://github.com/onnx/tutorials/blob/master/tutorials/PytorchOnnxExport.ipynb
import argparse
from singa import device
from singa import sonnx
from singa import tensor
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Load model from pytorch")
parser.add_argument("--use_cpu", action="store_true")
args = parser.parse_args()
if args.use_cpu:
print("Using CPU")
dev = device.get_default_device()
else:
print("Using GPU")
dev = device.create_cuda_gpu()
model = sonnx.load("alexnet.onnx")
backend = sonnx.prepare(model, dev)
input_name = model.graph.inputs[0].name
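    # the input tensor reuses the graph's declared input name (presumably so
    # the backend can match it by name when running the model)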
inputs = tensor.Tensor(shape=(2, 3, 224, 224), device=dev, name=input_name)
inputs.gaussian(0, 0.01)
y = backend.run([inputs])[0]<|fim▁end|> | # under the License.
#
|
<|file_name|>test_get_data.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from unittest import TestCase
from plotly.graph_objs import (Data, Figure, Layout, Line, Margin, Marker,
Scatter, XAxis, YAxis)
class TestGetData(TestCase):
fig = None
def setUp(self):<|fim▁hole|> Scatter(
x=[52698, 43117],
y=[53, 31],
mode='markers',
name='North America',
text=['United States', 'Canada'],
marker=Marker(
color='rgb(164, 194, 244)',
size=12,
line=Line(
color='white',
width=0.5
)
)
),
Scatter(
x=[39317, 37236, 35650, 30066, 29570, 27159, 23557, 21046,
18007],
y=[33, 20, 13, 19, 27, 19, 49, 44, 38],
mode='markers',
name='Europe',
text=['Germany', 'Britain', 'France', 'Spain', 'Italy',
'Czech Rep.', 'Greece', 'Poland'],
marker=Marker(
color='rgb(255, 217, 102)',
size=12,
line=Line(
color='white',
width=0.5
)
)
),
Scatter(
x=[42952, 37037, 33106, 17478, 9813, 5253, 4692, 3899],
y=[23, 42, 54, 89, 14, 99, 93, 70],
mode='markers',
name='Asia/Pacific',
text=['Australia', 'Japan', 'South Korea', 'Malaysia',
'China', 'Indonesia', 'Philippines', 'India'],
marker=Marker(
color='rgb(234, 153, 153)',
size=12,
line=Line(
color='white',
width=0.5
)
)
),
Scatter(
x=[19097, 18601, 15595, 13546, 12026, 7434, 5419],
y=[43, 47, 56, 80, 86, 93, 80],
mode='markers',
name='Latin America',
text=['Chile', 'Argentina', 'Mexico', 'Venezuela',
'Venezuela', 'El Salvador', 'Bolivia'],
marker=Marker(
color='rgb(142, 124, 195)',
size=12,
line=Line(
color='white',
width=0.5
)
)
)
]),
layout=Layout(
title='Quarter 1 Growth',
autosize=False,
width=500,
height=500,
xaxis=XAxis(
title='GDP per Capita',
showgrid=False,
zeroline=False
),
yaxis=YAxis(
title='Percent',
showline=False
),
margin=Margin(
l=65,
r=50,
b=65,
t=90
)
)
)
def test_get_data(self):
data = self.fig.get_data()
comp_data = [
{
'name': 'North America',
'text': ['United States', 'Canada'],
'x': [52698, 43117],
'y': [53, 31]
},
{
'name': 'Europe',
'text': ['Germany', 'Britain', 'France', 'Spain', 'Italy',
'Czech Rep.', 'Greece', 'Poland'],
'x': [39317, 37236, 35650, 30066, 29570, 27159, 23557, 21046,
18007],
'y': [33, 20, 13, 19, 27, 19, 49, 44, 38]
},
{
'name': 'Asia/Pacific',
'text': ['Australia', 'Japan', 'South Korea', 'Malaysia',
'China', 'Indonesia', 'Philippines', 'India'],
'x': [42952, 37037, 33106, 17478, 9813, 5253, 4692, 3899],
'y': [23, 42, 54, 89, 14, 99, 93, 70]},
{
'name': 'Latin America',
'text': ['Chile', 'Argentina', 'Mexico', 'Venezuela',
'Venezuela', 'El Salvador', 'Bolivia'],
'x': [19097, 18601, 15595, 13546, 12026, 7434, 5419],
'y': [43, 47, 56, 80, 86, 93, 80]
}
]
self.assertEqual(data, comp_data)
def test_get_data_flatten(self):
# this is similar to above, except nested objects are flattened
flat_data = self.fig.get_data(flatten=True)
comp_data = {
'Europe.x': [39317, 37236, 35650, 30066, 29570, 27159, 23557,
21046, 18007],
'Europe.y': [33, 20, 13, 19, 27, 19, 49, 44, 38],
'Asia/Pacific.x': [42952, 37037, 33106, 17478, 9813, 5253, 4692,
3899],
'Latin America.text': ['Chile', 'Argentina', 'Mexico', 'Venezuela',
'Venezuela', 'El Salvador', 'Bolivia'],
'North America.x': [52698, 43117],
'Asia/Pacific.y': [23, 42, 54, 89, 14, 99, 93, 70],
'Asia/Pacific.text': ['Australia', 'Japan', 'South Korea',
'Malaysia', 'China', 'Indonesia',
'Philippines', 'India'],
'North America.y': [53, 31],
'North America.text': ['United States', 'Canada'],
'Europe.text': ['Germany', 'Britain', 'France', 'Spain', 'Italy',
'Czech Rep.', 'Greece', 'Poland'],
'Latin America.x': [19097, 18601, 15595, 13546, 12026, 7434, 5419],
'Latin America.y': [43, 47, 56, 80, 86, 93, 80]
}
self.assertEqual(flat_data, comp_data)
# TODO test for Data, Scatter, etc..
def test_flatten_repeated_trace_names(self):
dl = Data([Scatter(name='thesame', x=[1, 2, 3]) for _ in range(3)])
data = dl.get_data(flatten=True)
comp_data = {
'thesame.x': [1, 2, 3],
'thesame_1.x': [1, 2, 3],
'thesame_2.x': [1, 2, 3]
}
self.assertEqual(data, comp_data)<|fim▁end|> | super(TestGetData, self).setUp()
self.fig = Figure(
data=Data([ |
<|file_name|>generate_logo.py<|end_file_name|><|fim▁begin|>import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.patches import PathPatch
from matplotlib.path import Path
from matplotlib.transforms import Bbox
from scipy import stats
x = np.linspace(0, 1, 200)
pdfx = stats.beta(2, 5).pdf(x)
path = Path(np.array([x, pdfx]).transpose())
patch = PathPatch(path, facecolor="none", alpha=0)
plt.gca().add_patch(patch)
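# The invisible patch traced along the density curve is reused below as a clip
# path, so the gradient image only shows through underneath the curve.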
cmap = matplotlib.colors.LinearSegmentedColormap.from_list("", ["#00bfbf", "#00bfbf", "#126a8a"])<|fim▁hole|>
im = plt.imshow(
np.array([[1, 0, 0], [1, 1, 0]]),
cmap=cmap,
interpolation="bicubic",
origin="lower",
extent=[0, 1, 0.0, 5],
aspect="auto",
clip_path=patch,
clip_on=True,
)
plt.axis("off")
plt.ylim(0, 5.5)
plt.xlim(0, 0.9)
bbox = Bbox([[0.75, 0.5], [5.4, 2.2]])
# plt.savefig('logo_00.png', dpi=300, bbox_inches=bbox, transparent=True)
plt.text(
x=0.04,
y=-0.01,
s="ArviZ",
clip_on=True,
fontdict={"name": "ubuntu mono", "fontsize": 62},
color="w",
)
plt.savefig("ArviZ.png", dpi=300, bbox_inches=bbox, transparent=True)
plt.savefig("ArviZ.pdf", dpi=300, bbox_inches=bbox, transparent=True)
plt.savefig("ArviZ.svg", dpi=300, bbox_inches=bbox, transparent=True)
plt.savefig("ArviZ.jpg", dpi=300, bbox_inches=bbox, transparent=True)<|fim▁end|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.